Mirror of https://github.com/nushell/nushell.git, synced 2025-06-05 17:46:44 +02:00
feat(lsp): cancellable heavy requests (#14851)
# Description

`tower-lsp` seems not well maintained, so I ended up with a dedicated thread for heavy computations, plus message passing so that a running job can be cancelled whenever a new request arrives. While the progress is being reported, the job can be interrupted by edits or new requests. (A minimal sketch of this pattern is included at the end of this description.)

<img width="522" alt="image" src="https://github.com/user-attachments/assets/b263d73d-8ea3-4b26-a7b7-e0b30462d1af" />

Goto-references requests are still blocking, with a hard timeout of 5 seconds; only the locations found within the time limit are returned. Technically, reference requests allow responses with partial results, which would mean instant responsiveness, but the `lsp_types` crate hasn't enabled this. I believe it can still be enabled with some JSON manipulation; I'll leave that for future work.

# User-Facing Changes

# Tests + Formatting

Need some clever way to test the cancellation; no test cases added yet.

# After Submitting
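The sketch below illustrates the worker-thread cancellation and timeout polling described above in a self-contained form. It only assumes the `crossbeam-channel` crate; the names (`WorkerMsg`, `spawn_heavy_job`), message shapes, and timing values are illustrative stand-ins, not the PR's actual API, which is in the diff further down.

```rust
use crossbeam_channel::{bounded, unbounded, Receiver, Sender};
use std::thread;
use std::time::{Duration, Instant};

// Messages the worker sends back to the main loop (illustrative only).
enum WorkerMsg {
    Progress(u32),
    Finished(Vec<String>),
    Cancelled,
}

// Spawn the heavy job on its own thread and hand back
// (cancel_sender, result_receiver), mirroring the idea of keeping a pair of
// channels around between requests so the next request can cancel the job.
fn spawn_heavy_job(items: Vec<String>) -> (Sender<bool>, Receiver<WorkerMsg>) {
    let (cancel_tx, cancel_rx) = bounded::<bool>(1);
    let (data_tx, data_rx) = unbounded::<WorkerMsg>();
    thread::spawn(move || {
        let len = items.len().max(1);
        let mut found = Vec::new();
        for (i, item) in items.into_iter().enumerate() {
            // The main loop sends `true` here when a new request arrives.
            if cancel_rx.try_recv().is_ok() {
                data_tx.send(WorkerMsg::Cancelled).ok();
                return;
            }
            thread::sleep(Duration::from_millis(50)); // stand-in for parsing one file
            found.push(item);
            data_tx.send(WorkerMsg::Progress((i * 100 / len) as u32)).ok();
        }
        data_tx.send(WorkerMsg::Finished(found)).ok();
    });
    (cancel_tx, data_rx)
}

fn main() {
    let files: Vec<String> = (0..20).map(|i| format!("script_{i}.nu")).collect();
    let (cancel_tx, data_rx) = spawn_heavy_job(files);

    // Like the blocking references request: poll the worker, but give up after
    // a hard timeout, cancel it, and keep whatever was found so far.
    let timeout = Duration::from_millis(300);
    let start = Instant::now();
    let mut progress = 0u32;
    loop {
        for msg in data_rx.try_iter() {
            match msg {
                WorkerMsg::Progress(p) => progress = p,
                WorkerMsg::Finished(hits) => {
                    println!("finished with {} hits", hits.len());
                    return;
                }
                WorkerMsg::Cancelled => {
                    println!("worker stopped after cancellation");
                    return;
                }
            }
        }
        if start.elapsed() > timeout {
            println!("timeout at {progress}%: cancelling worker, returning partial results");
            cancel_tx.send(true).ok();
            return;
        }
        thread::sleep(Duration::from_millis(10));
    }
}
```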
parent 75105033b2
commit 3f5ebd75b6
@@ -3,24 +3,30 @@ use lsp_types::{
notification::{Notification, PublishDiagnostics},
Diagnostic, DiagnosticSeverity, PublishDiagnosticsParams, Uri,
};
use miette::{IntoDiagnostic, Result};
use miette::{miette, IntoDiagnostic, Result};

impl LanguageServer {
pub(crate) fn publish_diagnostics_for_file(&mut self, uri: Uri) -> Result<()> {
let mut engine_state = self.new_engine_state();
engine_state.generate_nu_constant();

let Some((_, offset, working_set, file)) = self.parse_file(&mut engine_state, &uri, true)
else {
let Some((_, offset, working_set)) = self.parse_file(&mut engine_state, &uri, true) else {
return Ok(());
};

let mut diagnostics = PublishDiagnosticsParams {
uri,
uri: uri.clone(),
diagnostics: Vec::new(),
version: None,
};

let docs = match self.docs.lock() {
Ok(it) => it,
Err(err) => return Err(miette!(err.to_string())),
};
let file = docs
.get_document(&uri)
.ok_or_else(|| miette!("\nFailed to get document"))?;
for err in working_set.parse_errors.iter() {
let message = err.to_string();

@@ -33,7 +33,7 @@ impl LanguageServer {
.text_document
.uri
.to_owned();
let (working_set, id, _, _, _) = self
let (working_set, id, _, _) = self
.parse_and_find(
&mut engine_state,
&path_uri,

@@ -1,5 +1,6 @@
#![doc = include_str!("../README.md")]
use ast::find_id;
use crossbeam_channel::{Receiver, Sender};
use lsp_server::{Connection, IoThreads, Message, Response, ResponseError};
use lsp_textdocument::{FullTextDocument, TextDocuments};
use lsp_types::{

@@ -18,7 +19,7 @@ use nu_protocol::{
engine::{CachedFile, EngineState, Stack, StateWorkingSet},
DeclId, ModuleId, Span, Type, Value, VarId,
};
use std::collections::BTreeMap;
use std::{collections::BTreeMap, sync::Mutex};
use std::{
path::{Path, PathBuf},
str::FromStr,

@@ -27,6 +28,7 @@ use std::{
};
use symbols::SymbolCache;
use url::Url;
use workspace::{InternalMessage, RangePerDoc};

mod ast;
mod diagnostics;

@@ -47,13 +49,14 @@ pub enum Id {
pub struct LanguageServer {
connection: Connection,
io_threads: Option<IoThreads>,
docs: TextDocuments,
docs: Arc<Mutex<TextDocuments>>,
engine_state: EngineState,
symbol_cache: SymbolCache,
inlay_hints: BTreeMap<Uri, Vec<InlayHint>>,
workspace_folders: BTreeMap<String, WorkspaceFolder>,
// for workspace wide requests
occurrences: BTreeMap<Uri, Vec<Range>>,
channels: Option<(Sender<bool>, Arc<Receiver<InternalMessage>>)>,
}

pub fn path_to_uri(path: impl AsRef<Path>) -> Uri {

@@ -92,12 +95,13 @@ impl LanguageServer {
Ok(Self {
connection,
io_threads,
docs: TextDocuments::new(),
docs: Arc::new(Mutex::new(TextDocuments::new())),
engine_state,
symbol_cache: SymbolCache::new(),
inlay_hints: BTreeMap::new(),
workspace_folders: BTreeMap::new(),
occurrences: BTreeMap::new(),
channels: None,
})
}

@@ -141,12 +145,19 @@ impl LanguageServer {
self.initialize_workspace_folders(init_params)?;

while !self.engine_state.signals().interrupted() {
// first check new messages from child thread
self.handle_internal_messages()?;

let msg = match self
.connection
.receiver
.recv_timeout(Duration::from_secs(1))
{
Ok(msg) => msg,
Ok(msg) => {
// cancel execution if other messages received before job done
self.cancel_background_thread();
msg
}
Err(crossbeam_channel::RecvTimeoutError::Timeout) => {
continue;
}

@@ -177,7 +188,9 @@ impl LanguageServer {
Self::handle_lsp_request(request, |params| self.document_symbol(params))
}
request::References::METHOD => {
Self::handle_lsp_request(request, |params| self.references(params))
Self::handle_lsp_request(request, |params| {
self.references(params, 5000)
})
}
request::WorkspaceSymbolRequest::METHOD => {
Self::handle_lsp_request(request, |params| {

@@ -224,6 +237,42 @@ impl LanguageServer {
Ok(())
}

/// Send a cancel message to a running bg thread
pub fn cancel_background_thread(&mut self) {
if let Some((sender, _)) = &self.channels {
sender.send(true).ok();
}
}

/// Check results from background thread
pub fn handle_internal_messages(&mut self) -> Result<bool> {
let mut reset = false;
if let Some((_, receiver)) = &self.channels {
for im in receiver.try_iter() {
match im {
InternalMessage::RangeMessage(RangePerDoc { uri, ranges }) => {
self.occurrences.insert(uri, ranges);
}
InternalMessage::OnGoing(token, progress) => {
self.send_progress_report(token, progress, None)?;
}
InternalMessage::Finished(token) => {
reset = true;
self.send_progress_end(token, Some("Finished.".to_string()))?;
}
InternalMessage::Cancelled(token) => {
reset = true;
self.send_progress_end(token, Some("interrupted.".to_string()))?;
}
}
}
}
if reset {
self.channels = None;
}
Ok(reset)
}

pub fn new_engine_state(&self) -> EngineState {
let mut engine_state = self.engine_state.clone();
let cwd = std::env::current_dir().expect("Could not get current working directory.");

@@ -236,17 +285,24 @@ impl LanguageServer {
engine_state: &'a mut EngineState,
uri: &Uri,
pos: Position,
) -> Result<(StateWorkingSet<'a>, Id, Span, usize, &FullTextDocument)> {
let (block, file_offset, mut working_set, file) = self
) -> Result<(StateWorkingSet<'a>, Id, Span, usize)> {
let (block, file_offset, mut working_set) = self
.parse_file(engine_state, uri, false)
.ok_or_else(|| miette!("\nFailed to parse current file"))?;

let docs = match self.docs.lock() {
Ok(it) => it,
Err(err) => return Err(miette!(err.to_string())),
};
let file = docs
.get_document(uri)
.ok_or_else(|| miette!("\nFailed to get document"))?;
let location = file.offset_at(pos) as usize + file_offset;
let (id, span) = find_id(&block, &working_set, &location)
.ok_or_else(|| miette!("\nFailed to find current name"))?;
// add block to working_set for later references
working_set.add_block(block);
Ok((working_set, id, span, file_offset, file))
Ok((working_set, id, span, file_offset))
}

pub fn parse_file<'a>(

@@ -254,9 +310,10 @@ impl LanguageServer {
engine_state: &'a mut EngineState,
uri: &Uri,
need_hints: bool,
) -> Option<(Arc<Block>, usize, StateWorkingSet<'a>, &FullTextDocument)> {
) -> Option<(Arc<Block>, usize, StateWorkingSet<'a>)> {
let mut working_set = StateWorkingSet::new(engine_state);
let file = self.docs.get_document(uri)?;
let docs = self.docs.lock().ok()?;
let file = docs.get_document(uri)?;
let file_path = uri_to_path(uri);
let file_path_str = file_path.to_str()?;
let contents = file.get_content(None).as_bytes();

@@ -270,7 +327,7 @@ impl LanguageServer {
let file_inlay_hints = self.extract_inlay_hints(&working_set, &block, offset, file);
self.inlay_hints.insert(uri.clone(), file_inlay_hints);
}
Some((block, offset, working_set, file))
Some((block, offset, working_set))
}

fn get_location_by_span<'a>(

@@ -285,10 +342,10 @@ impl LanguageServer {
return None;
}
let target_uri = path_to_uri(path);
if let Some(doc) = self.docs.get_document(&target_uri) {
if let Some(file) = self.docs.lock().ok()?.get_document(&target_uri) {
return Some(Location {
uri: target_uri,
range: span_to_range(span, doc, cached_file.covered_span.start),
range: span_to_range(span, file, cached_file.covered_span.start),
});
} else {
// in case where the document is not opened yet, typically included by `nu -I`

@@ -344,7 +401,7 @@ impl LanguageServer {
.text_document
.uri
.to_owned();
let (working_set, id, _, _, _) = self
let (working_set, id, _, _) = self
.parse_and_find(
&mut engine_state,
&path_uri,

@@ -525,7 +582,8 @@ impl LanguageServer {

fn complete(&mut self, params: &CompletionParams) -> Option<CompletionResponse> {
let path_uri = params.text_document_position.text_document.uri.to_owned();
let file = self.docs.get_document(&path_uri)?;
let docs = self.docs.lock().ok()?;
let file = docs.get_document(&path_uri)?;

let mut completer =
NuCompleter::new(Arc::new(self.engine_state.clone()), Arc::new(Stack::new()));

@@ -17,8 +17,8 @@ impl LanguageServer {
&mut self,
notification: lsp_server::Notification,
) -> Option<Uri> {
self.docs
.listen(notification.method.as_str(), &notification.params);
let mut docs = self.docs.lock().ok()?;
docs.listen(notification.method.as_str(), &notification.params);
match notification.method.as_str() {
DidOpenTextDocument::METHOD => {
let params: DidOpenTextDocumentParams =

@@ -57,7 +57,7 @@ impl LanguageServer {
}
}

fn send_progress_notification(
pub fn send_progress_notification(
&self,
token: ProgressToken,
value: WorkDoneProgress,

@@ -74,12 +74,13 @@ impl LanguageServer {
.into_diagnostic()
}

pub fn send_progress_begin(&self, token: ProgressToken, title: &str) -> Result<()> {
pub fn send_progress_begin(&self, token: ProgressToken, title: String) -> Result<()> {
self.send_progress_notification(
token,
WorkDoneProgress::Begin(WorkDoneProgressBegin {
title: title.to_string(),
title,
percentage: Some(0),
cancellable: Some(true),
..Default::default()
}),
)

@@ -95,8 +96,8 @@ impl LanguageServer {
token,
WorkDoneProgress::Report(WorkDoneProgressReport {
message,
cancellable: Some(true),
percentage: Some(percentage),
..Default::default()
}),
)
}

@@ -272,7 +272,8 @@ impl LanguageServer {
) -> Option<DocumentSymbolResponse> {
let engine_state = self.new_engine_state();
let uri = params.text_document.uri.to_owned();
self.symbol_cache.update(&uri, &engine_state, &self.docs);
let docs = self.docs.lock().ok()?;
self.symbol_cache.update(&uri, &engine_state, &docs);
Some(DocumentSymbolResponse::Flat(
self.symbol_cache.get_symbols_by_uri(&uri)?,
))

@@ -284,7 +285,8 @@ impl LanguageServer {
) -> Option<WorkspaceSymbolResponse> {
if self.symbol_cache.any_dirty() {
let engine_state = self.new_engine_state();
self.symbol_cache.update_all(&engine_state, &self.docs);
let docs = self.docs.lock().ok()?;
self.symbol_cache.update_all(&engine_state, &docs);
}
Some(WorkspaceSymbolResponse::Flat(
self.symbol_cache

@@ -4,11 +4,13 @@ use std::{
collections::{BTreeMap, HashMap},
fs,
path::{Path, PathBuf},
sync::Arc,
};

use crate::{
ast::find_reference_by_id, path_to_uri, span_to_range, uri_to_path, Id, LanguageServer,
};
use crossbeam_channel::{Receiver, Sender};
use lsp_server::{Message, Request, Response};
use lsp_types::{
Location, PrepareRenameResponse, ProgressToken, Range, ReferenceParams, RenameParams,

@@ -16,9 +18,75 @@ use lsp_types::{
};
use miette::{miette, IntoDiagnostic, Result};
use nu_glob::{glob, Paths};
use nu_protocol::{engine::StateWorkingSet, Span};
use nu_protocol::{
engine::{EngineState, StateWorkingSet},
Span,
};
use serde_json::Value;

/// Message type indicating ranges of interest in each doc
#[derive(Debug)]
pub struct RangePerDoc {
pub uri: Uri,
pub ranges: Vec<Range>,
}

/// Message sent from background thread to main
#[derive(Debug)]
pub enum InternalMessage {
RangeMessage(RangePerDoc),
Cancelled(ProgressToken),
Finished(ProgressToken),
OnGoing(ProgressToken, u32),
}

fn find_nu_scripts_in_folder(folder_uri: &Uri) -> Result<Paths> {
let path = uri_to_path(folder_uri);
if !path.is_dir() {
return Err(miette!("\nworkspace folder does not exist."));
}
let pattern = format!("{}/**/*.nu", path.to_string_lossy());
glob(&pattern).into_diagnostic()
}

fn find_reference_in_file(
working_set: &mut StateWorkingSet,
file: &FullTextDocument,
fp: &Path,
id: &Id,
) -> Option<Vec<Range>> {
let fp_str = fp.to_str()?;
let block = parse(
working_set,
Some(fp_str),
file.get_content(None).as_bytes(),
false,
);
let file_span = working_set.get_span_for_filename(fp_str)?;
let offset = file_span.start;
let mut references: Vec<Span> = find_reference_by_id(&block, working_set, id);

// NOTE: for arguments whose declaration is in a signature
// which is not covered in the AST
if let Id::Variable(vid) = id {
let decl_span = working_set.get_variable(*vid).declaration_span;
if file_span.contains_span(decl_span)
&& decl_span.end > decl_span.start
&& !references.contains(&decl_span)
{
references.push(decl_span);
}
}
let occurs: Vec<Range> = references
.iter()
.map(|span| span_to_range(span, file, offset))
.collect();

// add_block to avoid repeated parsing
working_set.add_block(block);
(!occurs.is_empty()).then_some(occurs)
}

impl LanguageServer {
/// get initial workspace folders from initialization response
pub fn initialize_workspace_folders(&mut self, init_params: Value) -> Result<()> {

@@ -32,6 +100,8 @@ impl LanguageServer {
Ok(())
}

/// The rename request only happens after the client received a `PrepareRenameResponse`,
/// and a new name typed in, could happen before ranges ready for all files in the workspace folder
pub fn rename(&mut self, params: &RenameParams) -> Option<WorkspaceEdit> {
let new_name = params.new_name.to_owned();
// changes in WorkspaceEdit have mutable key

@@ -59,31 +129,55 @@ impl LanguageServer {
}

/// Goto references response
/// TODO: WorkDoneProgress -> PartialResults
pub fn references(&mut self, params: &ReferenceParams) -> Option<Vec<Location>> {
/// # Arguments
/// - `timeout`: timeout in milliseconds, when timeout
/// 1. Respond with all ranges found so far
/// 2. Cancel the background thread
pub fn references(&mut self, params: &ReferenceParams, timeout: u128) -> Option<Vec<Location>> {
self.occurrences = BTreeMap::new();
let mut engine_state = self.new_engine_state();
let path_uri = params.text_document_position.text_document.uri.to_owned();
let (mut working_set, id, span, _, _) = self
let (working_set, id, span, _) = self
.parse_and_find(
&mut engine_state,
&path_uri,
params.text_document_position.position,
)
.ok()?;
self.find_reference_in_workspace(
&mut working_set,
&path_uri,
id,
span,
params
.work_done_progress_params
.work_done_token
.to_owned()
.unwrap_or(ProgressToken::Number(1)),
"Finding references ...",
)
.ok()?;
// have to clone it again in order to move to another thread
let mut engine_state = self.new_engine_state();
engine_state.merge_delta(working_set.render()).ok()?;
let current_workspace_folder = self.get_workspace_folder_by_uri(&path_uri)?;
let token = params
.work_done_progress_params
.work_done_token
.to_owned()
.unwrap_or(ProgressToken::Number(1));
self.channels = self
.find_reference_in_workspace(
engine_state,
current_workspace_folder,
id,
span,
token.clone(),
"Finding references ...".to_string(),
)
.ok();
// TODO: WorkDoneProgress -> PartialResults for quicker response
// currently not enabled by `lsp_types` but hackable in `server_capabilities` json
let time_start = std::time::Instant::now();
loop {
if self.handle_internal_messages().ok()? {
break;
}
if time_start.elapsed().as_millis() > timeout {
self.send_progress_end(token, Some("Timeout".to_string()))
.ok()?;
self.cancel_background_thread();
self.channels = None;
break;
}
}
Some(
self.occurrences
.iter()

@@ -108,7 +202,7 @@ impl LanguageServer {
let mut engine_state = self.new_engine_state();
let path_uri = params.text_document.uri.to_owned();

let (mut working_set, id, span, file_offset, file) =
let (working_set, id, span, file_offset) =
self.parse_and_find(&mut engine_state, &path_uri, params.position)?;

if let Id::Value(_) = id {

@@ -119,6 +213,14 @@ impl LanguageServer {
"\nDefinition not found.\nNot allowed to rename built-ins."
));
}

let docs = match self.docs.lock() {
Ok(it) => it,
Err(err) => return Err(miette!(err.to_string())),
};
let file = docs
.get_document(&path_uri)
.ok_or_else(|| miette!("\nFailed to get document"))?;
let range = span_to_range(&span, file, file_offset);
let response = PrepareRenameResponse::Range(range);
self.connection

@@ -130,94 +232,112 @@ impl LanguageServer {
}))
.into_diagnostic()?;

// have to clone it again in order to move to another thread
let mut engine_state = self.new_engine_state();
engine_state
.merge_delta(working_set.render())
.into_diagnostic()?;
let current_workspace_folder = self
.get_workspace_folder_by_uri(&path_uri)
.ok_or_else(|| miette!("\nCurrent file is not in any workspace"))?;
// now continue parsing on other files in the workspace
self.find_reference_in_workspace(
&mut working_set,
&path_uri,
id,
span,
ProgressToken::Number(0),
"Preparing rename ...",
)
self.channels = self
.find_reference_in_workspace(
engine_state,
current_workspace_folder,
id,
span,
ProgressToken::Number(0),
"Preparing rename ...".to_string(),
)
.ok();
Ok(())
}

fn find_reference_in_workspace(
&mut self,
working_set: &mut StateWorkingSet,
current_uri: &Uri,
&self,
engine_state: EngineState,
current_workspace_folder: WorkspaceFolder,
id: Id,
span: Span,
token: ProgressToken,
message: &str,
) -> Result<()> {
let current_workspace_folder = self
.get_workspace_folder_by_uri(current_uri)
.ok_or_else(|| miette!("\nCurrent file is not in any workspace"))?;
let scripts: Vec<PathBuf> = Self::find_nu_scripts_in_folder(&current_workspace_folder.uri)?
.filter_map(|p| p.ok())
.collect();
let len = scripts.len();

message: String,
) -> Result<(Sender<bool>, Arc<Receiver<InternalMessage>>)> {
let (data_sender, data_receiver) = crossbeam_channel::unbounded::<InternalMessage>();
let (cancel_sender, cancel_receiver) = crossbeam_channel::bounded::<bool>(1);
let engine_state = Arc::new(engine_state);
let docs = self.docs.clone();
self.send_progress_begin(token.clone(), message)?;
for (i, fp) in scripts.iter().enumerate() {
let uri = path_to_uri(fp);
if let Some(file) = self.docs.get_document(&uri) {
Self::find_reference_in_file(working_set, file, fp, &id)
} else {
let bytes = fs::read(fp).into_diagnostic()?;
// skip if the file does not contain what we're looking for
let content_string = String::from_utf8(bytes).into_diagnostic()?;
let text_to_search =
String::from_utf8(working_set.get_span_contents(span).to_vec())
.into_diagnostic()?;
if !content_string.contains(&text_to_search) {
continue;

std::thread::spawn(move || -> Result<()> {
let mut working_set = StateWorkingSet::new(&engine_state);
let scripts: Vec<PathBuf> =
match find_nu_scripts_in_folder(&current_workspace_folder.uri) {
Ok(it) => it,
Err(_) => {
data_sender
.send(InternalMessage::Cancelled(token.clone()))
.ok();
return Ok(());
}
}
let temp_file = FullTextDocument::new("nu".to_string(), 0, content_string);
Self::find_reference_in_file(working_set, &temp_file, fp, &id)
.filter_map(|p| p.ok())
.collect();
let len = scripts.len();

for (i, fp) in scripts.iter().enumerate() {
// std::thread::sleep(std::time::Duration::from_millis(500));
// cancel the loop on cancellation message from main thread
if cancel_receiver.try_recv().is_ok() {
data_sender
.send(InternalMessage::Cancelled(token.clone()))
.into_diagnostic()?;
return Ok(());
}
let percentage = (i * 100 / len) as u32;
let uri = path_to_uri(fp);
let docs = match docs.lock() {
Ok(it) => it,
Err(err) => return Err(miette!(err.to_string())),
};
let file = if let Some(file) = docs.get_document(&uri) {
file
} else {
let bytes = match fs::read(fp) {
Ok(it) => it,
Err(_) => {
// continue on fs error
continue;
}
};
// skip if the file does not contain what we're looking for
let content_string = String::from_utf8_lossy(&bytes);
let text_to_search =
String::from_utf8_lossy(working_set.get_span_contents(span));
if !content_string.contains(text_to_search.as_ref()) {
// progress without any data
data_sender
.send(InternalMessage::OnGoing(token.clone(), percentage))
.into_diagnostic()?;
continue;
}
&FullTextDocument::new("nu".to_string(), 0, content_string.into())
};
let _ = find_reference_in_file(&mut working_set, file, fp, &id).map(|ranges| {
data_sender
.send(InternalMessage::RangeMessage(RangePerDoc { uri, ranges }))
.ok();
data_sender
.send(InternalMessage::OnGoing(token.clone(), percentage))
.ok();
});
}
.and_then(|range| self.occurrences.insert(uri, range));
self.send_progress_report(token.clone(), (i * 100 / len) as u32, None)?
}
self.send_progress_end(token.clone(), Some("Done".to_string()))
}

fn find_reference_in_file(
working_set: &mut StateWorkingSet,
file: &FullTextDocument,
fp: &Path,
id: &Id,
) -> Option<Vec<Range>> {
let fp_str = fp.to_str()?;
let block = parse(
working_set,
Some(fp_str),
file.get_content(None).as_bytes(),
false,
);
let file_span = working_set.get_span_for_filename(fp_str)?;
let offset = file_span.start;
let mut references: Vec<Span> = find_reference_by_id(&block, working_set, id);

// NOTE: for arguments whose declaration is in a signature
// which is not covered in the AST
if let Id::Variable(vid) = id {
let decl_span = working_set.get_variable(*vid).declaration_span;
if file_span.contains_span(decl_span)
&& decl_span.end > decl_span.start
&& !references.contains(&decl_span)
{
references.push(decl_span);
}
}
let occurs: Vec<Range> = references
.iter()
.map(|span| span_to_range(span, file, offset))
.collect();

// add_block to avoid repeated parsing
working_set.add_block(block);
(!occurs.is_empty()).then_some(occurs)
data_sender
.send(InternalMessage::Finished(token.clone()))
.into_diagnostic()?;
Ok(())
});
Ok((cancel_sender, Arc::new(data_receiver)))
}

fn get_workspace_folder_by_uri(&self, uri: &Uri) -> Option<WorkspaceFolder> {

@@ -231,15 +351,6 @@ impl LanguageServer {
})
.cloned()
}

fn find_nu_scripts_in_folder(folder_uri: &Uri) -> Result<Paths> {
let path = uri_to_path(folder_uri);
if !path.is_dir() {
return Err(miette!("\nworkspace folder does not exist."));
}
let pattern = format!("{}/**/*.nu", path.to_string_lossy());
glob(&pattern).into_diagnostic()
}
}

#[cfg(test)]

@@ -525,7 +636,7 @@ mod tests {

open_unchecked(&client_connection, script.clone());

let message_num = 4;
let message_num = 5;
let messages =
send_rename_prepare_request(&client_connection, script.clone(), 3, 5, message_num);
assert_eq!(messages.len(), message_num);