feat(lsp): cancellable heavy requests (#14851)

<!--
if this PR closes one or more issues, you can automatically link the PR
with
them by using one of the [*linking
keywords*](https://docs.github.com/en/issues/tracking-your-work-with-issues/linking-a-pull-request-to-an-issue#linking-a-pull-request-to-an-issue-using-a-keyword),
e.g.
- this PR should close #xxxx
- fixes #xxxx

you can also mention related issues, PRs or discussions!
-->

# Description
<!--
Thank you for improving Nushell. Please, check our [contributing
guide](../CONTRIBUTING.md) and talk to the core team before making major
changes.

Description of your pull request goes here. **Provide examples and/or
screenshots** if your changes affect the user experience.
-->

`tower-lsp` seems not well-maintained, so I ended up with a dedicated
thread for heavy computation plus message passing to cancel it whenever
a new request arrives.

While the progress notification is shown, the job can be interrupted by edits or new requests.

<img width="522" alt="image"
src="https://github.com/user-attachments/assets/b263d73d-8ea3-4b26-a7b7-e0b30462d1af"
/>

Goto references are still blocking, with a hard timeout of 5 seconds.
Only locations found within the time limit are returned. Technically,
reference requests allow for responses with partial results, which means
instant responsiveness. However, the `lsp_types` crate hasn’t enabled
this. I believe I can still enable it with some JSON manipulation, but
I’ll leave it for future work.

# User-Facing Changes
<!-- List of all changes that impact the user experience here. This
helps us keep track of breaking changes. -->

# Tests + Formatting
<!--
Don't forget to add tests that cover your changes.

Make sure you've run and fixed any issues with these commands:

- `cargo fmt --all -- --check` to check standard code formatting (`cargo
fmt --all` applies these changes)
- `cargo clippy --workspace -- -D warnings -D clippy::unwrap_used` to
check that you're using the standard code style
- `cargo test --workspace` to check that all tests pass (on Windows make
sure to [enable developer
mode](https://learn.microsoft.com/en-us/windows/apps/get-started/developer-mode-features-and-debugging))
- `cargo run -- -c "use toolkit.nu; toolkit test stdlib"` to run the
tests for the standard library

> **Note**
> from `nushell` you can also use the `toolkit` as follows
> ```bash
> use toolkit.nu # or use an `env_change` hook to activate it
automatically
> toolkit check pr
> ```
-->

Need some clever way to test the cancellation, no test cases added yet.

# After Submitting
<!-- If your PR had any user-facing changes, update [the
documentation](https://github.com/nushell/nushell.github.io) after the
PR is merged, if necessary. This will help us keep the docs up to date.
-->
This commit is contained in:
zc he 2025-01-17 23:57:35 +08:00 committed by GitHub
parent 75105033b2
commit 3f5ebd75b6
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
6 changed files with 312 additions and 134 deletions

View File

@ -3,24 +3,30 @@ use lsp_types::{
notification::{Notification, PublishDiagnostics}, notification::{Notification, PublishDiagnostics},
Diagnostic, DiagnosticSeverity, PublishDiagnosticsParams, Uri, Diagnostic, DiagnosticSeverity, PublishDiagnosticsParams, Uri,
}; };
use miette::{IntoDiagnostic, Result}; use miette::{miette, IntoDiagnostic, Result};
impl LanguageServer { impl LanguageServer {
pub(crate) fn publish_diagnostics_for_file(&mut self, uri: Uri) -> Result<()> { pub(crate) fn publish_diagnostics_for_file(&mut self, uri: Uri) -> Result<()> {
let mut engine_state = self.new_engine_state(); let mut engine_state = self.new_engine_state();
engine_state.generate_nu_constant(); engine_state.generate_nu_constant();
let Some((_, offset, working_set, file)) = self.parse_file(&mut engine_state, &uri, true) let Some((_, offset, working_set)) = self.parse_file(&mut engine_state, &uri, true) else {
else {
return Ok(()); return Ok(());
}; };
let mut diagnostics = PublishDiagnosticsParams { let mut diagnostics = PublishDiagnosticsParams {
uri, uri: uri.clone(),
diagnostics: Vec::new(), diagnostics: Vec::new(),
version: None, version: None,
}; };
let docs = match self.docs.lock() {
Ok(it) => it,
Err(err) => return Err(miette!(err.to_string())),
};
let file = docs
.get_document(&uri)
.ok_or_else(|| miette!("\nFailed to get document"))?;
for err in working_set.parse_errors.iter() { for err in working_set.parse_errors.iter() {
let message = err.to_string(); let message = err.to_string();

View File

@ -33,7 +33,7 @@ impl LanguageServer {
.text_document .text_document
.uri .uri
.to_owned(); .to_owned();
let (working_set, id, _, _, _) = self let (working_set, id, _, _) = self
.parse_and_find( .parse_and_find(
&mut engine_state, &mut engine_state,
&path_uri, &path_uri,

View File

@ -1,5 +1,6 @@
#![doc = include_str!("../README.md")] #![doc = include_str!("../README.md")]
use ast::find_id; use ast::find_id;
use crossbeam_channel::{Receiver, Sender};
use lsp_server::{Connection, IoThreads, Message, Response, ResponseError}; use lsp_server::{Connection, IoThreads, Message, Response, ResponseError};
use lsp_textdocument::{FullTextDocument, TextDocuments}; use lsp_textdocument::{FullTextDocument, TextDocuments};
use lsp_types::{ use lsp_types::{
@ -18,7 +19,7 @@ use nu_protocol::{
engine::{CachedFile, EngineState, Stack, StateWorkingSet}, engine::{CachedFile, EngineState, Stack, StateWorkingSet},
DeclId, ModuleId, Span, Type, Value, VarId, DeclId, ModuleId, Span, Type, Value, VarId,
}; };
use std::collections::BTreeMap; use std::{collections::BTreeMap, sync::Mutex};
use std::{ use std::{
path::{Path, PathBuf}, path::{Path, PathBuf},
str::FromStr, str::FromStr,
@ -27,6 +28,7 @@ use std::{
}; };
use symbols::SymbolCache; use symbols::SymbolCache;
use url::Url; use url::Url;
use workspace::{InternalMessage, RangePerDoc};
mod ast; mod ast;
mod diagnostics; mod diagnostics;
@ -47,13 +49,14 @@ pub enum Id {
pub struct LanguageServer { pub struct LanguageServer {
connection: Connection, connection: Connection,
io_threads: Option<IoThreads>, io_threads: Option<IoThreads>,
docs: TextDocuments, docs: Arc<Mutex<TextDocuments>>,
engine_state: EngineState, engine_state: EngineState,
symbol_cache: SymbolCache, symbol_cache: SymbolCache,
inlay_hints: BTreeMap<Uri, Vec<InlayHint>>, inlay_hints: BTreeMap<Uri, Vec<InlayHint>>,
workspace_folders: BTreeMap<String, WorkspaceFolder>, workspace_folders: BTreeMap<String, WorkspaceFolder>,
// for workspace wide requests // for workspace wide requests
occurrences: BTreeMap<Uri, Vec<Range>>, occurrences: BTreeMap<Uri, Vec<Range>>,
channels: Option<(Sender<bool>, Arc<Receiver<InternalMessage>>)>,
} }
pub fn path_to_uri(path: impl AsRef<Path>) -> Uri { pub fn path_to_uri(path: impl AsRef<Path>) -> Uri {
@ -92,12 +95,13 @@ impl LanguageServer {
Ok(Self { Ok(Self {
connection, connection,
io_threads, io_threads,
docs: TextDocuments::new(), docs: Arc::new(Mutex::new(TextDocuments::new())),
engine_state, engine_state,
symbol_cache: SymbolCache::new(), symbol_cache: SymbolCache::new(),
inlay_hints: BTreeMap::new(), inlay_hints: BTreeMap::new(),
workspace_folders: BTreeMap::new(), workspace_folders: BTreeMap::new(),
occurrences: BTreeMap::new(), occurrences: BTreeMap::new(),
channels: None,
}) })
} }
@ -141,12 +145,19 @@ impl LanguageServer {
self.initialize_workspace_folders(init_params)?; self.initialize_workspace_folders(init_params)?;
while !self.engine_state.signals().interrupted() { while !self.engine_state.signals().interrupted() {
// first check new messages from child thread
self.handle_internal_messages()?;
let msg = match self let msg = match self
.connection .connection
.receiver .receiver
.recv_timeout(Duration::from_secs(1)) .recv_timeout(Duration::from_secs(1))
{ {
Ok(msg) => msg, Ok(msg) => {
// cancel execution if other messages received before job done
self.cancel_background_thread();
msg
}
Err(crossbeam_channel::RecvTimeoutError::Timeout) => { Err(crossbeam_channel::RecvTimeoutError::Timeout) => {
continue; continue;
} }
@ -177,7 +188,9 @@ impl LanguageServer {
Self::handle_lsp_request(request, |params| self.document_symbol(params)) Self::handle_lsp_request(request, |params| self.document_symbol(params))
} }
request::References::METHOD => { request::References::METHOD => {
Self::handle_lsp_request(request, |params| self.references(params)) Self::handle_lsp_request(request, |params| {
self.references(params, 5000)
})
} }
request::WorkspaceSymbolRequest::METHOD => { request::WorkspaceSymbolRequest::METHOD => {
Self::handle_lsp_request(request, |params| { Self::handle_lsp_request(request, |params| {
@ -224,6 +237,42 @@ impl LanguageServer {
Ok(()) Ok(())
} }
/// Ask the running background worker thread (if any) to stop.
///
/// This is a best-effort signal: a failed send (e.g. the worker already
/// exited and dropped its receiver) is deliberately ignored.
pub fn cancel_background_thread(&mut self) {
    let Some((sender, _)) = &self.channels else {
        return;
    };
    // A failed send just means there is nothing left to cancel.
    let _ = sender.send(true);
}
/// Check results from background thread
pub fn handle_internal_messages(&mut self) -> Result<bool> {
let mut reset = false;
if let Some((_, receiver)) = &self.channels {
for im in receiver.try_iter() {
match im {
InternalMessage::RangeMessage(RangePerDoc { uri, ranges }) => {
self.occurrences.insert(uri, ranges);
}
InternalMessage::OnGoing(token, progress) => {
self.send_progress_report(token, progress, None)?;
}
InternalMessage::Finished(token) => {
reset = true;
self.send_progress_end(token, Some("Finished.".to_string()))?;
}
InternalMessage::Cancelled(token) => {
reset = true;
self.send_progress_end(token, Some("interrupted.".to_string()))?;
}
}
}
}
if reset {
self.channels = None;
}
Ok(reset)
}
pub fn new_engine_state(&self) -> EngineState { pub fn new_engine_state(&self) -> EngineState {
let mut engine_state = self.engine_state.clone(); let mut engine_state = self.engine_state.clone();
let cwd = std::env::current_dir().expect("Could not get current working directory."); let cwd = std::env::current_dir().expect("Could not get current working directory.");
@ -236,17 +285,24 @@ impl LanguageServer {
engine_state: &'a mut EngineState, engine_state: &'a mut EngineState,
uri: &Uri, uri: &Uri,
pos: Position, pos: Position,
) -> Result<(StateWorkingSet<'a>, Id, Span, usize, &FullTextDocument)> { ) -> Result<(StateWorkingSet<'a>, Id, Span, usize)> {
let (block, file_offset, mut working_set, file) = self let (block, file_offset, mut working_set) = self
.parse_file(engine_state, uri, false) .parse_file(engine_state, uri, false)
.ok_or_else(|| miette!("\nFailed to parse current file"))?; .ok_or_else(|| miette!("\nFailed to parse current file"))?;
let docs = match self.docs.lock() {
Ok(it) => it,
Err(err) => return Err(miette!(err.to_string())),
};
let file = docs
.get_document(uri)
.ok_or_else(|| miette!("\nFailed to get document"))?;
let location = file.offset_at(pos) as usize + file_offset; let location = file.offset_at(pos) as usize + file_offset;
let (id, span) = find_id(&block, &working_set, &location) let (id, span) = find_id(&block, &working_set, &location)
.ok_or_else(|| miette!("\nFailed to find current name"))?; .ok_or_else(|| miette!("\nFailed to find current name"))?;
// add block to working_set for later references // add block to working_set for later references
working_set.add_block(block); working_set.add_block(block);
Ok((working_set, id, span, file_offset, file)) Ok((working_set, id, span, file_offset))
} }
pub fn parse_file<'a>( pub fn parse_file<'a>(
@ -254,9 +310,10 @@ impl LanguageServer {
engine_state: &'a mut EngineState, engine_state: &'a mut EngineState,
uri: &Uri, uri: &Uri,
need_hints: bool, need_hints: bool,
) -> Option<(Arc<Block>, usize, StateWorkingSet<'a>, &FullTextDocument)> { ) -> Option<(Arc<Block>, usize, StateWorkingSet<'a>)> {
let mut working_set = StateWorkingSet::new(engine_state); let mut working_set = StateWorkingSet::new(engine_state);
let file = self.docs.get_document(uri)?; let docs = self.docs.lock().ok()?;
let file = docs.get_document(uri)?;
let file_path = uri_to_path(uri); let file_path = uri_to_path(uri);
let file_path_str = file_path.to_str()?; let file_path_str = file_path.to_str()?;
let contents = file.get_content(None).as_bytes(); let contents = file.get_content(None).as_bytes();
@ -270,7 +327,7 @@ impl LanguageServer {
let file_inlay_hints = self.extract_inlay_hints(&working_set, &block, offset, file); let file_inlay_hints = self.extract_inlay_hints(&working_set, &block, offset, file);
self.inlay_hints.insert(uri.clone(), file_inlay_hints); self.inlay_hints.insert(uri.clone(), file_inlay_hints);
} }
Some((block, offset, working_set, file)) Some((block, offset, working_set))
} }
fn get_location_by_span<'a>( fn get_location_by_span<'a>(
@ -285,10 +342,10 @@ impl LanguageServer {
return None; return None;
} }
let target_uri = path_to_uri(path); let target_uri = path_to_uri(path);
if let Some(doc) = self.docs.get_document(&target_uri) { if let Some(file) = self.docs.lock().ok()?.get_document(&target_uri) {
return Some(Location { return Some(Location {
uri: target_uri, uri: target_uri,
range: span_to_range(span, doc, cached_file.covered_span.start), range: span_to_range(span, file, cached_file.covered_span.start),
}); });
} else { } else {
// in case where the document is not opened yet, typically included by `nu -I` // in case where the document is not opened yet, typically included by `nu -I`
@ -344,7 +401,7 @@ impl LanguageServer {
.text_document .text_document
.uri .uri
.to_owned(); .to_owned();
let (working_set, id, _, _, _) = self let (working_set, id, _, _) = self
.parse_and_find( .parse_and_find(
&mut engine_state, &mut engine_state,
&path_uri, &path_uri,
@ -525,7 +582,8 @@ impl LanguageServer {
fn complete(&mut self, params: &CompletionParams) -> Option<CompletionResponse> { fn complete(&mut self, params: &CompletionParams) -> Option<CompletionResponse> {
let path_uri = params.text_document_position.text_document.uri.to_owned(); let path_uri = params.text_document_position.text_document.uri.to_owned();
let file = self.docs.get_document(&path_uri)?; let docs = self.docs.lock().ok()?;
let file = docs.get_document(&path_uri)?;
let mut completer = let mut completer =
NuCompleter::new(Arc::new(self.engine_state.clone()), Arc::new(Stack::new())); NuCompleter::new(Arc::new(self.engine_state.clone()), Arc::new(Stack::new()));

View File

@ -17,8 +17,8 @@ impl LanguageServer {
&mut self, &mut self,
notification: lsp_server::Notification, notification: lsp_server::Notification,
) -> Option<Uri> { ) -> Option<Uri> {
self.docs let mut docs = self.docs.lock().ok()?;
.listen(notification.method.as_str(), &notification.params); docs.listen(notification.method.as_str(), &notification.params);
match notification.method.as_str() { match notification.method.as_str() {
DidOpenTextDocument::METHOD => { DidOpenTextDocument::METHOD => {
let params: DidOpenTextDocumentParams = let params: DidOpenTextDocumentParams =
@ -57,7 +57,7 @@ impl LanguageServer {
} }
} }
fn send_progress_notification( pub fn send_progress_notification(
&self, &self,
token: ProgressToken, token: ProgressToken,
value: WorkDoneProgress, value: WorkDoneProgress,
@ -74,12 +74,13 @@ impl LanguageServer {
.into_diagnostic() .into_diagnostic()
} }
pub fn send_progress_begin(&self, token: ProgressToken, title: &str) -> Result<()> { pub fn send_progress_begin(&self, token: ProgressToken, title: String) -> Result<()> {
self.send_progress_notification( self.send_progress_notification(
token, token,
WorkDoneProgress::Begin(WorkDoneProgressBegin { WorkDoneProgress::Begin(WorkDoneProgressBegin {
title: title.to_string(), title,
percentage: Some(0), percentage: Some(0),
cancellable: Some(true),
..Default::default() ..Default::default()
}), }),
) )
@ -95,8 +96,8 @@ impl LanguageServer {
token, token,
WorkDoneProgress::Report(WorkDoneProgressReport { WorkDoneProgress::Report(WorkDoneProgressReport {
message, message,
cancellable: Some(true),
percentage: Some(percentage), percentage: Some(percentage),
..Default::default()
}), }),
) )
} }

View File

@ -272,7 +272,8 @@ impl LanguageServer {
) -> Option<DocumentSymbolResponse> { ) -> Option<DocumentSymbolResponse> {
let engine_state = self.new_engine_state(); let engine_state = self.new_engine_state();
let uri = params.text_document.uri.to_owned(); let uri = params.text_document.uri.to_owned();
self.symbol_cache.update(&uri, &engine_state, &self.docs); let docs = self.docs.lock().ok()?;
self.symbol_cache.update(&uri, &engine_state, &docs);
Some(DocumentSymbolResponse::Flat( Some(DocumentSymbolResponse::Flat(
self.symbol_cache.get_symbols_by_uri(&uri)?, self.symbol_cache.get_symbols_by_uri(&uri)?,
)) ))
@ -284,7 +285,8 @@ impl LanguageServer {
) -> Option<WorkspaceSymbolResponse> { ) -> Option<WorkspaceSymbolResponse> {
if self.symbol_cache.any_dirty() { if self.symbol_cache.any_dirty() {
let engine_state = self.new_engine_state(); let engine_state = self.new_engine_state();
self.symbol_cache.update_all(&engine_state, &self.docs); let docs = self.docs.lock().ok()?;
self.symbol_cache.update_all(&engine_state, &docs);
} }
Some(WorkspaceSymbolResponse::Flat( Some(WorkspaceSymbolResponse::Flat(
self.symbol_cache self.symbol_cache

View File

@ -4,11 +4,13 @@ use std::{
collections::{BTreeMap, HashMap}, collections::{BTreeMap, HashMap},
fs, fs,
path::{Path, PathBuf}, path::{Path, PathBuf},
sync::Arc,
}; };
use crate::{ use crate::{
ast::find_reference_by_id, path_to_uri, span_to_range, uri_to_path, Id, LanguageServer, ast::find_reference_by_id, path_to_uri, span_to_range, uri_to_path, Id, LanguageServer,
}; };
use crossbeam_channel::{Receiver, Sender};
use lsp_server::{Message, Request, Response}; use lsp_server::{Message, Request, Response};
use lsp_types::{ use lsp_types::{
Location, PrepareRenameResponse, ProgressToken, Range, ReferenceParams, RenameParams, Location, PrepareRenameResponse, ProgressToken, Range, ReferenceParams, RenameParams,
@ -16,9 +18,75 @@ use lsp_types::{
}; };
use miette::{miette, IntoDiagnostic, Result}; use miette::{miette, IntoDiagnostic, Result};
use nu_glob::{glob, Paths}; use nu_glob::{glob, Paths};
use nu_protocol::{engine::StateWorkingSet, Span}; use nu_protocol::{
engine::{EngineState, StateWorkingSet},
Span,
};
use serde_json::Value; use serde_json::Value;
/// Message type indicating ranges of interest in each doc
#[derive(Debug)]
pub struct RangePerDoc {
    // The document the ranges belong to
    pub uri: Uri,
    // All matched occurrence ranges found in that document
    pub ranges: Vec<Range>,
}
/// Message sent from background thread to main
#[derive(Debug)]
pub enum InternalMessage {
    // A batch of occurrence ranges found in one document
    RangeMessage(RangePerDoc),
    // The job identified by the token was cancelled before completion
    Cancelled(ProgressToken),
    // The job identified by the token ran to completion
    Finished(ProgressToken),
    // Progress update for the job's token; second field is the percentage
    OnGoing(ProgressToken, u32),
}
/// Recursively glob every `*.nu` script under the given workspace folder.
///
/// Returns an error when the folder URI does not resolve to an existing
/// directory, or when the glob pattern fails to compile.
fn find_nu_scripts_in_folder(folder_uri: &Uri) -> Result<Paths> {
    let folder = uri_to_path(folder_uri);
    if !folder.is_dir() {
        return Err(miette!("\nworkspace folder does not exist."));
    }
    glob(&format!("{}/**/*.nu", folder.to_string_lossy())).into_diagnostic()
}
/// Parse one file and collect the ranges of every occurrence of `id` in it.
///
/// The parsed block is added back to `working_set` so later lookups do not
/// re-parse the same file. Returns `None` when the path is not valid UTF-8,
/// when the working set has no span for the file, or when no occurrence of
/// `id` was found.
fn find_reference_in_file(
    working_set: &mut StateWorkingSet,
    file: &FullTextDocument,
    fp: &Path,
    id: &Id,
) -> Option<Vec<Range>> {
    let fp_str = fp.to_str()?;
    let block = parse(
        working_set,
        Some(fp_str),
        file.get_content(None).as_bytes(),
        false,
    );
    let file_span = working_set.get_span_for_filename(fp_str)?;
    let mut references: Vec<Span> = find_reference_by_id(&block, working_set, id);

    // NOTE: a variable declared inside a signature is not covered by the
    // AST, so its declaration span is appended by hand when it lives here.
    if let Id::Variable(vid) = id {
        let decl_span = working_set.get_variable(*vid).declaration_span;
        let in_this_file = file_span.contains_span(decl_span);
        let non_empty = decl_span.end > decl_span.start;
        if in_this_file && non_empty && !references.contains(&decl_span) {
            references.push(decl_span);
        }
    }

    let offset = file_span.start;
    let occurs: Vec<Range> = references
        .iter()
        .map(|span| span_to_range(span, file, offset))
        .collect();

    // Cache the parsed block so later requests avoid repeated parsing.
    working_set.add_block(block);

    if occurs.is_empty() {
        None
    } else {
        Some(occurs)
    }
}
impl LanguageServer { impl LanguageServer {
/// get initial workspace folders from initialization response /// get initial workspace folders from initialization response
pub fn initialize_workspace_folders(&mut self, init_params: Value) -> Result<()> { pub fn initialize_workspace_folders(&mut self, init_params: Value) -> Result<()> {
@ -32,6 +100,8 @@ impl LanguageServer {
Ok(()) Ok(())
} }
/// The rename request only happens after the client received a `PrepareRenameResponse`,
/// and a new name typed in, could happen before ranges ready for all files in the workspace folder
pub fn rename(&mut self, params: &RenameParams) -> Option<WorkspaceEdit> { pub fn rename(&mut self, params: &RenameParams) -> Option<WorkspaceEdit> {
let new_name = params.new_name.to_owned(); let new_name = params.new_name.to_owned();
// changes in WorkspaceEdit have mutable key // changes in WorkspaceEdit have mutable key
@ -59,31 +129,55 @@ impl LanguageServer {
} }
/// Goto references response /// Goto references response
/// TODO: WorkDoneProgress -> PartialResults /// # Arguments
pub fn references(&mut self, params: &ReferenceParams) -> Option<Vec<Location>> { /// - `timeout`: timeout in milliseconds, when timeout
/// 1. Respond with all ranges found so far
/// 2. Cancel the background thread
pub fn references(&mut self, params: &ReferenceParams, timeout: u128) -> Option<Vec<Location>> {
self.occurrences = BTreeMap::new(); self.occurrences = BTreeMap::new();
let mut engine_state = self.new_engine_state(); let mut engine_state = self.new_engine_state();
let path_uri = params.text_document_position.text_document.uri.to_owned(); let path_uri = params.text_document_position.text_document.uri.to_owned();
let (mut working_set, id, span, _, _) = self let (working_set, id, span, _) = self
.parse_and_find( .parse_and_find(
&mut engine_state, &mut engine_state,
&path_uri, &path_uri,
params.text_document_position.position, params.text_document_position.position,
) )
.ok()?; .ok()?;
self.find_reference_in_workspace( // have to clone it again in order to move to another thread
&mut working_set, let mut engine_state = self.new_engine_state();
&path_uri, engine_state.merge_delta(working_set.render()).ok()?;
id, let current_workspace_folder = self.get_workspace_folder_by_uri(&path_uri)?;
span, let token = params
params .work_done_progress_params
.work_done_progress_params .work_done_token
.work_done_token .to_owned()
.to_owned() .unwrap_or(ProgressToken::Number(1));
.unwrap_or(ProgressToken::Number(1)), self.channels = self
"Finding references ...", .find_reference_in_workspace(
) engine_state,
.ok()?; current_workspace_folder,
id,
span,
token.clone(),
"Finding references ...".to_string(),
)
.ok();
// TODO: WorkDoneProgress -> PartialResults for quicker response
// currently not enabled by `lsp_types` but hackable in `server_capabilities` json
let time_start = std::time::Instant::now();
loop {
if self.handle_internal_messages().ok()? {
break;
}
if time_start.elapsed().as_millis() > timeout {
self.send_progress_end(token, Some("Timeout".to_string()))
.ok()?;
self.cancel_background_thread();
self.channels = None;
break;
}
}
Some( Some(
self.occurrences self.occurrences
.iter() .iter()
@ -108,7 +202,7 @@ impl LanguageServer {
let mut engine_state = self.new_engine_state(); let mut engine_state = self.new_engine_state();
let path_uri = params.text_document.uri.to_owned(); let path_uri = params.text_document.uri.to_owned();
let (mut working_set, id, span, file_offset, file) = let (working_set, id, span, file_offset) =
self.parse_and_find(&mut engine_state, &path_uri, params.position)?; self.parse_and_find(&mut engine_state, &path_uri, params.position)?;
if let Id::Value(_) = id { if let Id::Value(_) = id {
@ -119,6 +213,14 @@ impl LanguageServer {
"\nDefinition not found.\nNot allowed to rename built-ins." "\nDefinition not found.\nNot allowed to rename built-ins."
)); ));
} }
let docs = match self.docs.lock() {
Ok(it) => it,
Err(err) => return Err(miette!(err.to_string())),
};
let file = docs
.get_document(&path_uri)
.ok_or_else(|| miette!("\nFailed to get document"))?;
let range = span_to_range(&span, file, file_offset); let range = span_to_range(&span, file, file_offset);
let response = PrepareRenameResponse::Range(range); let response = PrepareRenameResponse::Range(range);
self.connection self.connection
@ -130,94 +232,112 @@ impl LanguageServer {
})) }))
.into_diagnostic()?; .into_diagnostic()?;
// have to clone it again in order to move to another thread
let mut engine_state = self.new_engine_state();
engine_state
.merge_delta(working_set.render())
.into_diagnostic()?;
let current_workspace_folder = self
.get_workspace_folder_by_uri(&path_uri)
.ok_or_else(|| miette!("\nCurrent file is not in any workspace"))?;
// now continue parsing on other files in the workspace // now continue parsing on other files in the workspace
self.find_reference_in_workspace( self.channels = self
&mut working_set, .find_reference_in_workspace(
&path_uri, engine_state,
id, current_workspace_folder,
span, id,
ProgressToken::Number(0), span,
"Preparing rename ...", ProgressToken::Number(0),
) "Preparing rename ...".to_string(),
)
.ok();
Ok(())
} }
fn find_reference_in_workspace( fn find_reference_in_workspace(
&mut self, &self,
working_set: &mut StateWorkingSet, engine_state: EngineState,
current_uri: &Uri, current_workspace_folder: WorkspaceFolder,
id: Id, id: Id,
span: Span, span: Span,
token: ProgressToken, token: ProgressToken,
message: &str, message: String,
) -> Result<()> { ) -> Result<(Sender<bool>, Arc<Receiver<InternalMessage>>)> {
let current_workspace_folder = self let (data_sender, data_receiver) = crossbeam_channel::unbounded::<InternalMessage>();
.get_workspace_folder_by_uri(current_uri) let (cancel_sender, cancel_receiver) = crossbeam_channel::bounded::<bool>(1);
.ok_or_else(|| miette!("\nCurrent file is not in any workspace"))?; let engine_state = Arc::new(engine_state);
let scripts: Vec<PathBuf> = Self::find_nu_scripts_in_folder(&current_workspace_folder.uri)? let docs = self.docs.clone();
.filter_map(|p| p.ok())
.collect();
let len = scripts.len();
self.send_progress_begin(token.clone(), message)?; self.send_progress_begin(token.clone(), message)?;
for (i, fp) in scripts.iter().enumerate() {
let uri = path_to_uri(fp); std::thread::spawn(move || -> Result<()> {
if let Some(file) = self.docs.get_document(&uri) { let mut working_set = StateWorkingSet::new(&engine_state);
Self::find_reference_in_file(working_set, file, fp, &id) let scripts: Vec<PathBuf> =
} else { match find_nu_scripts_in_folder(&current_workspace_folder.uri) {
let bytes = fs::read(fp).into_diagnostic()?; Ok(it) => it,
// skip if the file does not contain what we're looking for Err(_) => {
let content_string = String::from_utf8(bytes).into_diagnostic()?; data_sender
let text_to_search = .send(InternalMessage::Cancelled(token.clone()))
String::from_utf8(working_set.get_span_contents(span).to_vec()) .ok();
.into_diagnostic()?; return Ok(());
if !content_string.contains(&text_to_search) { }
continue;
} }
let temp_file = FullTextDocument::new("nu".to_string(), 0, content_string); .filter_map(|p| p.ok())
Self::find_reference_in_file(working_set, &temp_file, fp, &id) .collect();
let len = scripts.len();
for (i, fp) in scripts.iter().enumerate() {
// std::thread::sleep(std::time::Duration::from_millis(500));
// cancel the loop on cancellation message from main thread
if cancel_receiver.try_recv().is_ok() {
data_sender
.send(InternalMessage::Cancelled(token.clone()))
.into_diagnostic()?;
return Ok(());
}
let percentage = (i * 100 / len) as u32;
let uri = path_to_uri(fp);
let docs = match docs.lock() {
Ok(it) => it,
Err(err) => return Err(miette!(err.to_string())),
};
let file = if let Some(file) = docs.get_document(&uri) {
file
} else {
let bytes = match fs::read(fp) {
Ok(it) => it,
Err(_) => {
// continue on fs error
continue;
}
};
// skip if the file does not contain what we're looking for
let content_string = String::from_utf8_lossy(&bytes);
let text_to_search =
String::from_utf8_lossy(working_set.get_span_contents(span));
if !content_string.contains(text_to_search.as_ref()) {
// progress without any data
data_sender
.send(InternalMessage::OnGoing(token.clone(), percentage))
.into_diagnostic()?;
continue;
}
&FullTextDocument::new("nu".to_string(), 0, content_string.into())
};
let _ = find_reference_in_file(&mut working_set, file, fp, &id).map(|ranges| {
data_sender
.send(InternalMessage::RangeMessage(RangePerDoc { uri, ranges }))
.ok();
data_sender
.send(InternalMessage::OnGoing(token.clone(), percentage))
.ok();
});
} }
.and_then(|range| self.occurrences.insert(uri, range)); data_sender
self.send_progress_report(token.clone(), (i * 100 / len) as u32, None)? .send(InternalMessage::Finished(token.clone()))
} .into_diagnostic()?;
self.send_progress_end(token.clone(), Some("Done".to_string())) Ok(())
} });
Ok((cancel_sender, Arc::new(data_receiver)))
fn find_reference_in_file(
working_set: &mut StateWorkingSet,
file: &FullTextDocument,
fp: &Path,
id: &Id,
) -> Option<Vec<Range>> {
let fp_str = fp.to_str()?;
let block = parse(
working_set,
Some(fp_str),
file.get_content(None).as_bytes(),
false,
);
let file_span = working_set.get_span_for_filename(fp_str)?;
let offset = file_span.start;
let mut references: Vec<Span> = find_reference_by_id(&block, working_set, id);
// NOTE: for arguments whose declaration is in a signature
// which is not covered in the AST
if let Id::Variable(vid) = id {
let decl_span = working_set.get_variable(*vid).declaration_span;
if file_span.contains_span(decl_span)
&& decl_span.end > decl_span.start
&& !references.contains(&decl_span)
{
references.push(decl_span);
}
}
let occurs: Vec<Range> = references
.iter()
.map(|span| span_to_range(span, file, offset))
.collect();
// add_block to avoid repeated parsing
working_set.add_block(block);
(!occurs.is_empty()).then_some(occurs)
} }
fn get_workspace_folder_by_uri(&self, uri: &Uri) -> Option<WorkspaceFolder> { fn get_workspace_folder_by_uri(&self, uri: &Uri) -> Option<WorkspaceFolder> {
@ -231,15 +351,6 @@ impl LanguageServer {
}) })
.cloned() .cloned()
} }
fn find_nu_scripts_in_folder(folder_uri: &Uri) -> Result<Paths> {
let path = uri_to_path(folder_uri);
if !path.is_dir() {
return Err(miette!("\nworkspace folder does not exist."));
}
let pattern = format!("{}/**/*.nu", path.to_string_lossy());
glob(&pattern).into_diagnostic()
}
} }
#[cfg(test)] #[cfg(test)]
@ -525,7 +636,7 @@ mod tests {
open_unchecked(&client_connection, script.clone()); open_unchecked(&client_connection, script.clone());
let message_num = 4; let message_num = 5;
let messages = let messages =
send_rename_prepare_request(&client_connection, script.clone(), 3, 5, message_num); send_rename_prepare_request(&client_connection, script.clone(), 3, 5, message_num);
assert_eq!(messages.len(), message_num); assert_eq!(messages.len(), message_num);