Merge branch 'main' into polars_categorical_2

commit 705bc342fb by Jack Wright, 2025-04-07 08:40:58 -07:00
50 changed files with 757 additions and 433 deletions

Cargo.lock (generated)

@@ -4488,9 +4488,9 @@ dependencies = [
 [[package]]
 name = "openssl"
-version = "0.10.70"
+version = "0.10.72"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "61cfb4e166a8bb8c9b55c500bc2308550148ece889be90f609377e58140f42c6"
+checksum = "fedfea7d58a1f73118430a55da6a286e7b044961736ce96a16a17068ea25e5da"
 dependencies = [
  "bitflags 2.6.0",
  "cfg-if",
@@ -4529,9 +4529,9 @@ dependencies = [
 [[package]]
 name = "openssl-sys"
-version = "0.9.105"
+version = "0.9.107"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8b22d5b84be05a8d6947c7cb71f7c849aa0f112acd4bf51c2a7c1c988ac0a9dc"
+checksum = "8288979acd84749c744a9014b4382d42b8f7b2592847b5afb2ed29e5d16ede07"
 dependencies = [
  "cc",
  "libc",


@@ -150,7 +150,7 @@ rusqlite = "0.31"
 rust-embed = "8.6.0"
 scopeguard = { version = "1.2.0" }
 serde = { version = "1.0" }
-serde_json = "1.0"
+serde_json = "1.0.97"
 serde_urlencoded = "0.7.1"
 serde_yaml = "0.9.33"
 sha2 = "0.10"


@@ -135,7 +135,7 @@ where
             (min, max) => (rhs, lhs, max, min),
         };
-        let pad = iter::repeat(0).take(max_len - min_len);
+        let pad = iter::repeat_n(0, max_len - min_len);
         let mut a;
         let mut b;
@@ -159,9 +159,10 @@ where
         }
         (Value::Binary { .. }, Value::Int { .. }) | (Value::Int { .. }, Value::Binary { .. }) => {
             Value::error(
-                ShellError::PipelineMismatch {
+                ShellError::OnlySupportsThisInputType {
                     exp_input_type: "input, and argument, to be both int or both binary"
                         .to_string(),
+                    wrong_type: "int and binary".to_string(),
                     dst_span: rhs.span(),
                     src_span: span,
                 },


@@ -249,7 +249,7 @@ fn shift_bytes_and_bits_left(data: &[u8], byte_shift: usize, bit_shift: usize) -
             Last | Only => lhs << bit_shift,
             _ => (lhs << bit_shift) | (rhs >> (8 - bit_shift)),
         })
-        .chain(iter::repeat(0).take(byte_shift))
+        .chain(iter::repeat_n(0, byte_shift))
         .collect::<Vec<u8>>()
 }
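Several hunks in this commit replace `iter::repeat(x).take(n)` with `iter::repeat_n(x, n)`, which was stabilized in Rust 1.82 and states the repetition count up front. A minimal standalone sketch of the two spellings (not part of the diff; the values are made up):

use std::iter;

fn main() {
    let (min_len, max_len) = (3usize, 8usize);

    // Older spelling: an infinite repeater trimmed with `take`.
    let pad_old: Vec<u8> = iter::repeat(0u8).take(max_len - min_len).collect();

    // Spelling used in the diff: the count is part of the constructor.
    let pad_new: Vec<u8> = iter::repeat_n(0u8, max_len - min_len).collect();

    assert_eq!(pad_old, pad_new);
}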


@@ -1,6 +1,5 @@
 use crate::{generate_strftime_list, parse_date_from_string};
 use chrono::{DateTime, FixedOffset, Local, NaiveDateTime, TimeZone, Utc};
-use human_date_parser::{from_human_time, ParseResult};
 use nu_cmd_base::input_handler::{operate, CmdArgument};
 use nu_engine::command_prelude::*;
@@ -98,11 +97,6 @@ impl Command for IntoDatetime {
                 "Show all possible variables for use in --format flag",
                 Some('l'),
             )
-            .switch(
-                "list-human",
-                "Show human-readable datetime parsing examples",
-                Some('n'),
-            )
             .rest(
                 "rest",
                 SyntaxShape::CellPath,
@@ -120,8 +114,6 @@ impl Command for IntoDatetime {
     ) -> Result<PipelineData, ShellError> {
         if call.has_flag(engine_state, stack, "list")? {
             Ok(generate_strftime_list(call.head, true).into_pipeline_data())
-        } else if call.has_flag(engine_state, stack, "list-human")? {
-            Ok(list_human_readable_examples(call.head).into_pipeline_data())
         } else {
             let cell_paths = call.rest(engine_state, stack, 0)?;
             let cell_paths = (!cell_paths.is_empty()).then_some(cell_paths);
@@ -256,21 +248,6 @@ impl Command for IntoDatetime {
                     Span::test_data(),
                 )),
             },
-            Example {
-                description: "Parsing human readable datetimes",
-                example: "'Today at 18:30' | into datetime",
-                result: None,
-            },
-            Example {
-                description: "Parsing human readable datetimes",
-                example: "'Last Friday at 19:45' | into datetime",
-                result: None,
-            },
-            Example {
-                description: "Parsing human readable datetimes",
-                example: "'In 5 minutes and 30 seconds' | into datetime",
-                result: None,
-            },
         ]
     }
 }
@@ -291,60 +268,9 @@ fn action(input: &Value, args: &Arguments, head: Span) -> Value {
     if matches!(input, Value::String { .. }) && dateformat.is_none() {
         let span = input.span();
         if let Ok(input_val) = input.coerce_str() {
-            match parse_date_from_string(&input_val, span) {
-                Ok(date) => return Value::date(date, span),
-                Err(_) => {
-                    if let Ok(date) = from_human_time(&input_val, Local::now().naive_local()) {
-                        match date {
-                            ParseResult::Date(date) => {
-                                let time = Local::now().time();
-                                let combined = date.and_time(time);
-                                let local_offset = *Local::now().offset();
-                                let dt_fixed =
-                                    TimeZone::from_local_datetime(&local_offset, &combined)
-                                        .single()
-                                        .unwrap_or_default();
-                                return Value::date(dt_fixed, span);
-                            }
-                            ParseResult::DateTime(date) => {
-                                let local_offset = *Local::now().offset();
-                                let dt_fixed = match local_offset.from_local_datetime(&date) {
-                                    chrono::LocalResult::Single(dt) => dt,
-                                    chrono::LocalResult::Ambiguous(_, _) => {
-                                        return Value::error(
-                                            ShellError::DatetimeParseError {
-                                                msg: "Ambiguous datetime".to_string(),
-                                                span,
-                                            },
-                                            span,
-                                        );
-                                    }
-                                    chrono::LocalResult::None => {
-                                        return Value::error(
-                                            ShellError::DatetimeParseError {
-                                                msg: "Invalid datetime".to_string(),
-                                                span,
-                                            },
-                                            span,
-                                        );
-                                    }
-                                };
-                                return Value::date(dt_fixed, span);
-                            }
-                            ParseResult::Time(time) => {
-                                let date = Local::now().date_naive();
-                                let combined = date.and_time(time);
-                                let local_offset = *Local::now().offset();
-                                let dt_fixed =
-                                    TimeZone::from_local_datetime(&local_offset, &combined)
-                                        .single()
-                                        .unwrap_or_default();
-                                return Value::date(dt_fixed, span);
-                            }
-                        }
-                    }
-                }
-            };
+            if let Ok(date) = parse_date_from_string(&input_val, span) {
+                return Value::date(date, span);
+            }
         }
     }
@@ -524,44 +450,6 @@ fn action(input: &Value, args: &Arguments, head: Span) -> Value {
     }
 }
-fn list_human_readable_examples(span: Span) -> Value {
-    let examples: Vec<String> = vec![
-        "Today 18:30".into(),
-        "2022-11-07 13:25:30".into(),
-        "15:20 Friday".into(),
-        "This Friday 17:00".into(),
-        "13:25, Next Tuesday".into(),
-        "Last Friday at 19:45".into(),
-        "In 3 days".into(),
-        "In 2 hours".into(),
-        "10 hours and 5 minutes ago".into(),
-        "1 years ago".into(),
-        "A year ago".into(),
-        "A month ago".into(),
-        "A week ago".into(),
-        "A day ago".into(),
-        "An hour ago".into(),
-        "A minute ago".into(),
-        "A second ago".into(),
-        "Now".into(),
-    ];
-    let records = examples
-        .iter()
-        .map(|s| {
-            Value::record(
-                record! {
-                    "parseable human datetime examples" => Value::test_string(s.to_string()),
-                    "result" => action(&Value::test_string(s.to_string()), &Arguments { zone_options: None, format_options: None, cell_paths: None }, span)
-                },
-                span,
-            )
-        })
-        .collect::<Vec<Value>>();
-    Value::list(records, span)
-}
 #[cfg(test)]
 mod tests {
     use super::*;
@@ -593,14 +481,7 @@ mod tests {
     }
     #[test]
-    #[ignore]
     fn takes_a_date_format_without_timezone() {
-        // Ignoring this test for now because we changed the human-date-parser to use
-        // the users timezone instead of UTC. We may continue to tweak this behavior.
-        // Another hacky solution is to set the timezone to UTC in the test, which works
-        // on MacOS and Linux but hasn't been tested on Windows. Plus it kind of defeats
-        // the purpose of a "without_timezone" test.
-        // std::env::set_var("TZ", "UTC");
         let date_str = Value::test_string("16.11.1984 8:00 am");
         let fmt_options = Some(DatetimeFormat("%d.%m.%Y %H:%M %P".to_string()));
         let args = Arguments {


@@ -40,6 +40,7 @@ impl Command for SplitCellPath {
         input: PipelineData,
     ) -> Result<PipelineData, ShellError> {
         let head = call.head;
+        let input_type = input.get_type();
         let src_span = match input {
             // Early return on correct type and empty pipeline
@@ -54,8 +55,9 @@ impl Command for SplitCellPath {
             PipelineData::ListStream(stream, ..) => stream.span(),
             PipelineData::ByteStream(stream, ..) => stream.span(),
         };
-        Err(ShellError::PipelineMismatch {
+        Err(ShellError::OnlySupportsThisInputType {
             exp_input_type: "cell-path".into(),
+            wrong_type: input_type.to_string(),
             dst_span: head,
             src_span,
         })
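A recurring change in this commit swaps `ShellError::PipelineMismatch` for `ShellError::OnlySupportsThisInputType`, whose extra `wrong_type` field reports the type that was actually received. A toy, self-contained sketch of that error shape (the enum below is illustrative only, not Nushell's real `ShellError`; just the field names mirror the diff):

#[derive(Debug)]
enum DemoError {
    // Old shape: records only what was expected.
    PipelineMismatch { exp_input_type: String },
    // New shape: also records what was actually received.
    OnlySupportsThisInputType { exp_input_type: String, wrong_type: String },
}

fn report(received: &str) -> DemoError {
    DemoError::OnlySupportsThisInputType {
        exp_input_type: "cell-path".into(),
        wrong_type: received.to_string(),
    }
}

fn main() {
    println!("{:?}", report("int"));
    println!("{:?}", DemoError::PipelineMismatch { exp_input_type: "cell-path".into() });
}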


@@ -0,0 +1,259 @@
+use chrono::{Local, TimeZone};
+use human_date_parser::{from_human_time, ParseResult};
+use nu_engine::command_prelude::*;
+#[derive(Clone)]
+pub struct DateFromHuman;
+impl Command for DateFromHuman {
+    fn name(&self) -> &str {
+        "date from-human"
+    }
+    fn signature(&self) -> Signature {
+        Signature::build("date from-human")
+            .input_output_types(vec![
+                (Type::String, Type::Date),
+                (Type::Nothing, Type::table()),
+            ])
+            .allow_variants_without_examples(true)
+            .switch(
+                "list",
+                "Show human-readable datetime parsing examples",
+                Some('l'),
+            )
+            .category(Category::Date)
+    }
+    fn description(&self) -> &str {
+        "Convert a human readable datetime string to a datetime."
+    }
+    fn search_terms(&self) -> Vec<&str> {
+        vec![
+            "relative",
+            "now",
+            "today",
+            "tomorrow",
+            "yesterday",
+            "weekday",
+            "weekday_name",
+            "timezone",
+        ]
+    }
+    fn run(
+        &self,
+        engine_state: &EngineState,
+        stack: &mut Stack,
+        call: &Call,
+        input: PipelineData,
+    ) -> Result<PipelineData, ShellError> {
+        if call.has_flag(engine_state, stack, "list")? {
+            return Ok(list_human_readable_examples(call.head).into_pipeline_data());
+        }
+        let head = call.head;
+        // This doesn't match explicit nulls
+        if matches!(input, PipelineData::Empty) {
+            return Err(ShellError::PipelineEmpty { dst_span: head });
+        }
+        input.map(move |value| helper(value, head), engine_state.signals())
+    }
+    fn examples(&self) -> Vec<Example> {
+        vec![
+            Example {
+                description: "Parsing human readable datetime",
+                example: "'Today at 18:30' | date from-human",
+                result: None,
+            },
+            Example {
+                description: "Parsing human readable datetime",
+                example: "'Last Friday at 19:45' | date from-human",
+                result: None,
+            },
+            Example {
+                description: "Parsing human readable datetime",
+                example: "'In 5 minutes and 30 seconds' | date from-human",
+                result: None,
+            },
+            Example {
+                description: "PShow human-readable datetime parsing examples",
+                example: "date from-human --list",
+                result: None,
+            },
+        ]
+    }
+}
+fn helper(value: Value, head: Span) -> Value {
+    let span = value.span();
+    let input_val = match value {
+        Value::String { val, .. } => val,
+        other => {
+            return Value::error(
+                ShellError::OnlySupportsThisInputType {
+                    exp_input_type: "string".to_string(),
+                    wrong_type: other.get_type().to_string(),
+                    dst_span: head,
+                    src_span: span,
+                },
+                span,
+            )
+        }
+    };
+    if let Ok(date) = from_human_time(&input_val, Local::now().naive_local()) {
+        match date {
+            ParseResult::Date(date) => {
+                let time = Local::now().time();
+                let combined = date.and_time(time);
+                let local_offset = *Local::now().offset();
+                let dt_fixed = TimeZone::from_local_datetime(&local_offset, &combined)
+                    .single()
+                    .unwrap_or_default();
+                return Value::date(dt_fixed, span);
+            }
+            ParseResult::DateTime(date) => {
+                let local_offset = *Local::now().offset();
+                let dt_fixed = match local_offset.from_local_datetime(&date) {
+                    chrono::LocalResult::Single(dt) => dt,
+                    chrono::LocalResult::Ambiguous(_, _) => {
+                        return Value::error(
+                            ShellError::DatetimeParseError {
+                                msg: "Ambiguous datetime".to_string(),
+                                span,
+                            },
+                            span,
+                        );
+                    }
+                    chrono::LocalResult::None => {
+                        return Value::error(
+                            ShellError::DatetimeParseError {
+                                msg: "Invalid datetime".to_string(),
+                                span,
+                            },
+                            span,
+                        );
+                    }
+                };
+                return Value::date(dt_fixed, span);
+            }
+            ParseResult::Time(time) => {
+                let date = Local::now().date_naive();
+                let combined = date.and_time(time);
+                let local_offset = *Local::now().offset();
+                let dt_fixed = TimeZone::from_local_datetime(&local_offset, &combined)
+                    .single()
+                    .unwrap_or_default();
+                return Value::date(dt_fixed, span);
+            }
+        }
+    }
+    match from_human_time(&input_val, Local::now().naive_local()) {
+        Ok(date) => match date {
+            ParseResult::Date(date) => {
+                let time = Local::now().time();
+                let combined = date.and_time(time);
+                let local_offset = *Local::now().offset();
+                let dt_fixed = TimeZone::from_local_datetime(&local_offset, &combined)
+                    .single()
+                    .unwrap_or_default();
+                Value::date(dt_fixed, span)
+            }
+            ParseResult::DateTime(date) => {
+                let local_offset = *Local::now().offset();
+                let dt_fixed = match local_offset.from_local_datetime(&date) {
+                    chrono::LocalResult::Single(dt) => dt,
+                    chrono::LocalResult::Ambiguous(_, _) => {
+                        return Value::error(
+                            ShellError::DatetimeParseError {
+                                msg: "Ambiguous datetime".to_string(),
+                                span,
+                            },
+                            span,
+                        );
+                    }
+                    chrono::LocalResult::None => {
+                        return Value::error(
+                            ShellError::DatetimeParseError {
+                                msg: "Invalid datetime".to_string(),
+                                span,
+                            },
+                            span,
+                        );
+                    }
+                };
+                Value::date(dt_fixed, span)
+            }
+            ParseResult::Time(time) => {
+                let date = Local::now().date_naive();
+                let combined = date.and_time(time);
+                let local_offset = *Local::now().offset();
+                let dt_fixed = TimeZone::from_local_datetime(&local_offset, &combined)
+                    .single()
+                    .unwrap_or_default();
+                Value::date(dt_fixed, span)
+            }
+        },
+        Err(_) => Value::error(
+            ShellError::IncorrectValue {
+                msg: "Cannot parse as humanized date".to_string(),
+                val_span: head,
+                call_span: span,
+            },
+            span,
+        ),
+    }
+}
+fn list_human_readable_examples(span: Span) -> Value {
+    let examples: Vec<String> = vec![
+        "Today 18:30".into(),
+        "2022-11-07 13:25:30".into(),
+        "15:20 Friday".into(),
+        "This Friday 17:00".into(),
+        "13:25, Next Tuesday".into(),
+        "Last Friday at 19:45".into(),
+        "In 3 days".into(),
+        "In 2 hours".into(),
+        "10 hours and 5 minutes ago".into(),
+        "1 years ago".into(),
+        "A year ago".into(),
+        "A month ago".into(),
+        "A week ago".into(),
+        "A day ago".into(),
+        "An hour ago".into(),
+        "A minute ago".into(),
+        "A second ago".into(),
+        "Now".into(),
+    ];
+    let records = examples
+        .iter()
+        .map(|s| {
+            Value::record(
+                record! {
+                    "parseable human datetime examples" => Value::test_string(s.to_string()),
+                    "result" => helper(Value::test_string(s.to_string()), span),
+                },
+                span,
+            )
+        })
+        .collect::<Vec<Value>>();
+    Value::list(records, span)
+}
+#[cfg(test)]
+mod test {
+    use super::*;
+    #[test]
+    fn test_examples() {
+        use crate::test_examples;
+        test_examples(DateFromHuman {})
+    }
+}
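The core of the new `date from-human` helper is resolving a naive local date/time against the current local offset and handling chrono's `LocalResult`. A minimal chrono-only sketch of that step (assumes a `chrono` dependency; the function name is invented). Note that with a `FixedOffset` the `Ambiguous`/`None` arms are effectively unreachable; they only matter when resolving against a real time zone:

use chrono::{DateTime, FixedOffset, Local, LocalResult, NaiveDateTime, TimeZone};

// Resolve a naive local datetime to a fixed-offset datetime, mirroring the
// match on LocalResult in the helper above.
fn resolve_local(naive: NaiveDateTime) -> Result<DateTime<FixedOffset>, String> {
    let local_offset: FixedOffset = *Local::now().offset();
    match local_offset.from_local_datetime(&naive) {
        LocalResult::Single(dt) => Ok(dt),
        LocalResult::Ambiguous(_, _) => Err("Ambiguous datetime".into()),
        LocalResult::None => Err("Invalid datetime".into()),
    }
}

fn main() {
    println!("{:?}", resolve_local(Local::now().naive_local()));
}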


@@ -1,4 +1,5 @@
 mod date_;
+mod from_human;
 mod humanize;
 mod list_timezone;
 mod now;
@@ -7,6 +8,7 @@ mod to_timezone;
 mod utils;
 pub use date_::Date;
+pub use from_human::DateFromHuman;
 pub use humanize::DateHumanize;
 pub use list_timezone::DateListTimezones;
 pub use now::DateNow;


@@ -118,7 +118,7 @@ fn increase_string_width(text: &mut String, total: usize) {
     let rest = total - width;
     if rest > 0 {
-        text.extend(std::iter::repeat(' ').take(rest));
+        text.extend(std::iter::repeat_n(' ', rest));
     }
 }


@@ -272,6 +272,7 @@ pub fn add_shell_command_context(mut engine_state: EngineState) -> EngineState {
     // Date
     bind_command! {
         Date,
+        DateFromHuman,
         DateHumanize,
         DateListTimezones,
         DateNow,


@@ -378,10 +378,7 @@ fn ls_for_one_pattern(
         .par_bridge()
         .filter_map(move |x| match x {
             Ok(path) => {
-                let metadata = match std::fs::symlink_metadata(&path) {
-                    Ok(metadata) => Some(metadata),
-                    Err(_) => None,
-                };
+                let metadata = std::fs::symlink_metadata(&path).ok();
                 let hidden_dir_clone = Arc::clone(&hidden_dirs);
                 let mut hidden_dir_mutex = hidden_dir_clone
                     .lock()
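The `ls` hunk above collapses a `match` over `Result` into `Result::ok()`, which turns `Result<T, E>` into `Option<T>` and drops the error. A tiny standalone illustration:

use std::fs;

fn main() {
    // Equivalent to: match fs::symlink_metadata(".") { Ok(m) => Some(m), Err(_) => None }
    let metadata = fs::symlink_metadata(".").ok();
    println!("got metadata: {}", metadata.is_some());
}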


@@ -243,7 +243,7 @@ mod test {
         let chunks = chunk_read.map(|e| e.unwrap()).collect::<Vec<_>>();
         assert_eq!(
             chunks,
-            [s[..4].as_bytes(), s[4..8].as_bytes(), s[8..].as_bytes()]
+            [&s.as_bytes()[..4], &s.as_bytes()[4..8], &s.as_bytes()[8..]]
         );
     }
@@ -260,7 +260,7 @@ mod test {
         let chunks = chunk_read.map(|e| e.unwrap()).collect::<Vec<_>>();
         assert_eq!(
             chunks,
-            [s[..4].as_bytes(), s[4..8].as_bytes(), s[8..].as_bytes()]
+            [&s.as_bytes()[..4], &s.as_bytes()[4..8], &s.as_bytes()[8..]]
         );
     }
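Both test hunks above switch from slicing the `str` and then calling `as_bytes()` to slicing the byte slice directly; the former panics when the index is not a UTF-8 character boundary, while the latter always slices raw bytes. A small standalone demonstration (the string is made up):

fn main() {
    let s = "héllo world"; // 'é' occupies two bytes in UTF-8

    // Slicing the byte slice works at any index.
    println!("{:?}", &s.as_bytes()[..2]);

    // Slicing the str at byte index 2 would panic: not a char boundary.
    // let _ = s[..2].as_bytes();
}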


@@ -42,8 +42,9 @@ pub(crate) fn typecheck_merge(lhs: &Value, rhs: &Value, head: Span) -> Result<()
     match (lhs.get_type(), rhs.get_type()) {
         (Type::Record { .. }, Type::Record { .. }) => Ok(()),
         (_, _) if is_list_of_records(lhs) && is_list_of_records(rhs) => Ok(()),
-        _ => Err(ShellError::PipelineMismatch {
+        other => Err(ShellError::OnlySupportsThisInputType {
             exp_input_type: "input and argument to be both record or both table".to_string(),
+            wrong_type: format!("{} and {}", other.0, other.1).to_string(),
             dst_span: head,
             src_span: lhs.span(),
         }),


@@ -174,8 +174,9 @@ impl Command for Move {
         PipelineData::Value(Value::Record { val, .. }, ..) => {
             Ok(move_record_columns(&val, &columns, &location, head)?.into_pipeline_data())
         }
-        _ => Err(ShellError::PipelineMismatch {
+        other => Err(ShellError::OnlySupportsThisInputType {
             exp_input_type: "record or table".to_string(),
+            wrong_type: other.get_type().to_string(),
             dst_span: head,
             src_span: Span::new(head.start, head.start),
         }),


@@ -184,9 +184,10 @@ impl Command for Sort {
                     dst_span: value.span(),
                 })
             }
-            _ => {
-                return Err(ShellError::PipelineMismatch {
+            ref other => {
+                return Err(ShellError::OnlySupportsThisInputType {
                     exp_input_type: "record or list".to_string(),
+                    wrong_type: other.get_type().to_string(),
                     dst_span: call.head,
                     src_span: value.span(),
                 })


@@ -102,7 +102,7 @@ pub fn calculate(
             mf(&new_vals?, span, name)
         }
         PipelineData::Value(val, ..) => mf(&[val], span, name),
-        PipelineData::Empty { .. } => Err(ShellError::PipelineEmpty { dst_span: name }),
+        PipelineData::Empty => Err(ShellError::PipelineEmpty { dst_span: name }),
         val => Err(ShellError::UnsupportedInput {
             msg: "Only ints, floats, lists, records, or ranges are supported".into(),
             input: "value originates from here".into(),


@@ -723,7 +723,7 @@ fn transform_response_using_content_type(
             )
         })?
         .path_segments()
-        .and_then(|segments| segments.last())
+        .and_then(|mut segments| segments.next_back())
         .and_then(|name| if name.is_empty() { None } else { Some(name) })
         .and_then(|name| {
             PathBuf::from(name)
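The hunk above replaces `Iterator::last()` with `DoubleEndedIterator::next_back()` on the URL's path segments, reading one element from the end instead of walking the whole iterator. A standalone sketch using a plain string split (the path is made up):

fn main() {
    let path = "/downloads/archive/file.tar.gz";
    let mut segments = path.trim_start_matches('/').split('/');

    // Take the final segment from the back without consuming the rest.
    let file_name = segments.next_back().filter(|name| !name.is_empty());
    println!("{:?}", file_name); // Some("file.tar.gz")
}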


@@ -175,7 +175,7 @@ fn run(call: &Call, args: &Arguments, input: PipelineData) -> Result<PipelineDat
             handle_value(stream.into_value(), args, head),
             metadata,
         )),
-        PipelineData::Empty { .. } => Err(ShellError::PipelineEmpty { dst_span: head }),
+        PipelineData::Empty => Err(ShellError::PipelineEmpty { dst_span: head }),
         _ => Err(ShellError::UnsupportedInput {
             msg: "Input value cannot be joined".to_string(),
             input: "value originates from here".into(),
@@ -221,14 +221,21 @@ fn join_list(parts: &[Value], head: Span, span: Span, args: &Arguments) -> Value
                 Value::list(vals, span)
             }
-            Err(_) => Value::error(
-                ShellError::PipelineMismatch {
+            Err(ShellError::CantConvert { from_type, .. }) => Value::error(
+                ShellError::OnlySupportsThisInputType {
                     exp_input_type: "string or record".into(),
+                    wrong_type: from_type,
                     dst_span: head,
                     src_span: span,
                 },
                 span,
             ),
+            Err(_) => Value::error(
+                ShellError::NushellFailed {
+                    msg: "failed to join path".into(),
+                },
+                span,
+            ),
         }
     }
 }


@@ -51,21 +51,11 @@ fn handle_invalid_values(rest: Value, name: Span) -> Value {
 fn err_from_value(rest: &Value, name: Span) -> ShellError {
     match rest {
         Value::Error { error, .. } => *error.clone(),
-        _ => {
-            if rest.is_nothing() {
-                ShellError::OnlySupportsThisInputType {
-                    exp_input_type: "string, record or list".into(),
-                    wrong_type: "nothing".into(),
-                    dst_span: name,
-                    src_span: rest.span(),
-                }
-            } else {
-                ShellError::PipelineMismatch {
-                    exp_input_type: "string, row or list".into(),
-                    dst_span: name,
-                    src_span: rest.span(),
-                }
-            }
-        }
+        _ => ShellError::OnlySupportsThisInputType {
+            exp_input_type: "string, record or list".into(),
+            wrong_type: rest.get_type().to_string(),
+            dst_span: name,
+            src_span: rest.span(),
+        },
     }
 }


@@ -181,8 +181,11 @@ fn operate(
         Value::List { vals, .. } => {
             let iter = vals.into_iter().map(move |val| {
                 let span = val.span();
-                val.into_string().map_err(|_| ShellError::PipelineMismatch {
-                    exp_input_type: "string".into(),
-                    dst_span: head,
-                    src_span: span,
-                })
+                let type_ = val.get_type();
+                val.into_string()
+                    .map_err(|_| ShellError::OnlySupportsThisInputType {
+                        exp_input_type: "string".into(),
+                        wrong_type: type_.to_string(),
+                        dst_span: head,
+                        src_span: span,
+                    })
@@ -199,8 +202,9 @@ fn operate(
             Ok(ListStream::new(iter, head, Signals::empty()).into())
         }
-        value => Err(ShellError::PipelineMismatch {
+        value => Err(ShellError::OnlySupportsThisInputType {
             exp_input_type: "string".into(),
+            wrong_type: value.get_type().to_string(),
             dst_span: head,
             src_span: value.span(),
         }),


@@ -153,8 +153,9 @@ fn split_chars_helper(v: &Value, name: Span, graphemes: bool) -> Value {
         )
     } else {
         Value::error(
-            ShellError::PipelineMismatch {
+            ShellError::OnlySupportsThisInputType {
                 exp_input_type: "string".into(),
+                wrong_type: v.get_type().to_string(),
                 dst_span: name,
                 src_span: v_span,
             },


@@ -255,8 +255,9 @@ fn split_column_helper(
         v => {
             let span = v.span();
             vec![Value::error(
-                ShellError::PipelineMismatch {
+                ShellError::OnlySupportsThisInputType {
                     exp_input_type: "string".into(),
+                    wrong_type: v.get_type().to_string(),
                     dst_span: head,
                     src_span: span,
                 },


@@ -219,8 +219,9 @@ fn split_row_helper(v: &Value, regex: &Regex, max_split: Option<usize>, name: Sp
         }
     } else {
         vec![Value::error(
-            ShellError::PipelineMismatch {
+            ShellError::OnlySupportsThisInputType {
                 exp_input_type: "string".into(),
+                wrong_type: v.get_type().to_string(),
                 dst_span: name,
                 src_span: v_span,
             },


@@ -226,8 +226,9 @@ fn split_words_helper(v: &Value, word_length: Option<usize>, span: Span, graphem
         Value::list(words, v_span)
     } else {
         Value::error(
-            ShellError::PipelineMismatch {
+            ShellError::OnlySupportsThisInputType {
                 exp_input_type: "string".into(),
+                wrong_type: v.get_type().to_string(),
                 dst_span: span,
                 src_span: v_span,
             },


@@ -237,14 +237,16 @@ fn run(
     input.map(
         move |v| {
             let value_span = v.span();
+            let type_ = v.get_type();
             match v.coerce_into_string() {
                 Ok(s) => {
                     let contents = if is_path { s.replace('\\', "\\\\") } else { s };
                     str_expand(&contents, span, value_span)
                 }
                 Err(_) => Value::error(
-                    ShellError::PipelineMismatch {
+                    ShellError::OnlySupportsThisInputType {
                         exp_input_type: "string".into(),
+                        wrong_type: type_.to_string(),
                         dst_span: span,
                         src_span: value_span,
                     },


@@ -108,6 +108,7 @@ fn stats(
     input.map(
         move |v| {
             let value_span = v.span();
+            let type_ = v.get_type();
             // First, obtain the span. If this fails, propagate the error that results.
             if let Value::Error { error, .. } = v {
                 return Value::error(*error, span);
@@ -116,8 +117,9 @@ fn stats(
             match v.coerce_into_string() {
                 Ok(s) => counter(&s, span),
                 Err(_) => Value::error(
-                    ShellError::PipelineMismatch {
+                    ShellError::OnlySupportsThisInputType {
                         exp_input_type: "string".into(),
+                        wrong_type: type_.to_string(),
                         dst_span: span,
                         src_span: value_span,
                     },


@@ -98,7 +98,7 @@ pub(crate) fn finish_redirection(
     if !matches!(
         modes.err,
         Some(Spanned {
-            item: RedirectMode::Pipe { .. },
+            item: RedirectMode::Pipe,
             ..
         })
     ) {


@@ -323,9 +323,7 @@ fn repeat_vertical(
     c: char,
     style: TextStyle,
 ) {
-    let text = std::iter::repeat(c)
-        .take(width as usize)
-        .collect::<String>();
+    let text = std::iter::repeat_n(c, width as usize).collect::<String>();
     let style = text_style_to_tui_style(style);
     let span = Span::styled(text, style);


@@ -3,12 +3,13 @@ use std::sync::Arc;
 use crate::{span_to_range, uri_to_path, LanguageServer};
 use lsp_types::{
     CompletionItem, CompletionItemKind, CompletionItemLabelDetails, CompletionParams,
-    CompletionResponse, CompletionTextEdit, Documentation, MarkupContent, MarkupKind, TextEdit,
+    CompletionResponse, CompletionTextEdit, Documentation, InsertTextFormat, MarkupContent,
+    MarkupKind, Range, TextEdit,
 };
-use nu_cli::{NuCompleter, SuggestionKind};
+use nu_cli::{NuCompleter, SemanticSuggestion, SuggestionKind};
 use nu_protocol::{
-    engine::{CommandType, Stack},
-    Span,
+    engine::{CommandType, EngineState, Stack},
+    PositionalArg, Span, SyntaxShape,
 };
 impl LanguageServer {
@@ -29,7 +30,7 @@ impl LanguageServer {
             .is_some_and(|c| c.is_whitespace() || "|(){}[]<>,:;".contains(c));
         self.need_parse |= need_fallback;
-        let engine_state = Arc::new(self.new_engine_state());
+        let engine_state = Arc::new(self.new_engine_state(Some(&path_uri)));
         let completer = NuCompleter::new(engine_state.clone(), Arc::new(Stack::new()));
         let results = if need_fallback {
             completer.fetch_completions_at(&file_text[..location], location)
@@ -45,27 +46,96 @@ impl LanguageServer {
             results
                 .into_iter()
                 .map(|r| {
-                    let decl_id = r.kind.clone().and_then(|kind| {
-                        matches!(kind, SuggestionKind::Command(_))
-                            .then_some(engine_state.find_decl(r.suggestion.value.as_bytes(), &[])?)
-                    });
-                    let mut label_value = r.suggestion.value;
-                    if r.suggestion.append_whitespace {
-                        label_value.push(' ');
-                    }
-                    let span = r.suggestion.span;
-                    let text_edit = Some(CompletionTextEdit::Edit(TextEdit {
-                        range: span_to_range(&Span::new(span.start, span.end), file, 0),
-                        new_text: label_value.clone(),
-                    }));
-                    CompletionItem {
-                        label: label_value,
-                        label_details: r
-                            .kind
-                            .clone()
-                            .map(|kind| match kind {
-                                SuggestionKind::Value(t) => t.to_string(),
-                                SuggestionKind::Command(cmd) => cmd.to_string(),
+                    let reedline_span = r.suggestion.span;
+                    Self::completion_item_from_suggestion(
+                        &engine_state,
+                        r,
+                        span_to_range(&Span::new(reedline_span.start, reedline_span.end), file, 0),
+                    )
+                })
+                .collect(),
+        ))
+    }
+    fn completion_item_from_suggestion(
+        engine_state: &EngineState,
+        suggestion: SemanticSuggestion,
+        range: Range,
+    ) -> CompletionItem {
+        let decl_id = suggestion.kind.as_ref().and_then(|kind| {
+            matches!(kind, SuggestionKind::Command(_))
+                .then_some(engine_state.find_decl(suggestion.suggestion.value.as_bytes(), &[])?)
+        });
+        let mut snippet_text = suggestion.suggestion.value.clone();
+        let mut doc_string = suggestion.suggestion.extra.map(|ex| ex.join("\n"));
+        let mut insert_text_format = None;
+        let mut idx = 0;
+        // use snippet as `insert_text_format` for command argument completion
+        if let Some(decl_id) = decl_id {
+            let cmd = engine_state.get_decl(decl_id);
+            doc_string = Some(Self::get_decl_description(cmd, true));
+            insert_text_format = Some(InsertTextFormat::SNIPPET);
+            let signature = cmd.signature();
+            // add curly brackets around block arguments
+            // and keywords, e.g. `=` in `alias foo = bar`
+            let mut arg_wrapper = |arg: &PositionalArg, text: String, optional: bool| -> String {
+                idx += 1;
+                match &arg.shape {
+                    SyntaxShape::Block | SyntaxShape::MatchBlock => {
+                        format!("{{ ${{{}:{}}} }}", idx, text)
+                    }
+                    SyntaxShape::Keyword(kwd, _) => {
+                        // NOTE: If optional, the keyword should also be in a placeholder so that it can be removed easily.
+                        // Here we choose to use nested placeholders. Note that some editors don't fully support this format,
+                        // but usually they will simply ignore the inner ones, so it should be fine.
+                        if optional {
+                            idx += 1;
+                            format!(
+                                "${{{}:{} ${{{}:{}}}}}",
+                                idx - 1,
+                                String::from_utf8_lossy(kwd),
+                                idx,
+                                text
+                            )
+                        } else {
+                            format!("{} ${{{}:{}}}", String::from_utf8_lossy(kwd), idx, text)
+                        }
+                    }
+                    _ => format!("${{{}:{}}}", idx, text),
+                }
+            };
+            for required in signature.required_positional {
+                snippet_text.push(' ');
+                snippet_text
+                    .push_str(arg_wrapper(&required, required.name.clone(), false).as_str());
+            }
+            for optional in signature.optional_positional {
+                snippet_text.push(' ');
+                snippet_text
+                    .push_str(arg_wrapper(&optional, format!("{}?", optional.name), true).as_str());
+            }
+            if let Some(rest) = signature.rest_positional {
+                idx += 1;
+                snippet_text.push_str(format!(" ${{{}:...{}}}", idx, rest.name).as_str());
+            }
+        }
+        // no extra space for a command with args expanded in the snippet
+        if idx == 0 && suggestion.suggestion.append_whitespace {
+            snippet_text.push(' ');
+        }
+        let text_edit = Some(CompletionTextEdit::Edit(TextEdit {
+            range,
+            new_text: snippet_text,
+        }));
+        CompletionItem {
+            label: suggestion.suggestion.value,
+            label_details: suggestion
+                .kind
+                .as_ref()
+                .map(|kind| match kind {
+                    SuggestionKind::Value(t) => t.to_string(),
+                    SuggestionKind::Command(cmd) => cmd.to_string(),
@@ -79,27 +149,18 @@ impl LanguageServer {
                 detail: None,
                 description: Some(s),
             }),
-                        detail: r.suggestion.description,
-                        documentation: r
-                            .suggestion
-                            .extra
-                            .map(|ex| ex.join("\n"))
-                            .or(decl_id.map(|decl_id| {
-                                Self::get_decl_description(engine_state.get_decl(decl_id), true)
-                            }))
-                            .map(|value| {
-                                Documentation::MarkupContent(MarkupContent {
-                                    kind: MarkupKind::Markdown,
-                                    value,
-                                })
-                            }),
-                        kind: Self::lsp_completion_item_kind(r.kind),
-                        text_edit,
-                        ..Default::default()
-                    }
-                })
-                .collect(),
-        ))
+            detail: suggestion.suggestion.description,
+            documentation: doc_string.map(|value| {
+                Documentation::MarkupContent(MarkupContent {
+                    kind: MarkupKind::Markdown,
+                    value,
+                })
+            }),
+            kind: Self::lsp_completion_item_kind(suggestion.kind),
+            text_edit,
+            insert_text_format,
+            ..Default::default()
+        }
     }
     fn lsp_completion_item_kind(
@@ -264,10 +325,10 @@ mod tests {
         let resp = send_complete_request(&client_connection, script.clone(), 2, 18);
         assert!(result_from_message(resp).as_array().unwrap().contains(
             &serde_json::json!({
-                "label": "LICENSE",
+                "label": "command.nu",
                 "labelDetails": { "description": "" },
                 "textEdit": { "range": { "start": { "line": 2, "character": 17 }, "end": { "line": 2, "character": 18 }, },
-                    "newText": "LICENSE"
+                    "newText": "command.nu"
                 },
                 "kind": 17
             })
@@ -310,8 +371,9 @@ mod tests {
                 "detail": "Alias a command (with optional flags) to a new name.",
                 "textEdit": {
                     "range": { "start": { "line": 0, "character": 0 }, "end": { "line": 0, "character": 0 }, },
-                    "newText": "alias "
+                    "newText": "alias ${1:name} = ${2:initial_value}"
                 },
+                "insertTextFormat": 2,
                 "kind": 14
             }
         ])
@@ -327,8 +389,9 @@ mod tests {
                 "detail": "Alias a command (with optional flags) to a new name.",
                 "textEdit": {
                     "range": { "start": { "line": 3, "character": 2 }, "end": { "line": 3, "character": 2 }, },
-                    "newText": "alias "
+                    "newText": "alias ${1:name} = ${2:initial_value}"
                 },
+                "insertTextFormat": 2,
                 "kind": 14
             }
         ])
@@ -337,10 +400,10 @@ mod tests {
         let resp = send_complete_request(&client_connection, script, 5, 4);
         assert!(result_from_message(resp).as_array().unwrap().contains(
             &serde_json::json!({
-                "label": "LICENSE",
+                "label": "cell_path.nu",
                 "labelDetails": { "description": "" },
                 "textEdit": { "range": { "start": { "line": 5, "character": 3 }, "end": { "line": 5, "character": 4 }, },
-                    "newText": "LICENSE"
+                    "newText": "cell_path.nu"
                 },
                 "kind": 17
             })
@@ -369,8 +432,9 @@ mod tests {
                 "detail": "Trim whitespace or specific character.",
                 "textEdit": {
                     "range": { "start": { "line": 0, "character": 8 }, "end": { "line": 0, "character": 13 }, },
-                    "newText": "str trim "
+                    "newText": "str trim ${1:...rest}"
                 },
+                "insertTextFormat": 2,
                 "kind": 3
             }
         ])
@@ -488,7 +552,7 @@ mod tests {
                 "detail": "Alias a command (with optional flags) to a new name.",
                 "textEdit": {
                     "range": { "start": { "line": 0, "character": 5 }, "end": { "line": 0, "character": 5 }, },
-                    "newText": "alias "
+                    "newText": "alias ${1:name} = ${2:initial_value}"
                 },
                 "kind": 14
             },
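The completion change above emits LSP snippet strings, where `${n:placeholder}` marks a numbered tab stop the editor lets the user overtype. A small standalone sketch of that string construction (argument names are invented; this is not the Nushell implementation):

// Build an LSP snippet such as `str trim ${1:...rest}` from a command name
// and its positional argument labels.
fn to_snippet(command: &str, args: &[&str]) -> String {
    let mut snippet = String::from(command);
    for (i, arg) in args.iter().enumerate() {
        // Tab stops are 1-based; `${i:label}` shows `label` as placeholder text.
        snippet.push_str(&format!(" ${{{}:{}}}", i + 1, arg));
    }
    snippet
}

fn main() {
    assert_eq!(to_snippet("str trim", &["...rest"]), "str trim ${1:...rest}");
    println!("{}", to_snippet("alias", &["name"]));
}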


@@ -7,7 +7,7 @@ use miette::{miette, IntoDiagnostic, Result};
 impl LanguageServer {
     pub(crate) fn publish_diagnostics_for_file(&mut self, uri: Uri) -> Result<()> {
-        let mut engine_state = self.new_engine_state();
+        let mut engine_state = self.new_engine_state(Some(&uri));
         engine_state.generate_nu_constant();
         let Some((_, span, working_set)) = self.parse_file(&mut engine_state, &uri, true) else {


@@ -77,13 +77,12 @@ impl LanguageServer {
         &mut self,
         params: &GotoDefinitionParams,
     ) -> Option<GotoDefinitionResponse> {
-        let mut engine_state = self.new_engine_state();
         let path_uri = params
             .text_document_position_params
             .text_document
             .uri
             .to_owned();
+        let mut engine_state = self.new_engine_state(Some(&path_uri));
         let (working_set, id, _, _) = self
             .parse_and_find(
                 &mut engine_state,


@@ -1,7 +1,10 @@
 use lsp_types::{Hover, HoverContents, HoverParams, MarkupContent, MarkupKind};
-use nu_protocol::engine::Command;
-use crate::{Id, LanguageServer};
+use nu_protocol::{engine::Command, PositionalArg};
+use crate::{
+    signature::{display_flag, doc_for_arg, get_signature_label},
+    Id, LanguageServer,
+};
 impl LanguageServer {
     pub(crate) fn get_decl_description(decl: &dyn Command, skip_description: bool) -> String {
@@ -19,35 +22,27 @@ impl LanguageServer {
         // Usage
         description.push_str("---\n### Usage \n```nu\n");
         let signature = decl.signature();
-        description.push_str(&Self::get_signature_label(&signature));
+        description.push_str(&get_signature_label(&signature, true));
         description.push_str("\n```\n");
         // Flags
         if !signature.named.is_empty() {
             description.push_str("\n### Flags\n\n");
             let mut first = true;
-            for named in &signature.named {
+            for named in signature.named {
                 if first {
                     first = false;
                 } else {
                     description.push('\n');
                 }
                 description.push_str(" ");
-                if let Some(short_flag) = &named.short {
-                    description.push_str(&format!("`-{short_flag}`"));
-                }
-                if !named.long.is_empty() {
-                    if named.short.is_some() {
-                        description.push_str(", ");
-                    }
-                    description.push_str(&format!("`--{}`", named.long));
-                }
-                if let Some(arg) = &named.arg {
-                    description.push_str(&format!(" `<{}>`", arg.to_type()));
-                }
-                if !named.desc.is_empty() {
-                    description.push_str(&format!(" - {}", named.desc));
-                }
+                description.push_str(&display_flag(&named, true));
+                description.push_str(&doc_for_arg(
+                    named.arg,
+                    named.desc,
+                    named.default_value,
+                    false,
+                ));
                 description.push('\n');
             }
             description.push('\n');
@@ -60,46 +55,38 @@ impl LanguageServer {
         {
             description.push_str("\n### Parameters\n\n");
             let mut first = true;
-            for required_arg in &signature.required_positional {
-                if first {
-                    first = false;
-                } else {
-                    description.push('\n');
-                }
-                description.push_str(&format!(
-                    " `{}: {}`",
-                    required_arg.name,
-                    required_arg.shape.to_type()
-                ));
-                if !required_arg.desc.is_empty() {
-                    description.push_str(&format!(" - {}", required_arg.desc));
-                }
-                description.push('\n');
-            }
-            for optional_arg in &signature.optional_positional {
-                if first {
-                    first = false;
-                } else {
-                    description.push('\n');
-                }
-                description.push_str(&format!(
-                    " `{}: {}`",
-                    optional_arg.name,
-                    optional_arg.shape.to_type()
-                ));
-                if !optional_arg.desc.is_empty() {
-                    description.push_str(&format!(" - {}", optional_arg.desc));
-                }
-                description.push('\n');
-            }
-            if let Some(arg) = &signature.rest_positional {
+            let mut write_arg = |arg: PositionalArg, optional: bool| {
+                if first {
+                    first = false;
+                } else {
+                    description.push('\n');
+                }
+                description.push_str(&format!(" `{}`", arg.name));
+                description.push_str(&doc_for_arg(
+                    Some(arg.shape),
+                    arg.desc,
+                    arg.default_value,
+                    optional,
+                ));
+                description.push('\n');
+            };
+            for required_arg in signature.required_positional {
+                write_arg(required_arg, false);
+            }
+            for optional_arg in signature.optional_positional {
+                write_arg(optional_arg, true);
+            }
+            if let Some(arg) = signature.rest_positional {
                 if !first {
                     description.push('\n');
                 }
-                description.push_str(&format!(" `...{}: {}`", arg.name, arg.shape.to_type()));
-                if !arg.desc.is_empty() {
-                    description.push_str(&format!(" - {}", arg.desc));
-                }
+                description.push_str(&format!(" `...{}`", arg.name));
+                description.push_str(&doc_for_arg(
+                    Some(arg.shape),
+                    arg.desc,
+                    arg.default_value,
+                    false,
+                ));
                 description.push('\n');
             }
             description.push('\n');
@@ -129,13 +116,12 @@ impl LanguageServer {
     }
     pub(crate) fn hover(&mut self, params: &HoverParams) -> Option<Hover> {
-        let mut engine_state = self.new_engine_state();
         let path_uri = params
             .text_document_position_params
             .text_document
             .uri
             .to_owned();
+        let mut engine_state = self.new_engine_state(Some(&path_uri));
         let (working_set, id, _, _) = self
             .parse_and_find(
                 &mut engine_state,
@@ -379,7 +365,7 @@ mod hover_tests {
         serde_json::json!({
             "contents": {
                 "kind": "markdown",
-                "value": "Concatenate multiple strings into a single string, with an optional separator between each.\n---\n### Usage \n```nu\n str join {flags} <separator?>\n```\n\n### Flags\n\n `-h`, `--help` - Display the help message for this command\n\n\n### Parameters\n\n `separator: string` - Optional separator to use when creating string.\n\n\n### Input/output types\n\n```nu\n list<any> | string\n string | string\n\n```\n### Example(s)\n Create a string from input\n```nu\n ['nu', 'shell'] | str join\n```\n Create a string from input with a separator\n```nu\n ['nu', 'shell'] | str join '-'\n```\n"
+                "value": "Concatenate multiple strings into a single string, with an optional separator between each.\n---\n### Usage \n```nu\n str join {flags} (separator)\n```\n\n### Flags\n\n `-h`, `--help` - Display the help message for this command\n\n\n### Parameters\n\n `separator`: `<string>` - Optional separator to use when creating string. (optional)\n\n\n### Input/output types\n\n```nu\n list<any> | string\n string | string\n\n```\n### Example(s)\n Create a string from input\n```nu\n ['nu', 'shell'] | str join\n```\n Create a string from input with a separator\n```nu\n ['nu', 'shell'] | str join '-'\n```\n"
             }
         })
     );


@@ -13,7 +13,7 @@ use miette::{miette, IntoDiagnostic, Result};
 use nu_protocol::{
     ast::{Block, PathMember},
     engine::{EngineState, StateDelta, StateWorkingSet},
-    DeclId, ModuleId, Span, Type, VarId,
+    DeclId, ModuleId, Span, Type, Value, VarId,
 };
 use std::{
     collections::BTreeMap,
@@ -315,13 +315,26 @@ impl LanguageServer {
         Ok(reset)
     }
-    pub(crate) fn new_engine_state(&self) -> EngineState {
+    /// Create a clone of the initial_engine_state with:
+    ///
+    /// * PWD set to the parent directory of given uri. Fallback to `$env.PWD` if None.
+    /// * `StateDelta` cache merged
+    pub(crate) fn new_engine_state(&self, uri: Option<&Uri>) -> EngineState {
         let mut engine_state = self.initial_engine_state.clone();
-        let cwd = std::env::current_dir().expect("Could not get current working directory.");
-        engine_state.add_env_var(
-            "PWD".into(),
-            nu_protocol::Value::test_string(cwd.to_string_lossy()),
-        );
+        match uri {
+            Some(uri) => {
+                let path = uri_to_path(uri);
+                if let Some(path) = path.parent() {
+                    engine_state
+                        .add_env_var("PWD".into(), Value::test_string(path.to_string_lossy()))
+                };
+            }
+            None => {
+                let cwd =
+                    std::env::current_dir().expect("Could not get current working directory.");
+                engine_state.add_env_var("PWD".into(), Value::test_string(cwd.to_string_lossy()));
+            }
+        }
         // merge the cached `StateDelta` if text not changed
         if !self.need_parse {
             engine_state
@@ -350,7 +363,7 @@ impl LanguageServer {
         engine_state: &'a mut EngineState,
         uri: &Uri,
         pos: Position,
-    ) -> Result<(StateWorkingSet<'a>, Id, Span, usize)> {
+    ) -> Result<(StateWorkingSet<'a>, Id, Span, Span)> {
         let (block, file_span, working_set) = self
             .parse_file(engine_state, uri, false)
             .ok_or_else(|| miette!("\nFailed to parse current file"))?;
@@ -365,7 +378,7 @@ impl LanguageServer {
         let location = file.offset_at(pos) as usize + file_span.start;
         let (id, span) = ast::find_id(&block, &working_set, &location)
             .ok_or_else(|| miette!("\nFailed to find current name"))?;
-        Ok((working_set, id, span, file_span.start))
+        Ok((working_set, id, span, file_span))
     }
     pub(crate) fn parse_file<'a>(
@@ -458,10 +471,7 @@ mod tests {
         engine_state.generate_nu_constant();
         assert!(load_standard_library(&mut engine_state).is_ok());
         let cwd = std::env::current_dir().expect("Could not get current working directory.");
-        engine_state.add_env_var(
-            "PWD".into(),
-            nu_protocol::Value::test_string(cwd.to_string_lossy()),
-        );
+        engine_state.add_env_var("PWD".into(), Value::test_string(cwd.to_string_lossy()));
         if let Some(code) = nu_config_code {
             assert!(merge_input(code.as_bytes(), &mut engine_state, &mut Stack::new()).is_ok());
         }


@@ -162,7 +162,7 @@ mod tests {
         serde_json::json!({
             "contents": {
                 "kind": "markdown",
-                "value": "Create a variable and give it a value.\n\nThis command is a parser keyword. For details, check:\n https://www.nushell.sh/book/thinking_in_nu.html\n---\n### Usage \n```nu\n let {flags} <var_name> <initial_value>\n```\n\n### Flags\n\n `-h`, `--help` - Display the help message for this command\n\n\n### Parameters\n\n `var_name: any` - Variable name.\n\n `initial_value: any` - Equals sign followed by value.\n\n\n### Input/output types\n\n```nu\n any | nothing\n\n```\n### Example(s)\n Set a variable to a value\n```nu\n let x = 10\n```\n Set a variable to the result of an expression\n```nu\n let x = 10 + 100\n```\n Set a variable based on the condition\n```nu\n let x = if false { -1 } else { 1 }\n```\n"
+                "value": "Create a variable and give it a value.\n\nThis command is a parser keyword. For details, check:\n https://www.nushell.sh/book/thinking_in_nu.html\n---\n### Usage \n```nu\n let {flags} <var_name> = <initial_value>\n```\n\n### Flags\n\n `-h`, `--help` - Display the help message for this command\n\n\n### Parameters\n\n `var_name`: `<vardecl>` - Variable name.\n\n `initial_value`: `<variable>` - Equals sign followed by value.\n\n\n### Input/output types\n\n```nu\n any | nothing\n\n```\n### Example(s)\n Set a variable to a value\n```nu\n let x = 10\n```\n Set a variable to the result of an expression\n```nu\n let x = 10 + 100\n```\n Set a variable based on the condition\n```nu\n let x = if false { -1 } else { 1 }\n```\n"
             }
         })
     );


@ -5,7 +5,7 @@ use lsp_types::{
use nu_protocol::{ use nu_protocol::{
ast::{Argument, Call, Expr, Expression, FindMapResult, Traverse}, ast::{Argument, Call, Expr, Expression, FindMapResult, Traverse},
engine::StateWorkingSet, engine::StateWorkingSet,
PositionalArg, Signature, Flag, PositionalArg, Signature, SyntaxShape, Value,
}; };
use crate::{uri_to_path, LanguageServer}; use crate::{uri_to_path, LanguageServer};
@ -35,34 +35,85 @@ fn find_active_internal_call<'a>(
} }
} }
impl LanguageServer { pub(crate) fn display_flag(flag: &Flag, verbitam: bool) -> String {
pub(crate) fn get_signature_label(signature: &Signature) -> String { let md_backtick = if verbitam { "`" } else { "" };
let mut text = String::new();
if let Some(short_flag) = flag.short {
text.push_str(&format!("{md_backtick}-{short_flag}{md_backtick}"));
}
if !flag.long.is_empty() {
if flag.short.is_some() {
text.push_str(", ");
}
text.push_str(&format!("{md_backtick}--{}{md_backtick}", flag.long));
}
text
}
pub(crate) fn doc_for_arg(
syntax_shape: Option<SyntaxShape>,
desc: String,
default_value: Option<Value>,
optional: bool,
) -> String {
let mut text = String::new();
if let Some(mut shape) = syntax_shape {
if let SyntaxShape::Keyword(_, inner_shape) = shape {
shape = *inner_shape;
}
text.push_str(&format!(": `<{}>`", shape));
}
if !(desc.is_empty() && default_value.is_none()) || optional {
text.push_str(" -")
};
if !desc.is_empty() {
text.push_str(&format!(" {}", desc));
};
if let Some(value) = default_value.as_ref().and_then(|v| v.coerce_str().ok()) {
text.push_str(&format!(
" ({}default: `{value}`)",
if optional { "optional, " } else { "" }
));
} else if optional {
text.push_str(" (optional)");
}
text
}
pub(crate) fn get_signature_label(signature: &Signature, indent: bool) -> String {
let expand_keyword = |arg: &PositionalArg, optional: bool| match &arg.shape {
SyntaxShape::Keyword(kwd, _) => {
format!("{} <{}>", String::from_utf8_lossy(kwd), arg.name)
}
_ => {
if optional {
arg.name.clone()
} else {
format!("<{}>", arg.name)
}
}
};
let mut label = String::new(); let mut label = String::new();
label.push_str(&format!(" {}", signature.name)); if indent {
label.push_str(" ");
}
label.push_str(&signature.name);
if !signature.named.is_empty() { if !signature.named.is_empty() {
label.push_str(" {flags}"); label.push_str(" {flags}");
} }
for required_arg in &signature.required_positional { for required_arg in &signature.required_positional {
label.push_str(&format!(" <{}>", required_arg.name)); label.push_str(&format!(" {}", expand_keyword(required_arg, false)));
} }
for optional_arg in &signature.optional_positional { for optional_arg in &signature.optional_positional {
let value_info = if let Some(value) = optional_arg label.push_str(&format!(" ({})", expand_keyword(optional_arg, true)));
.default_value
.as_ref()
.and_then(|v| v.coerce_str().ok())
{
format!("={}", value)
} else {
String::new()
};
label.push_str(&format!(" <{}?{}>", optional_arg.name, value_info));
} }
if let Some(arg) = &signature.rest_positional { if let Some(arg) = &signature.rest_positional {
label.push_str(&format!(" <...{}>", arg.name)); label.push_str(&format!(" ...({})", arg.name));
} }
label label
} }
impl LanguageServer {
pub(crate) fn get_signature_help( pub(crate) fn get_signature_help(
&mut self, &mut self,
params: &SignatureHelpParams, params: &SignatureHelpParams,
@ -78,7 +129,7 @@ impl LanguageServer {
let file_text = file.get_content(None).to_owned(); let file_text = file.get_content(None).to_owned();
drop(docs); drop(docs);
let engine_state = self.new_engine_state(); let engine_state = self.new_engine_state(Some(&path_uri));
let mut working_set = StateWorkingSet::new(&engine_state); let mut working_set = StateWorkingSet::new(&engine_state);
// NOTE: in case the cursor is at the end of the call expression // NOTE: in case the cursor is at the end of the call expression
@ -120,6 +171,7 @@ impl LanguageServer {
find_active_internal_call(expr, &working_set, pos_to_search) find_active_internal_call(expr, &working_set, pos_to_search)
})?; })?;
let active_signature = working_set.get_decl(active_call.decl_id).signature(); let active_signature = working_set.get_decl(active_call.decl_id).signature();
let label = get_signature_label(&active_signature, false);
let mut param_num_before_pos = 0; let mut param_num_before_pos = 0;
for arg in active_call.arguments.iter() { for arg in active_call.arguments.iter() {
@ -133,39 +185,51 @@ impl LanguageServer {
break; break;
} }
} }
let str_to_doc = |s: String| { let str_to_doc = |s: String| {
Some(Documentation::MarkupContent(MarkupContent { Some(Documentation::MarkupContent(MarkupContent {
kind: MarkupKind::Markdown, kind: MarkupKind::Markdown,
value: s, value: s,
})) }))
}; };
let arg_to_param_info = |arg: &PositionalArg| ParameterInformation { let arg_to_param_info = |arg: PositionalArg, optional: bool| ParameterInformation {
label: lsp_types::ParameterLabel::Simple(arg.name.to_owned()), label: lsp_types::ParameterLabel::Simple(arg.name),
documentation: str_to_doc(format!( documentation: str_to_doc(doc_for_arg(
": `<{}>` - {}", Some(arg.shape),
arg.shape.to_type(), arg.desc,
arg.desc.to_owned() arg.default_value,
optional,
)), )),
}; };
let flag_to_param_info = |flag: Flag| ParameterInformation {
label: lsp_types::ParameterLabel::Simple(display_flag(&flag, false)),
documentation: str_to_doc(doc_for_arg(flag.arg, flag.desc, flag.default_value, false)),
};
// positional args
let mut parameters: Vec<ParameterInformation> = active_signature let mut parameters: Vec<ParameterInformation> = active_signature
.required_positional .required_positional
.iter() .into_iter()
.map(arg_to_param_info) .map(|arg| arg_to_param_info(arg, false))
.chain( .chain(
active_signature active_signature
.optional_positional .optional_positional
.iter() .into_iter()
.map(arg_to_param_info), .map(|arg| arg_to_param_info(arg, true)),
) )
.collect(); .collect();
if let Some(rest_arg) = &active_signature.rest_positional { if let Some(rest_arg) = active_signature.rest_positional {
parameters.push(arg_to_param_info(rest_arg)); parameters.push(arg_to_param_info(rest_arg, false));
} }
let max_idx = parameters.len().saturating_sub(1) as u32; let max_idx = parameters.len().saturating_sub(1) as u32;
let active_parameter = Some(param_num_before_pos.min(max_idx)); let active_parameter = Some(param_num_before_pos.min(max_idx));
// also include flags in the end, just for documentation
parameters.extend(active_signature.named.into_iter().map(flag_to_param_info));
Some(SignatureHelp { Some(SignatureHelp {
signatures: vec![SignatureInformation { signatures: vec![SignatureInformation {
label: Self::get_signature_label(&active_signature), label,
documentation: str_to_doc(active_signature.description), documentation: str_to_doc(active_signature.description),
parameters: Some(parameters), parameters: Some(parameters),
active_parameter, active_parameter,
@ -233,7 +297,7 @@ mod tests {
actual: result_from_message(resp), actual: result_from_message(resp),
expected: serde_json::json!({ expected: serde_json::json!({
"signatures": [{ "signatures": [{
"label": " str substring {flags} <range> <...rest>", "label": "str substring {flags} <range> ...(rest)",
"parameters": [ ], "parameters": [ ],
"activeParameter": 0 "activeParameter": 0
}], }],
@ -263,7 +327,7 @@ mod tests {
assert_json_include!( assert_json_include!(
actual: result_from_message(resp), actual: result_from_message(resp),
expected: serde_json::json!({ "signatures": [{ expected: serde_json::json!({ "signatures": [{
"label": " str substring {flags} <range> <...rest>", "label": "str substring {flags} <range> ...(rest)",
"activeParameter": 1 "activeParameter": 1
}]}) }]})
); );
@ -272,7 +336,7 @@ mod tests {
assert_json_include!( assert_json_include!(
actual: result_from_message(resp), actual: result_from_message(resp),
expected: serde_json::json!({ "signatures": [{ expected: serde_json::json!({ "signatures": [{
"label": " str substring {flags} <range> <...rest>", "label": "str substring {flags} <range> ...(rest)",
"activeParameter": 0 "activeParameter": 0
}]}) }]})
); );
@ -281,7 +345,7 @@ mod tests {
assert_json_include!( assert_json_include!(
actual: result_from_message(resp), actual: result_from_message(resp),
expected: serde_json::json!({ "signatures": [{ expected: serde_json::json!({ "signatures": [{
"label": " echo {flags} <...rest>", "label": "echo {flags} ...(rest)",
"activeParameter": 0 "activeParameter": 0
}]}) }]})
); );
@ -291,8 +355,8 @@ mod tests {
fn signature_help_on_custom_commands() { fn signature_help_on_custom_commands() {
let config_str = r#"export def "foo bar" [ let config_str = r#"export def "foo bar" [
p1: int p1: int
p2: string, p2: string, # doc
p3?: int = 1 # doc p3?: int = 1
] {}"#; ] {}"#;
let (client_connection, _recv) = initialize_language_server(Some(config_str), None); let (client_connection, _recv) = initialize_language_server(Some(config_str), None);
@ -308,11 +372,11 @@ mod tests {
actual: result_from_message(resp), actual: result_from_message(resp),
expected: serde_json::json!({ expected: serde_json::json!({
"signatures": [{ "signatures": [{
"label": " foo bar {flags} <p1> <p2> <p3?=1>", "label": "foo bar {flags} <p1> <p2> (p3)",
"parameters": [ "parameters": [
{"label": "p1", "documentation": {"value": ": `<int>` - "}}, {"label": "p1", "documentation": {"value": ": `<int>`"}},
{"label": "p2", "documentation": {"value": ": `<string>` - "}}, {"label": "p2", "documentation": {"value": ": `<string>` - doc"}},
{"label": "p3", "documentation": {"value": ": `<int>` - doc"}}, {"label": "p3", "documentation": {"value": ": `<int>` - (optional, default: `1`)"}},
], ],
"activeParameter": 1 "activeParameter": 1
}], }],
@ -326,11 +390,12 @@ mod tests {
actual: result_from_message(resp), actual: result_from_message(resp),
expected: serde_json::json!({ expected: serde_json::json!({
"signatures": [{ "signatures": [{
"label": " foo baz {flags} <p1> <p2> <p3?=1>", "label": "foo baz {flags} <p1> <p2> (p3)",
"parameters": [ "parameters": [
{"label": "p1", "documentation": {"value": ": `<int>` - "}}, {"label": "p1", "documentation": {"value": ": `<int>`"}},
{"label": "p2", "documentation": {"value": ": `<string>` - "}}, {"label": "p2", "documentation": {"value": ": `<string>` - doc"}},
{"label": "p3", "documentation": {"value": ": `<int>` - doc"}}, {"label": "p3", "documentation": {"value": ": `<int>` - (optional, default: `1`)"}},
{"label": "-h, --help", "documentation": {"value": " - Display the help message for this command"}},
], ],
"activeParameter": 2 "activeParameter": 2
}], }],
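For orientation on the signature-help changes above: parameter and flag documentation now comes from the shared helpers `doc_for_arg` and `display_flag`, the label comes from `get_signature_label`, and flags are appended to the parameter list only so that their documentation is visible. Below is a minimal, standalone sketch of the `lsp_types` structures being filled in; the command label, description strings, and values are illustrative, not nushell's real output.

use lsp_types::{
    Documentation, MarkupContent, MarkupKind, ParameterInformation, ParameterLabel,
    SignatureHelp, SignatureInformation,
};

// Illustrative only: one positional parameter plus one flag-as-documentation entry.
fn sketch() -> SignatureHelp {
    let doc = |s: &str| {
        Some(Documentation::MarkupContent(MarkupContent {
            kind: MarkupKind::Markdown,
            value: s.to_string(),
        }))
    };
    let positional = ParameterInformation {
        label: ParameterLabel::Simple("range".into()),
        documentation: doc(": `<range>` - (illustrative description)"),
    };
    let flag = ParameterInformation {
        label: ParameterLabel::Simple("-h, --help".into()),
        documentation: doc(" - Display the help message for this command"),
    };
    SignatureHelp {
        signatures: vec![SignatureInformation {
            label: "str substring {flags} <range> ...(rest)".into(),
            documentation: None,
            parameters: Some(vec![positional, flag]),
            active_parameter: Some(0),
        }],
        active_signature: Some(0),
        active_parameter: None,
    }
}

fn main() {
    println!("{}", sketch().signatures[0].label);
}

Note that in the patch the `active_parameter` index is clamped to the positional parameter count before the flag entries are pushed, so appending flags at the end cannot shift which parameter is highlighted.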

View File

@ -270,8 +270,8 @@ impl LanguageServer {
&mut self, &mut self,
params: &DocumentSymbolParams, params: &DocumentSymbolParams,
) -> Option<DocumentSymbolResponse> { ) -> Option<DocumentSymbolResponse> {
let engine_state = self.new_engine_state();
let uri = params.text_document.uri.to_owned(); let uri = params.text_document.uri.to_owned();
let engine_state = self.new_engine_state(Some(&uri));
let docs = self.docs.lock().ok()?; let docs = self.docs.lock().ok()?;
self.symbol_cache.update(&uri, &engine_state, &docs); self.symbol_cache.update(&uri, &engine_state, &docs);
self.symbol_cache self.symbol_cache
@ -284,7 +284,7 @@ impl LanguageServer {
params: &WorkspaceSymbolParams, params: &WorkspaceSymbolParams,
) -> Option<WorkspaceSymbolResponse> { ) -> Option<WorkspaceSymbolResponse> {
if self.symbol_cache.any_dirty() { if self.symbol_cache.any_dirty() {
let engine_state = self.new_engine_state(); let engine_state = self.new_engine_state(None);
let docs = self.docs.lock().ok()?; let docs = self.docs.lock().ok()?;
self.symbol_cache.update_all(&engine_state, &docs); self.symbol_cache.update_all(&engine_state, &docs);
} }

View File

@ -1,5 +1,5 @@
use crate::{ use crate::{
ast::{find_id, find_reference_by_id}, ast::{self, find_id, find_reference_by_id},
path_to_uri, span_to_range, uri_to_path, Id, LanguageServer, path_to_uri, span_to_range, uri_to_path, Id, LanguageServer,
}; };
use lsp_textdocument::FullTextDocument; use lsp_textdocument::FullTextDocument;
@ -46,6 +46,26 @@ fn find_nu_scripts_in_folder(folder_uri: &Uri) -> Result<nu_glob::Paths> {
nu_glob::glob(&pattern, Uninterruptible).into_diagnostic() nu_glob::glob(&pattern, Uninterruptible).into_diagnostic()
} }
/// HACK: when current file is imported (use keyword) by others in the workspace,
/// it will get parsed a second time via `parse_module_block`, so that its definitions'
/// ids are renewed, making it harder to track the references.
///
/// FIXME: cross-file shadowing can still cause false-positive/false-negative cases
///
/// This is a workaround to track the new id
struct IDTracker {
/// ID to search, renewed on `parse_module_block`
pub id: Id,
/// Span of the original instance under the cursor
pub span: Span,
/// Name of the definition
pub name: String,
/// Span of the original file where the request comes from
pub file_span: Span,
/// The redundant parsing should only happen once
pub renewed: bool,
}
impl LanguageServer { impl LanguageServer {
/// Get initial workspace folders from initialization response /// Get initial workspace folders from initialization response
pub(crate) fn initialize_workspace_folders( pub(crate) fn initialize_workspace_folders(
@ -66,12 +86,12 @@ impl LanguageServer {
&mut self, &mut self,
params: &DocumentHighlightParams, params: &DocumentHighlightParams,
) -> Option<Vec<DocumentHighlight>> { ) -> Option<Vec<DocumentHighlight>> {
let mut engine_state = self.new_engine_state();
let path_uri = params let path_uri = params
.text_document_position_params .text_document_position_params
.text_document .text_document
.uri .uri
.to_owned(); .to_owned();
let mut engine_state = self.new_engine_state(Some(&path_uri));
let (block, file_span, working_set) = let (block, file_span, working_set) =
self.parse_file(&mut engine_state, &path_uri, false)?; self.parse_file(&mut engine_state, &path_uri, false)?;
let docs = &self.docs.lock().ok()?; let docs = &self.docs.lock().ok()?;
@ -137,31 +157,38 @@ impl LanguageServer {
timeout: u128, timeout: u128,
) -> Option<Vec<Location>> { ) -> Option<Vec<Location>> {
self.occurrences = BTreeMap::new(); self.occurrences = BTreeMap::new();
let mut engine_state = self.new_engine_state();
let path_uri = params.text_document_position.text_document.uri.to_owned(); let path_uri = params.text_document_position.text_document.uri.to_owned();
let (_, id, span, _) = self let mut engine_state = self.new_engine_state(Some(&path_uri));
let (working_set, id, span, file_span) = self
.parse_and_find( .parse_and_find(
&mut engine_state, &mut engine_state,
&path_uri, &path_uri,
params.text_document_position.position, params.text_document_position.position,
) )
.ok()?; .ok()?;
// have to clone it again in order to move to another thread
let engine_state = self.new_engine_state();
let current_workspace_folder = self.get_workspace_folder_by_uri(&path_uri)?; let current_workspace_folder = self.get_workspace_folder_by_uri(&path_uri)?;
let token = params let token = params
.work_done_progress_params .work_done_progress_params
.work_done_token .work_done_token
.to_owned() .to_owned()
.unwrap_or(ProgressToken::Number(1)); .unwrap_or(ProgressToken::Number(1));
let id_tracker = IDTracker {
id,
span,
file_span,
name: String::from_utf8_lossy(working_set.get_span_contents(span)).to_string(),
renewed: false,
};
self.channels = self self.channels = self
.find_reference_in_workspace( .find_reference_in_workspace(
engine_state, engine_state,
current_workspace_folder, current_workspace_folder,
id,
span,
token.clone(), token.clone(),
"Finding references ...".to_string(), "Finding references ...".to_string(),
id_tracker,
) )
.ok(); .ok();
// TODO: WorkDoneProgress -> PartialResults for quicker response // TODO: WorkDoneProgress -> PartialResults for quicker response
@ -200,10 +227,10 @@ impl LanguageServer {
serde_json::from_value(request.params).into_diagnostic()?; serde_json::from_value(request.params).into_diagnostic()?;
self.occurrences = BTreeMap::new(); self.occurrences = BTreeMap::new();
let mut engine_state = self.new_engine_state();
let path_uri = params.text_document.uri.to_owned(); let path_uri = params.text_document.uri.to_owned();
let mut engine_state = self.new_engine_state(Some(&path_uri));
let (working_set, id, span, file_offset) = let (working_set, id, span, file_span) =
self.parse_and_find(&mut engine_state, &path_uri, params.position)?; self.parse_and_find(&mut engine_state, &path_uri, params.position)?;
if let Id::Value(_) = id { if let Id::Value(_) = id {
@ -222,7 +249,7 @@ impl LanguageServer {
let file = docs let file = docs
.get_document(&path_uri) .get_document(&path_uri)
.ok_or_else(|| miette!("\nFailed to get document"))?; .ok_or_else(|| miette!("\nFailed to get document"))?;
let range = span_to_range(&span, file, file_offset); let range = span_to_range(&span, file, file_span.start);
let response = PrepareRenameResponse::Range(range); let response = PrepareRenameResponse::Range(range);
self.connection self.connection
.sender .sender
@ -233,20 +260,24 @@ impl LanguageServer {
})) }))
.into_diagnostic()?; .into_diagnostic()?;
// have to clone it again in order to move to another thread
let engine_state = self.new_engine_state();
let current_workspace_folder = self let current_workspace_folder = self
.get_workspace_folder_by_uri(&path_uri) .get_workspace_folder_by_uri(&path_uri)
.ok_or_else(|| miette!("\nCurrent file is not in any workspace"))?; .ok_or_else(|| miette!("\nCurrent file is not in any workspace"))?;
// now continue parsing on other files in the workspace // now continue parsing on other files in the workspace
let id_tracker = IDTracker {
id,
span,
file_span,
name: String::from_utf8_lossy(working_set.get_span_contents(span)).to_string(),
renewed: false,
};
self.channels = self self.channels = self
.find_reference_in_workspace( .find_reference_in_workspace(
engine_state, engine_state,
current_workspace_folder, current_workspace_folder,
id,
span,
ProgressToken::Number(0), ProgressToken::Number(0),
"Preparing rename ...".to_string(), "Preparing rename ...".to_string(),
id_tracker,
) )
.ok(); .ok();
Ok(()) Ok(())
@ -256,7 +287,7 @@ impl LanguageServer {
working_set: &mut StateWorkingSet, working_set: &mut StateWorkingSet,
file: &FullTextDocument, file: &FullTextDocument,
fp: &Path, fp: &Path,
id: &Id, id_tracker: &mut IDTracker,
) -> Option<Vec<Span>> { ) -> Option<Vec<Span>> {
let block = nu_parser::parse( let block = nu_parser::parse(
working_set, working_set,
@ -264,7 +295,25 @@ impl LanguageServer {
file.get_content(None).as_bytes(), file.get_content(None).as_bytes(),
false, false,
); );
let references: Vec<Span> = find_reference_by_id(&block, working_set, id); // NOTE: Renew the id if there's a module with the same span as the original file.
// This requires that the initial parsing results get merged in the engine_state,
// typically they're cached with diagnostics before the prepare_rename/references requests,
// so that we don't need to clone and merge delta again.
if (!id_tracker.renewed)
&& working_set
.find_module_by_span(id_tracker.file_span)
.is_some()
{
if let Some(new_block) = working_set.find_block_by_span(id_tracker.file_span) {
if let Some((new_id, _)) =
ast::find_id(&new_block, working_set, &id_tracker.span.start)
{
id_tracker.id = new_id;
}
}
id_tracker.renewed = true;
}
let references: Vec<Span> = find_reference_by_id(&block, working_set, &id_tracker.id);
// add_block to avoid repeated parsing // add_block to avoid repeated parsing
working_set.add_block(block); working_set.add_block(block);
@ -304,10 +353,9 @@ impl LanguageServer {
&self, &self,
engine_state: EngineState, engine_state: EngineState,
current_workspace_folder: WorkspaceFolder, current_workspace_folder: WorkspaceFolder,
id: Id,
span: Span,
token: ProgressToken, token: ProgressToken,
message: String, message: String,
mut id_tracker: IDTracker,
) -> Result<( ) -> Result<(
crossbeam_channel::Sender<bool>, crossbeam_channel::Sender<bool>,
Arc<crossbeam_channel::Receiver<InternalMessage>>, Arc<crossbeam_channel::Receiver<InternalMessage>>,
@ -333,7 +381,7 @@ impl LanguageServer {
.filter_map(|p| p.ok()) .filter_map(|p| p.ok())
.collect(); .collect();
let len = scripts.len(); let len = scripts.len();
let definition_span = Self::find_definition_span_by_id(&working_set, &id); let definition_span = Self::find_definition_span_by_id(&working_set, &id_tracker.id);
for (i, fp) in scripts.iter().enumerate() { for (i, fp) in scripts.iter().enumerate() {
#[cfg(test)] #[cfg(test)]
@ -363,9 +411,7 @@ impl LanguageServer {
}; };
// skip if the file does not contain what we're looking for // skip if the file does not contain what we're looking for
let content_string = String::from_utf8_lossy(&bytes); let content_string = String::from_utf8_lossy(&bytes);
let text_to_search = if !content_string.contains(&id_tracker.name) {
String::from_utf8_lossy(working_set.get_span_contents(span));
if !content_string.contains(text_to_search.as_ref()) {
// progress without any data // progress without any data
data_sender data_sender
.send(InternalMessage::OnGoing(token.clone(), percentage)) .send(InternalMessage::OnGoing(token.clone(), percentage))
@ -374,17 +420,17 @@ impl LanguageServer {
} }
&FullTextDocument::new("nu".to_string(), 0, content_string.into()) &FullTextDocument::new("nu".to_string(), 0, content_string.into())
}; };
let _ = Self::find_reference_in_file(&mut working_set, file, fp, &id).map( let _ = Self::find_reference_in_file(&mut working_set, file, fp, &mut id_tracker)
|mut refs| { .map(|mut refs| {
let file_span = working_set let file_span = working_set
.get_span_for_filename(fp.to_string_lossy().as_ref()) .get_span_for_filename(fp.to_string_lossy().as_ref())
.unwrap_or(Span::unknown()); .unwrap_or(Span::unknown());
if let Some(extra_span) = Self::reference_not_in_ast( if let Some(extra_span) = Self::reference_not_in_ast(
&id, &id_tracker.id,
&working_set, &working_set,
definition_span, definition_span,
file_span, file_span,
span, id_tracker.span,
) { ) {
if !refs.contains(&extra_span) { if !refs.contains(&extra_span) {
refs.push(extra_span) refs.push(extra_span)
@ -400,8 +446,7 @@ impl LanguageServer {
data_sender data_sender
.send(InternalMessage::OnGoing(token.clone(), percentage)) .send(InternalMessage::OnGoing(token.clone(), percentage))
.ok(); .ok();
}, });
);
} }
data_sender data_sender
.send(InternalMessage::Finished(token.clone())) .send(InternalMessage::Finished(token.clone()))

View File

@ -2678,7 +2678,7 @@ pub fn parse_unit_value<'res>(
if let Some((unit, name, convert)) = unit_groups.iter().find(|x| value.ends_with(x.1)) { if let Some((unit, name, convert)) = unit_groups.iter().find(|x| value.ends_with(x.1)) {
let lhs_len = value.len() - name.len(); let lhs_len = value.len() - name.len();
let lhs = strip_underscores(value[..lhs_len].as_bytes()); let lhs = strip_underscores(&value.as_bytes()[..lhs_len]);
let lhs_span = Span::new(span.start, span.start + lhs_len); let lhs_span = Span::new(span.start, span.start + lhs_len);
let unit_span = Span::new(span.start + lhs_len, span.end); let unit_span = Span::new(span.start + lhs_len, span.end);
if lhs.ends_with('$') { if lhs.ends_with('$') {
@ -2784,7 +2784,7 @@ pub const FILESIZE_UNIT_GROUPS: &[UnitGroup] = &[
( (
Unit::Filesize(FilesizeUnit::EiB), Unit::Filesize(FilesizeUnit::EiB),
"EIB", "EIB",
Some((Unit::Filesize(FilesizeUnit::EiB), 1024)), Some((Unit::Filesize(FilesizeUnit::PiB), 1024)),
), ),
(Unit::Filesize(FilesizeUnit::B), "B", None), (Unit::Filesize(FilesizeUnit::B), "B", None),
]; ];
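Two independent fixes in this hunk: `parse_unit_value` now slices the byte slice instead of byte-indexing the `&str`, and the fallback conversion pair for `EIB` now points at `PiB` with a factor of 1024 rather than back at `EiB` itself, keeping the per-step ladder consistent. The slicing point is worth spelling out: a byte index into a `&str` must land on a UTF-8 character boundary or it panics, a rule a plain byte slice does not have. A small standalone illustration (the string and index here are made up):

fn main() {
    // '²' (U+00B2) is two bytes in UTF-8, so byte index 2 falls inside it.
    let value = "1²KB";

    // Indexing the &str checks the boundary and panics if it is violated:
    // let _ = &value[..2]; // panic: byte index 2 is not a char boundary

    // Indexing the byte slice has no such rule; it just takes the bytes.
    let lhs = &value.as_bytes()[..2];
    assert_eq!(lhs, &[b'1', 0xC2][..]);
}

In `parse_unit_value` the cut point sits right before an ASCII unit suffix, so the old form could not actually panic there; the new form simply skips an unnecessary boundary check.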

View File

@ -46,7 +46,7 @@ pub fn expand_ndots(path: impl AsRef<Path>) -> PathBuf {
pub fn expand_dots(path: impl AsRef<Path>) -> PathBuf { pub fn expand_dots(path: impl AsRef<Path>) -> PathBuf {
// Check if the last component of the path is a normal component. // Check if the last component of the path is a normal component.
fn last_component_is_normal(path: &Path) -> bool { fn last_component_is_normal(path: &Path) -> bool {
matches!(path.components().last(), Some(Component::Normal(_))) matches!(path.components().next_back(), Some(Component::Normal(_)))
} }
let path = path.as_ref(); let path = path.as_ref();
@ -61,7 +61,7 @@ pub fn expand_dots(path: impl AsRef<Path>) -> PathBuf {
// no-op // no-op
} }
_ => { _ => {
let prev_component = result.components().last(); let prev_component = result.components().next_back();
if prev_component == Some(Component::RootDir) && component == Component::ParentDir { if prev_component == Some(Component::RootDir) && component == Component::ParentDir {
continue; continue;
} }
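This `.last()` to `.next_back()` change (repeated below for `expand_tilde`, the overlay lookups in `engine_state.rs`, and `from_value.rs`) relies on the iterators being double-ended: `next_back()` pulls the final element directly, while the default `last()` drives the iterator all the way forward and consumes it. A standalone comparison:

fn main() {
    let parts = ["usr", "local", "bin"];

    // Same answer either way...
    assert_eq!(parts.iter().last(), Some(&"bin"));
    assert_eq!(parts.iter().next_back(), Some(&"bin"));

    // ...but next_back() only borrows the iterator, so the front is still usable,
    // and it does not have to walk the earlier elements to reach the end.
    let mut it = parts.iter();
    assert_eq!(it.next_back(), Some(&"bin"));
    assert_eq!(it.next(), Some(&"usr"));
}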

View File

@ -29,7 +29,7 @@ fn expand_tilde_with_home(path: impl AsRef<Path>, home: Option<PathBuf>) -> Path
}; };
} }
let path_last_char = path.as_os_str().to_string_lossy().chars().last(); let path_last_char = path.as_os_str().to_string_lossy().chars().next_back();
let need_trailing_slash = path_last_char == Some('/') || path_last_char == Some('\\'); let need_trailing_slash = path_last_char == Some('/') || path_last_char == Some('\\');
match home { match home {
@ -94,7 +94,7 @@ fn user_home_dir(username: &str) -> PathBuf {
if !cfg!(target_os = "android") if !cfg!(target_os = "android")
&& expected_path && expected_path
.components() .components()
.last() .next_back()
.map(|last| last != Component::Normal(username.as_ref())) .map(|last| last != Component::Normal(username.as_ref()))
.unwrap_or(false) .unwrap_or(false)
{ {

View File

@ -884,7 +884,7 @@ impl<'a> StateWorkingSet<'a> {
.active_overlay_names(&mut removed_overlays) .active_overlay_names(&mut removed_overlays)
.iter() .iter()
.rev() .rev()
.last() .next_back()
{ {
return last_name; return last_name;
} }
@ -900,7 +900,7 @@ impl<'a> StateWorkingSet<'a> {
if let Some(last_overlay) = scope_frame if let Some(last_overlay) = scope_frame
.active_overlays(&mut removed_overlays) .active_overlays(&mut removed_overlays)
.rev() .rev()
.last() .next_back()
{ {
return last_overlay; return last_overlay;
} }

View File

@ -17,6 +17,8 @@ use std::{
}; };
/// Create a Value for `$nu`. /// Create a Value for `$nu`.
// Note: When adding new constants to $nu, please update the doc at https://nushell.sh/book/special_variables.html
// or at least add a TODO/reminder issue in nushell.github.io so we don't lose track of it.
pub(crate) fn create_nu_constant(engine_state: &EngineState, span: Span) -> Value { pub(crate) fn create_nu_constant(engine_state: &EngineState, span: Span) -> Value {
fn canonicalize_path(engine_state: &EngineState, path: &Path) -> PathBuf { fn canonicalize_path(engine_state: &EngineState, path: &Path) -> PathBuf {
#[allow(deprecated)] #[allow(deprecated)]

View File

@ -88,7 +88,7 @@ pub fn lev_distance_with_substrings(a: &str, b: &str, limit: usize) -> Option<us
1 // Exact substring match, but not a total word match so return non-zero 1 // Exact substring match, but not a total word match so return non-zero
} else if !big_len_diff { } else if !big_len_diff {
// Not a big difference in length, discount cost of length difference // Not a big difference in length, discount cost of length difference
score + (len_diff + 1) / 2 score + len_diff.div_ceil(2)
} else { } else {
// A big difference in length, add back the difference in length to the score // A big difference in length, add back the difference in length to the score
score + len_diff score + len_diff
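`len_diff.div_ceil(2)` computes the same rounded-up halving as `(len_diff + 1) / 2` for unsigned integers, but it states the intent and cannot overflow on the `+ 1`. A quick standalone check:

fn main() {
    for n in [0usize, 1, 2, 3, 7, 8] {
        assert_eq!(n.div_ceil(2), (n + 1) / 2);
    }
    // The method form stays correct even where `n + 1` would overflow.
    assert_eq!(usize::MAX.div_ceil(2), usize::MAX / 2 + 1);
}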

View File

@ -133,7 +133,7 @@ pub trait FromValue: Sized {
Type::Custom( Type::Custom(
any::type_name::<Self>() any::type_name::<Self>()
.split(':') .split(':')
.last() .next_back()
.expect("str::split returns an iterator with at least one element") .expect("str::split returns an iterator with at least one element")
.to_string() .to_string()
.into_boxed_str(), .into_boxed_str(),

View File

@ -119,7 +119,7 @@ fn build_vertical_map(record: Record, config: &Config) -> TableValue {
fn string_append_to_width(key: &mut String, max: usize) { fn string_append_to_width(key: &mut String, max: usize) {
let width = string_width(key); let width = string_width(key);
let rest = max - width; let rest = max - width;
key.extend(std::iter::repeat(' ').take(rest)); key.extend(std::iter::repeat_n(' ', rest));
} }
fn build_vertical_array(vals: Vec<Value>, config: &Config) -> TableValue { fn build_vertical_array(vals: Vec<Value>, config: &Config) -> TableValue {

View File

@ -93,7 +93,7 @@
use std::cmp::max; use std::cmp::max;
use std::fmt; use std::fmt;
use std::iter::repeat; use std::iter::repeat_n;
use unicode_width::UnicodeWidthStr; use unicode_width::UnicodeWidthStr;
fn unicode_width_strip_ansi(astring: &str) -> usize { fn unicode_width_strip_ansi(astring: &str) -> usize {
@ -290,7 +290,7 @@ impl Grid {
} }
fn column_widths(&self, num_lines: usize, num_columns: usize) -> Dimensions { fn column_widths(&self, num_lines: usize, num_columns: usize) -> Dimensions {
let mut widths: Vec<Width> = repeat(0).take(num_columns).collect(); let mut widths: Vec<Width> = repeat_n(0, num_columns).collect();
for (index, cell) in self.cells.iter().enumerate() { for (index, cell) in self.cells.iter().enumerate() {
let index = match self.options.direction { let index = match self.options.direction {
Direction::LeftToRight => index % num_columns, Direction::LeftToRight => index % num_columns,
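`std::iter::repeat_n(x, n)` (stable since Rust 1.82) is the bounded counterpart of `repeat(x).take(n)` used at the two call sites above: the count is part of the iterator itself, it implements `ExactSizeIterator`, and it hands out the original value rather than a clone on the final iteration. A standalone illustration of both uses:

use std::iter;

fn main() {
    // Padding a string to a fixed width, as in string_append_to_width above.
    let mut key = String::from("name");
    key.extend(iter::repeat_n(' ', 4));
    assert_eq!(key, "name    ");

    // A pre-sized zero vector, as in column_widths above.
    let widths: Vec<usize> = iter::repeat_n(0, 3).collect();
    assert_eq!(widths, vec![0, 0, 0]);

    // The remaining length is known exactly.
    assert_eq!(iter::repeat_n('x', 5).len(), 5);
}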

View File

@ -317,7 +317,7 @@ impl NuDataFrame {
let series = self.as_series(span)?; let series = self.as_series(span)?;
let column = conversion::create_column_from_series(&series, row, row + 1, span)?; let column = conversion::create_column_from_series(&series, row, row + 1, span)?;
if column.len() == 0 { if column.is_empty() {
Err(ShellError::AccessEmptyContent { span }) Err(ShellError::AccessEmptyContent { span })
} else { } else {
let value = column let value = column

View File

@ -1,6 +1,6 @@
config n config n
config n foo bar - config n foo bar -
config n foo bar l --l config n foo bar c --l
# detail # detail
def "config n foo bar" [ def "config n foo bar" [

View File

@ -3,6 +3,6 @@ let greeting = "Hello"
echo $gre echo $gre
| st | st
ls l ls c
$greeting not-h $greeting not-h

View File

@ -11,7 +11,7 @@ foo bar 1 2 3
foo baz 1 2 3 foo baz 1 2 3
def "foo baz" [ def "foo baz" [
p1: int p1: int
p2: string, p2: string, # doc
p3?: int = 1 # doc p3?: int = 1
] {} ] {}
echo echo