Mirror of https://github.com/nushell/nushell.git, synced 2025-04-29 15:44:28 +02:00

Merge branch 'main' into polars_categorical_2

This commit is contained in: commit 705bc342fb

Changed files include: Cargo.lock (generated, 8 lines changed)
@@ -4488,9 +4488,9 @@ dependencies = [

[[package]]
name = "openssl"
version = "0.10.70"
version = "0.10.72"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "61cfb4e166a8bb8c9b55c500bc2308550148ece889be90f609377e58140f42c6"
checksum = "fedfea7d58a1f73118430a55da6a286e7b044961736ce96a16a17068ea25e5da"
dependencies = [
"bitflags 2.6.0",
"cfg-if",
@@ -4529,9 +4529,9 @@ dependencies = [

[[package]]
name = "openssl-sys"
version = "0.9.105"
version = "0.9.107"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8b22d5b84be05a8d6947c7cb71f7c849aa0f112acd4bf51c2a7c1c988ac0a9dc"
checksum = "8288979acd84749c744a9014b4382d42b8f7b2592847b5afb2ed29e5d16ede07"
dependencies = [
"cc",
"libc",
@@ -150,7 +150,7 @@ rusqlite = "0.31"
rust-embed = "8.6.0"
scopeguard = { version = "1.2.0" }
serde = { version = "1.0" }
serde_json = "1.0"
serde_json = "1.0.97"
serde_urlencoded = "0.7.1"
serde_yaml = "0.9.33"
sha2 = "0.10"
@@ -135,7 +135,7 @@ where
(min, max) => (rhs, lhs, max, min),
};

let pad = iter::repeat(0).take(max_len - min_len);
let pad = iter::repeat_n(0, max_len - min_len);

let mut a;
let mut b;
@@ -159,9 +159,10 @@ where
}
(Value::Binary { .. }, Value::Int { .. }) | (Value::Int { .. }, Value::Binary { .. }) => {
Value::error(
ShellError::PipelineMismatch {
ShellError::OnlySupportsThisInputType {
exp_input_type: "input, and argument, to be both int or both binary"
.to_string(),
wrong_type: "int and binary".to_string(),
dst_span: rhs.span(),
src_span: span,
},
@@ -249,7 +249,7 @@ fn shift_bytes_and_bits_left(data: &[u8], byte_shift: usize, bit_shift: usize) -
Last | Only => lhs << bit_shift,
_ => (lhs << bit_shift) | (rhs >> (8 - bit_shift)),
})
.chain(iter::repeat(0).take(byte_shift))
.chain(iter::repeat_n(0, byte_shift))
.collect::<Vec<u8>>()
}
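Editor's note: several hunks in this commit replace the `iter::repeat(x).take(n)` pattern with `iter::repeat_n(x, n)`. A minimal standalone sketch of the equivalence, assuming a Rust toolchain where `std::iter::repeat_n` is stable:

```rust
use std::iter;

fn main() {
    let n = 3;
    // Older pattern: an infinite repeater truncated with `take`.
    let padded_old: Vec<u8> = iter::repeat(0u8).take(n).collect();
    // New pattern: a bounded repeater that also knows its exact length up front.
    let padded_new: Vec<u8> = iter::repeat_n(0u8, n).collect();
    assert_eq!(padded_old, padded_new);
}
```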
@@ -1,6 +1,5 @@
use crate::{generate_strftime_list, parse_date_from_string};
use chrono::{DateTime, FixedOffset, Local, NaiveDateTime, TimeZone, Utc};
use human_date_parser::{from_human_time, ParseResult};
use nu_cmd_base::input_handler::{operate, CmdArgument};
use nu_engine::command_prelude::*;

@@ -98,11 +97,6 @@ impl Command for IntoDatetime {
"Show all possible variables for use in --format flag",
Some('l'),
)
.switch(
"list-human",
"Show human-readable datetime parsing examples",
Some('n'),
)
.rest(
"rest",
SyntaxShape::CellPath,
@@ -120,8 +114,6 @@ impl Command for IntoDatetime {
) -> Result<PipelineData, ShellError> {
if call.has_flag(engine_state, stack, "list")? {
Ok(generate_strftime_list(call.head, true).into_pipeline_data())
} else if call.has_flag(engine_state, stack, "list-human")? {
Ok(list_human_readable_examples(call.head).into_pipeline_data())
} else {
let cell_paths = call.rest(engine_state, stack, 0)?;
let cell_paths = (!cell_paths.is_empty()).then_some(cell_paths);
@@ -256,21 +248,6 @@ impl Command for IntoDatetime {
Span::test_data(),
)),
},
Example {
description: "Parsing human readable datetimes",
example: "'Today at 18:30' | into datetime",
result: None,
},
Example {
description: "Parsing human readable datetimes",
example: "'Last Friday at 19:45' | into datetime",
result: None,
},
Example {
description: "Parsing human readable datetimes",
example: "'In 5 minutes and 30 seconds' | into datetime",
result: None,
},
]
}
}
@@ -291,60 +268,9 @@ fn action(input: &Value, args: &Arguments, head: Span) -> Value {
if matches!(input, Value::String { .. }) && dateformat.is_none() {
let span = input.span();
if let Ok(input_val) = input.coerce_str() {
match parse_date_from_string(&input_val, span) {
Ok(date) => return Value::date(date, span),
Err(_) => {
if let Ok(date) = from_human_time(&input_val, Local::now().naive_local()) {
match date {
ParseResult::Date(date) => {
let time = Local::now().time();
let combined = date.and_time(time);
let local_offset = *Local::now().offset();
let dt_fixed =
TimeZone::from_local_datetime(&local_offset, &combined)
.single()
.unwrap_or_default();
return Value::date(dt_fixed, span);
if let Ok(date) = parse_date_from_string(&input_val, span) {
return Value::date(date, span);
}
ParseResult::DateTime(date) => {
let local_offset = *Local::now().offset();
let dt_fixed = match local_offset.from_local_datetime(&date) {
chrono::LocalResult::Single(dt) => dt,
chrono::LocalResult::Ambiguous(_, _) => {
return Value::error(
ShellError::DatetimeParseError {
msg: "Ambiguous datetime".to_string(),
span,
},
span,
);
}
chrono::LocalResult::None => {
return Value::error(
ShellError::DatetimeParseError {
msg: "Invalid datetime".to_string(),
span,
},
span,
);
}
};
return Value::date(dt_fixed, span);
}
ParseResult::Time(time) => {
let date = Local::now().date_naive();
let combined = date.and_time(time);
let local_offset = *Local::now().offset();
let dt_fixed =
TimeZone::from_local_datetime(&local_offset, &combined)
.single()
.unwrap_or_default();
return Value::date(dt_fixed, span);
}
}
}
}
};
}
}

@@ -524,44 +450,6 @@ fn action(input: &Value, args: &Arguments, head: Span) -> Value {
}
}

fn list_human_readable_examples(span: Span) -> Value {
let examples: Vec<String> = vec![
"Today 18:30".into(),
"2022-11-07 13:25:30".into(),
"15:20 Friday".into(),
"This Friday 17:00".into(),
"13:25, Next Tuesday".into(),
"Last Friday at 19:45".into(),
"In 3 days".into(),
"In 2 hours".into(),
"10 hours and 5 minutes ago".into(),
"1 years ago".into(),
"A year ago".into(),
"A month ago".into(),
"A week ago".into(),
"A day ago".into(),
"An hour ago".into(),
"A minute ago".into(),
"A second ago".into(),
"Now".into(),
];

let records = examples
.iter()
.map(|s| {
Value::record(
record! {
"parseable human datetime examples" => Value::test_string(s.to_string()),
"result" => action(&Value::test_string(s.to_string()), &Arguments { zone_options: None, format_options: None, cell_paths: None }, span)
},
span,
)
})
.collect::<Vec<Value>>();

Value::list(records, span)
}

#[cfg(test)]
mod tests {
use super::*;
@@ -593,14 +481,7 @@ mod tests {
}

#[test]
#[ignore]
fn takes_a_date_format_without_timezone() {
// Ignoring this test for now because we changed the human-date-parser to use
// the users timezone instead of UTC. We may continue to tweak this behavior.
// Another hacky solution is to set the timezone to UTC in the test, which works
// on MacOS and Linux but hasn't been tested on Windows. Plus it kind of defeats
// the purpose of a "without_timezone" test.
// std::env::set_var("TZ", "UTC");
let date_str = Value::test_string("16.11.1984 8:00 am");
let fmt_options = Some(DatetimeFormat("%d.%m.%Y %H:%M %P".to_string()));
let args = Arguments {
@@ -40,6 +40,7 @@ impl Command for SplitCellPath {
input: PipelineData,
) -> Result<PipelineData, ShellError> {
let head = call.head;
let input_type = input.get_type();

let src_span = match input {
// Early return on correct type and empty pipeline
@@ -54,8 +55,9 @@ impl Command for SplitCellPath {
PipelineData::ListStream(stream, ..) => stream.span(),
PipelineData::ByteStream(stream, ..) => stream.span(),
};
Err(ShellError::PipelineMismatch {
Err(ShellError::OnlySupportsThisInputType {
exp_input_type: "cell-path".into(),
wrong_type: input_type.to_string(),
dst_span: head,
src_span,
})
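Editor's note: many call sites in this commit swap `ShellError::PipelineMismatch` for `ShellError::OnlySupportsThisInputType`. A minimal sketch of the new variant as it is constructed in these hunks — the helper function itself is hypothetical, but the field names come straight from the diff:

```rust
use nu_protocol::{ShellError, Span, Value};

// Hypothetical helper mirroring the pattern used across these hunks:
// report that only a specific input type is supported.
fn unsupported_input(expected: &str, got: &Value, head: Span) -> Value {
    Value::error(
        ShellError::OnlySupportsThisInputType {
            exp_input_type: expected.to_string(),   // e.g. "cell-path" or "string"
            wrong_type: got.get_type().to_string(), // the type actually received
            dst_span: head,                         // span of the command call
            src_span: got.span(),                   // span of the offending value
        },
        got.span(),
    )
}
```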
New file: crates/nu-command/src/date/from_human.rs (259 lines)
@@ -0,0 +1,259 @@
use chrono::{Local, TimeZone};
use human_date_parser::{from_human_time, ParseResult};
use nu_engine::command_prelude::*;

#[derive(Clone)]
pub struct DateFromHuman;

impl Command for DateFromHuman {
fn name(&self) -> &str {
"date from-human"
}

fn signature(&self) -> Signature {
Signature::build("date from-human")
.input_output_types(vec![
(Type::String, Type::Date),
(Type::Nothing, Type::table()),
])
.allow_variants_without_examples(true)
.switch(
"list",
"Show human-readable datetime parsing examples",
Some('l'),
)
.category(Category::Date)
}

fn description(&self) -> &str {
"Convert a human readable datetime string to a datetime."
}

fn search_terms(&self) -> Vec<&str> {
vec![
"relative",
"now",
"today",
"tomorrow",
"yesterday",
"weekday",
"weekday_name",
"timezone",
]
}

fn run(
&self,
engine_state: &EngineState,
stack: &mut Stack,
call: &Call,
input: PipelineData,
) -> Result<PipelineData, ShellError> {
if call.has_flag(engine_state, stack, "list")? {
return Ok(list_human_readable_examples(call.head).into_pipeline_data());
}
let head = call.head;
// This doesn't match explicit nulls
if matches!(input, PipelineData::Empty) {
return Err(ShellError::PipelineEmpty { dst_span: head });
}
input.map(move |value| helper(value, head), engine_state.signals())
}

fn examples(&self) -> Vec<Example> {
vec![
Example {
description: "Parsing human readable datetime",
example: "'Today at 18:30' | date from-human",
result: None,
},
Example {
description: "Parsing human readable datetime",
example: "'Last Friday at 19:45' | date from-human",
result: None,
},
Example {
description: "Parsing human readable datetime",
example: "'In 5 minutes and 30 seconds' | date from-human",
result: None,
},
Example {
description: "Show human-readable datetime parsing examples",
example: "date from-human --list",
result: None,
},
]
}
}

fn helper(value: Value, head: Span) -> Value {
let span = value.span();
let input_val = match value {
Value::String { val, .. } => val,
other => {
return Value::error(
ShellError::OnlySupportsThisInputType {
exp_input_type: "string".to_string(),
wrong_type: other.get_type().to_string(),
dst_span: head,
src_span: span,
},
span,
)
}
};

if let Ok(date) = from_human_time(&input_val, Local::now().naive_local()) {
match date {
ParseResult::Date(date) => {
let time = Local::now().time();
let combined = date.and_time(time);
let local_offset = *Local::now().offset();
let dt_fixed = TimeZone::from_local_datetime(&local_offset, &combined)
.single()
.unwrap_or_default();
return Value::date(dt_fixed, span);
}
ParseResult::DateTime(date) => {
let local_offset = *Local::now().offset();
let dt_fixed = match local_offset.from_local_datetime(&date) {
chrono::LocalResult::Single(dt) => dt,
chrono::LocalResult::Ambiguous(_, _) => {
return Value::error(
ShellError::DatetimeParseError {
msg: "Ambiguous datetime".to_string(),
span,
},
span,
);
}
chrono::LocalResult::None => {
return Value::error(
ShellError::DatetimeParseError {
msg: "Invalid datetime".to_string(),
span,
},
span,
);
}
};
return Value::date(dt_fixed, span);
}
ParseResult::Time(time) => {
let date = Local::now().date_naive();
let combined = date.and_time(time);
let local_offset = *Local::now().offset();
let dt_fixed = TimeZone::from_local_datetime(&local_offset, &combined)
.single()
.unwrap_or_default();
return Value::date(dt_fixed, span);
}
}
}

match from_human_time(&input_val, Local::now().naive_local()) {
Ok(date) => match date {
ParseResult::Date(date) => {
let time = Local::now().time();
let combined = date.and_time(time);
let local_offset = *Local::now().offset();
let dt_fixed = TimeZone::from_local_datetime(&local_offset, &combined)
.single()
.unwrap_or_default();
Value::date(dt_fixed, span)
}
ParseResult::DateTime(date) => {
let local_offset = *Local::now().offset();
let dt_fixed = match local_offset.from_local_datetime(&date) {
chrono::LocalResult::Single(dt) => dt,
chrono::LocalResult::Ambiguous(_, _) => {
return Value::error(
ShellError::DatetimeParseError {
msg: "Ambiguous datetime".to_string(),
span,
},
span,
);
}
chrono::LocalResult::None => {
return Value::error(
ShellError::DatetimeParseError {
msg: "Invalid datetime".to_string(),
span,
},
span,
);
}
};
Value::date(dt_fixed, span)
}
ParseResult::Time(time) => {
let date = Local::now().date_naive();
let combined = date.and_time(time);
let local_offset = *Local::now().offset();
let dt_fixed = TimeZone::from_local_datetime(&local_offset, &combined)
.single()
.unwrap_or_default();
Value::date(dt_fixed, span)
}
},
Err(_) => Value::error(
ShellError::IncorrectValue {
msg: "Cannot parse as humanized date".to_string(),
val_span: head,
call_span: span,
},
span,
),
}
}

fn list_human_readable_examples(span: Span) -> Value {
let examples: Vec<String> = vec![
"Today 18:30".into(),
"2022-11-07 13:25:30".into(),
"15:20 Friday".into(),
"This Friday 17:00".into(),
"13:25, Next Tuesday".into(),
"Last Friday at 19:45".into(),
"In 3 days".into(),
"In 2 hours".into(),
"10 hours and 5 minutes ago".into(),
"1 years ago".into(),
"A year ago".into(),
"A month ago".into(),
"A week ago".into(),
"A day ago".into(),
"An hour ago".into(),
"A minute ago".into(),
"A second ago".into(),
"Now".into(),
];

let records = examples
.iter()
.map(|s| {
Value::record(
record! {
"parseable human datetime examples" => Value::test_string(s.to_string()),
"result" => helper(Value::test_string(s.to_string()), span),
},
span,
)
})
.collect::<Vec<Value>>();

Value::list(records, span)
}

#[cfg(test)]
mod test {
use super::*;

#[test]
fn test_examples() {
use crate::test_examples;

test_examples(DateFromHuman {})
}
}
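Editor's note: the new `date from-human` helper attaches the current local offset to the naive date/time values produced by `human_date_parser`. A minimal standalone sketch of that conversion using only chrono; the `unwrap_or_default` fallback mirrors the hunk above:

```rust
use chrono::{DateTime, FixedOffset, Local, NaiveDateTime, TimeZone};

/// Attach the current local offset to a naive datetime, as the helper above does.
fn to_local_fixed(naive: NaiveDateTime) -> DateTime<FixedOffset> {
    let local_offset = *Local::now().offset();
    local_offset
        .from_local_datetime(&naive)
        // `single()` is None for ambiguous or non-existent local times
        // (e.g. around DST transitions); fall back to the default like the diff does.
        .single()
        .unwrap_or_default()
}

fn main() {
    let now_naive = Local::now().naive_local();
    println!("{}", to_local_fixed(now_naive));
}
```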
@@ -1,4 +1,5 @@
mod date_;
mod from_human;
mod humanize;
mod list_timezone;
mod now;
@@ -7,6 +8,7 @@ mod to_timezone;
mod utils;

pub use date_::Date;
pub use from_human::DateFromHuman;
pub use humanize::DateHumanize;
pub use list_timezone::DateListTimezones;
pub use now::DateNow;
@@ -118,7 +118,7 @@ fn increase_string_width(text: &mut String, total: usize) {
let rest = total - width;

if rest > 0 {
text.extend(std::iter::repeat(' ').take(rest));
text.extend(std::iter::repeat_n(' ', rest));
}
}
@@ -272,6 +272,7 @@ pub fn add_shell_command_context(mut engine_state: EngineState) -> EngineState {
// Date
bind_command! {
Date,
DateFromHuman,
DateHumanize,
DateListTimezones,
DateNow,
@@ -378,10 +378,7 @@ fn ls_for_one_pattern(
.par_bridge()
.filter_map(move |x| match x {
Ok(path) => {
let metadata = match std::fs::symlink_metadata(&path) {
Ok(metadata) => Some(metadata),
Err(_) => None,
};
let metadata = std::fs::symlink_metadata(&path).ok();
let hidden_dir_clone = Arc::clone(&hidden_dirs);
let mut hidden_dir_mutex = hidden_dir_clone
.lock()
@@ -243,7 +243,7 @@ mod test {
let chunks = chunk_read.map(|e| e.unwrap()).collect::<Vec<_>>();
assert_eq!(
chunks,
[s[..4].as_bytes(), s[4..8].as_bytes(), s[8..].as_bytes()]
[&s.as_bytes()[..4], &s.as_bytes()[4..8], &s.as_bytes()[8..]]
);
}

@@ -260,7 +260,7 @@ mod test {
let chunks = chunk_read.map(|e| e.unwrap()).collect::<Vec<_>>();
assert_eq!(
chunks,
[s[..4].as_bytes(), s[4..8].as_bytes(), s[8..].as_bytes()]
[&s.as_bytes()[..4], &s.as_bytes()[4..8], &s.as_bytes()[8..]]
);
}
@@ -42,8 +42,9 @@ pub(crate) fn typecheck_merge(lhs: &Value, rhs: &Value, head: Span) -> Result<()
match (lhs.get_type(), rhs.get_type()) {
(Type::Record { .. }, Type::Record { .. }) => Ok(()),
(_, _) if is_list_of_records(lhs) && is_list_of_records(rhs) => Ok(()),
_ => Err(ShellError::PipelineMismatch {
other => Err(ShellError::OnlySupportsThisInputType {
exp_input_type: "input and argument to be both record or both table".to_string(),
wrong_type: format!("{} and {}", other.0, other.1).to_string(),
dst_span: head,
src_span: lhs.span(),
}),
@@ -174,8 +174,9 @@ impl Command for Move {
PipelineData::Value(Value::Record { val, .. }, ..) => {
Ok(move_record_columns(&val, &columns, &location, head)?.into_pipeline_data())
}
_ => Err(ShellError::PipelineMismatch {
other => Err(ShellError::OnlySupportsThisInputType {
exp_input_type: "record or table".to_string(),
wrong_type: other.get_type().to_string(),
dst_span: head,
src_span: Span::new(head.start, head.start),
}),
@@ -184,9 +184,10 @@ impl Command for Sort {
dst_span: value.span(),
})
}
_ => {
return Err(ShellError::PipelineMismatch {
ref other => {
return Err(ShellError::OnlySupportsThisInputType {
exp_input_type: "record or list".to_string(),
wrong_type: other.get_type().to_string(),
dst_span: call.head,
src_span: value.span(),
})
@@ -102,7 +102,7 @@ pub fn calculate(
mf(&new_vals?, span, name)
}
PipelineData::Value(val, ..) => mf(&[val], span, name),
PipelineData::Empty { .. } => Err(ShellError::PipelineEmpty { dst_span: name }),
PipelineData::Empty => Err(ShellError::PipelineEmpty { dst_span: name }),
val => Err(ShellError::UnsupportedInput {
msg: "Only ints, floats, lists, records, or ranges are supported".into(),
input: "value originates from here".into(),
@@ -723,7 +723,7 @@ fn transform_response_using_content_type(
)
})?
.path_segments()
.and_then(|segments| segments.last())
.and_then(|mut segments| segments.next_back())
.and_then(|name| if name.is_empty() { None } else { Some(name) })
.and_then(|name| {
PathBuf::from(name)
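Editor's note: the `.last()` → `.next_back()` change takes the final path segment from the back of the iterator instead of walking it from the front. A small sketch of the same pattern, assuming (as the surrounding code suggests) the value being inspected is a `url::Url`; the URL itself is an arbitrary example:

```rust
use url::Url;

fn main() {
    let url = Url::parse("https://example.com/downloads/report.csv").expect("valid URL");
    // `path_segments()` yields a double-ended iterator, so the last segment
    // can be taken directly from the back.
    let file_name = url
        .path_segments()
        .and_then(|mut segments| segments.next_back())
        .filter(|name| !name.is_empty());
    assert_eq!(file_name, Some("report.csv"));
}
```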
@@ -175,7 +175,7 @@ fn run(call: &Call, args: &Arguments, input: PipelineData) -> Result<PipelineDat
handle_value(stream.into_value(), args, head),
metadata,
)),
PipelineData::Empty { .. } => Err(ShellError::PipelineEmpty { dst_span: head }),
PipelineData::Empty => Err(ShellError::PipelineEmpty { dst_span: head }),
_ => Err(ShellError::UnsupportedInput {
msg: "Input value cannot be joined".to_string(),
input: "value originates from here".into(),
@@ -221,14 +221,21 @@ fn join_list(parts: &[Value], head: Span, span: Span, args: &Arguments) -> Value

Value::list(vals, span)
}
Err(_) => Value::error(
ShellError::PipelineMismatch {
Err(ShellError::CantConvert { from_type, .. }) => Value::error(
ShellError::OnlySupportsThisInputType {
exp_input_type: "string or record".into(),
wrong_type: from_type,
dst_span: head,
src_span: span,
},
span,
),
Err(_) => Value::error(
ShellError::NushellFailed {
msg: "failed to join path".into(),
},
span,
),
}
}
}
@@ -51,21 +51,11 @@ fn handle_invalid_values(rest: Value, name: Span) -> Value {
fn err_from_value(rest: &Value, name: Span) -> ShellError {
match rest {
Value::Error { error, .. } => *error.clone(),
_ => {
if rest.is_nothing() {
ShellError::OnlySupportsThisInputType {
_ => ShellError::OnlySupportsThisInputType {
exp_input_type: "string, record or list".into(),
wrong_type: "nothing".into(),
dst_span: name,
src_span: rest.span(),
}
} else {
ShellError::PipelineMismatch {
exp_input_type: "string, row or list".into(),
wrong_type: rest.get_type().to_string(),
dst_span: name,
src_span: rest.span(),
}
}
}
},
}
}
@@ -181,8 +181,11 @@ fn operate(
Value::List { vals, .. } => {
let iter = vals.into_iter().map(move |val| {
let span = val.span();
val.into_string().map_err(|_| ShellError::PipelineMismatch {
let type_ = val.get_type();
val.into_string()
.map_err(|_| ShellError::OnlySupportsThisInputType {
exp_input_type: "string".into(),
wrong_type: type_.to_string(),
dst_span: head,
src_span: span,
})
@@ -199,8 +202,9 @@ fn operate(

Ok(ListStream::new(iter, head, Signals::empty()).into())
}
value => Err(ShellError::PipelineMismatch {
value => Err(ShellError::OnlySupportsThisInputType {
exp_input_type: "string".into(),
wrong_type: value.get_type().to_string(),
dst_span: head,
src_span: value.span(),
}),
@@ -153,8 +153,9 @@ fn split_chars_helper(v: &Value, name: Span, graphemes: bool) -> Value {
)
} else {
Value::error(
ShellError::PipelineMismatch {
ShellError::OnlySupportsThisInputType {
exp_input_type: "string".into(),
wrong_type: v.get_type().to_string(),
dst_span: name,
src_span: v_span,
},
@@ -255,8 +255,9 @@ fn split_column_helper(
v => {
let span = v.span();
vec![Value::error(
ShellError::PipelineMismatch {
ShellError::OnlySupportsThisInputType {
exp_input_type: "string".into(),
wrong_type: v.get_type().to_string(),
dst_span: head,
src_span: span,
},
@@ -219,8 +219,9 @@ fn split_row_helper(v: &Value, regex: &Regex, max_split: Option<usize>, name: Sp
}
} else {
vec![Value::error(
ShellError::PipelineMismatch {
ShellError::OnlySupportsThisInputType {
exp_input_type: "string".into(),
wrong_type: v.get_type().to_string(),
dst_span: name,
src_span: v_span,
},
@@ -226,8 +226,9 @@ fn split_words_helper(v: &Value, word_length: Option<usize>, span: Span, graphem
Value::list(words, v_span)
} else {
Value::error(
ShellError::PipelineMismatch {
ShellError::OnlySupportsThisInputType {
exp_input_type: "string".into(),
wrong_type: v.get_type().to_string(),
dst_span: span,
src_span: v_span,
},
@@ -237,14 +237,16 @@ fn run(
input.map(
move |v| {
let value_span = v.span();
let type_ = v.get_type();
match v.coerce_into_string() {
Ok(s) => {
let contents = if is_path { s.replace('\\', "\\\\") } else { s };
str_expand(&contents, span, value_span)
}
Err(_) => Value::error(
ShellError::PipelineMismatch {
ShellError::OnlySupportsThisInputType {
exp_input_type: "string".into(),
wrong_type: type_.to_string(),
dst_span: span,
src_span: value_span,
},
@@ -108,6 +108,7 @@ fn stats(
input.map(
move |v| {
let value_span = v.span();
let type_ = v.get_type();
// First, obtain the span. If this fails, propagate the error that results.
if let Value::Error { error, .. } = v {
return Value::error(*error, span);
@@ -116,8 +117,9 @@ fn stats(
match v.coerce_into_string() {
Ok(s) => counter(&s, span),
Err(_) => Value::error(
ShellError::PipelineMismatch {
ShellError::OnlySupportsThisInputType {
exp_input_type: "string".into(),
wrong_type: type_.to_string(),
dst_span: span,
src_span: value_span,
},
@@ -98,7 +98,7 @@ pub(crate) fn finish_redirection(
if !matches!(
modes.err,
Some(Spanned {
item: RedirectMode::Pipe { .. },
item: RedirectMode::Pipe,
..
})
) {
@@ -323,9 +323,7 @@ fn repeat_vertical(
c: char,
style: TextStyle,
) {
let text = std::iter::repeat(c)
.take(width as usize)
.collect::<String>();
let text = std::iter::repeat_n(c, width as usize).collect::<String>();
let style = text_style_to_tui_style(style);
let span = Span::styled(text, style);
@@ -3,12 +3,13 @@ use std::sync::Arc;
use crate::{span_to_range, uri_to_path, LanguageServer};
use lsp_types::{
CompletionItem, CompletionItemKind, CompletionItemLabelDetails, CompletionParams,
CompletionResponse, CompletionTextEdit, Documentation, MarkupContent, MarkupKind, TextEdit,
CompletionResponse, CompletionTextEdit, Documentation, InsertTextFormat, MarkupContent,
MarkupKind, Range, TextEdit,
};
use nu_cli::{NuCompleter, SuggestionKind};
use nu_cli::{NuCompleter, SemanticSuggestion, SuggestionKind};
use nu_protocol::{
engine::{CommandType, Stack},
Span,
engine::{CommandType, EngineState, Stack},
PositionalArg, Span, SyntaxShape,
};

impl LanguageServer {
@@ -29,7 +30,7 @@ impl LanguageServer {
.is_some_and(|c| c.is_whitespace() || "|(){}[]<>,:;".contains(c));

self.need_parse |= need_fallback;
let engine_state = Arc::new(self.new_engine_state());
let engine_state = Arc::new(self.new_engine_state(Some(&path_uri)));
let completer = NuCompleter::new(engine_state.clone(), Arc::new(Stack::new()));
let results = if need_fallback {
completer.fetch_completions_at(&file_text[..location], location)
@@ -45,27 +46,96 @@ impl LanguageServer {
results
.into_iter()
.map(|r| {
let decl_id = r.kind.clone().and_then(|kind| {
matches!(kind, SuggestionKind::Command(_))
.then_some(engine_state.find_decl(r.suggestion.value.as_bytes(), &[])?)
});

let mut label_value = r.suggestion.value;
if r.suggestion.append_whitespace {
label_value.push(' ');
let reedline_span = r.suggestion.span;
Self::completion_item_from_suggestion(
&engine_state,
r,
span_to_range(&Span::new(reedline_span.start, reedline_span.end), file, 0),
)
})
.collect(),
))
}

fn completion_item_from_suggestion(
engine_state: &EngineState,
suggestion: SemanticSuggestion,
range: Range,
) -> CompletionItem {
let decl_id = suggestion.kind.as_ref().and_then(|kind| {
matches!(kind, SuggestionKind::Command(_))
.then_some(engine_state.find_decl(suggestion.suggestion.value.as_bytes(), &[])?)
});

let mut snippet_text = suggestion.suggestion.value.clone();
let mut doc_string = suggestion.suggestion.extra.map(|ex| ex.join("\n"));
let mut insert_text_format = None;
let mut idx = 0;
// use snippet as `insert_text_format` for command argument completion
if let Some(decl_id) = decl_id {
let cmd = engine_state.get_decl(decl_id);
doc_string = Some(Self::get_decl_description(cmd, true));
insert_text_format = Some(InsertTextFormat::SNIPPET);
let signature = cmd.signature();
// add curly brackets around block arguments
// and keywords, e.g. `=` in `alias foo = bar`
let mut arg_wrapper = |arg: &PositionalArg, text: String, optional: bool| -> String {
idx += 1;
match &arg.shape {
SyntaxShape::Block | SyntaxShape::MatchBlock => {
format!("{{ ${{{}:{}}} }}", idx, text)
}
SyntaxShape::Keyword(kwd, _) => {
// NOTE: If optional, the keyword should also be in a placeholder so that it can be removed easily.
// Here we choose to use nested placeholders. Note that some editors don't fully support this format,
// but usually they will simply ignore the inner ones, so it should be fine.
if optional {
idx += 1;
format!(
"${{{}:{} ${{{}:{}}}}}",
idx - 1,
String::from_utf8_lossy(kwd),
idx,
text
)
} else {
format!("{} ${{{}:{}}}", String::from_utf8_lossy(kwd), idx, text)
}
}
_ => format!("${{{}:{}}}", idx, text),
}
};

for required in signature.required_positional {
snippet_text.push(' ');
snippet_text
.push_str(arg_wrapper(&required, required.name.clone(), false).as_str());
}
for optional in signature.optional_positional {
snippet_text.push(' ');
snippet_text
.push_str(arg_wrapper(&optional, format!("{}?", optional.name), true).as_str());
}
if let Some(rest) = signature.rest_positional {
idx += 1;
snippet_text.push_str(format!(" ${{{}:...{}}}", idx, rest.name).as_str());
}
}
// no extra space for a command with args expanded in the snippet
if idx == 0 && suggestion.suggestion.append_whitespace {
snippet_text.push(' ');
}

let span = r.suggestion.span;
let text_edit = Some(CompletionTextEdit::Edit(TextEdit {
range: span_to_range(&Span::new(span.start, span.end), file, 0),
new_text: label_value.clone(),
range,
new_text: snippet_text,
}));

CompletionItem {
label: label_value,
label_details: r
label: suggestion.suggestion.value,
label_details: suggestion
.kind
.clone()
.as_ref()
.map(|kind| match kind {
SuggestionKind::Value(t) => t.to_string(),
SuggestionKind::Command(cmd) => cmd.to_string(),
@@ -79,27 +149,18 @@ impl LanguageServer {
detail: None,
description: Some(s),
}),
detail: r.suggestion.description,
documentation: r
.suggestion
.extra
.map(|ex| ex.join("\n"))
.or(decl_id.map(|decl_id| {
Self::get_decl_description(engine_state.get_decl(decl_id), true)
}))
.map(|value| {
detail: suggestion.suggestion.description,
documentation: doc_string.map(|value| {
Documentation::MarkupContent(MarkupContent {
kind: MarkupKind::Markdown,
value,
})
}),
kind: Self::lsp_completion_item_kind(r.kind),
kind: Self::lsp_completion_item_kind(suggestion.kind),
text_edit,
insert_text_format,
..Default::default()
}
})
.collect(),
))
}

fn lsp_completion_item_kind(
@@ -264,10 +325,10 @@ mod tests {
let resp = send_complete_request(&client_connection, script.clone(), 2, 18);
assert!(result_from_message(resp).as_array().unwrap().contains(
&serde_json::json!({
"label": "LICENSE",
"label": "command.nu",
"labelDetails": { "description": "" },
"textEdit": { "range": { "start": { "line": 2, "character": 17 }, "end": { "line": 2, "character": 18 }, },
"newText": "LICENSE"
"newText": "command.nu"
},
"kind": 17
})
@@ -310,8 +371,9 @@ mod tests {
"detail": "Alias a command (with optional flags) to a new name.",
"textEdit": {
"range": { "start": { "line": 0, "character": 0 }, "end": { "line": 0, "character": 0 }, },
"newText": "alias "
"newText": "alias ${1:name} = ${2:initial_value}"
},
"insertTextFormat": 2,
"kind": 14
}
])
@@ -327,8 +389,9 @@ mod tests {
"detail": "Alias a command (with optional flags) to a new name.",
"textEdit": {
"range": { "start": { "line": 3, "character": 2 }, "end": { "line": 3, "character": 2 }, },
"newText": "alias "
"newText": "alias ${1:name} = ${2:initial_value}"
},
"insertTextFormat": 2,
"kind": 14
}
])
@@ -337,10 +400,10 @@ mod tests {
let resp = send_complete_request(&client_connection, script, 5, 4);
assert!(result_from_message(resp).as_array().unwrap().contains(
&serde_json::json!({
"label": "LICENSE",
"label": "cell_path.nu",
"labelDetails": { "description": "" },
"textEdit": { "range": { "start": { "line": 5, "character": 3 }, "end": { "line": 5, "character": 4 }, },
"newText": "LICENSE"
"newText": "cell_path.nu"
},
"kind": 17
})
@@ -369,8 +432,9 @@ mod tests {
"detail": "Trim whitespace or specific character.",
"textEdit": {
"range": { "start": { "line": 0, "character": 8 }, "end": { "line": 0, "character": 13 }, },
"newText": "str trim "
"newText": "str trim ${1:...rest}"
},
"insertTextFormat": 2,
"kind": 3
}
])
@@ -488,7 +552,7 @@ mod tests {
"detail": "Alias a command (with optional flags) to a new name.",
"textEdit": {
"range": { "start": { "line": 0, "character": 5 }, "end": { "line": 0, "character": 5 }, },
"newText": "alias "
"newText": "alias ${1:name} = ${2:initial_value}"
},
"kind": 14
},
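Editor's note: the completion handler now emits LSP snippet-style insert text (e.g. `alias ${1:name} = ${2:initial_value}`) instead of a plain label plus a trailing space. A condensed sketch of the lsp_types fields involved; the helper function, snippet string, and range are illustrative only:

```rust
use lsp_types::{
    CompletionItem, CompletionTextEdit, InsertTextFormat, Position, Range, TextEdit,
};

fn snippet_completion(label: &str, snippet: &str, range: Range) -> CompletionItem {
    CompletionItem {
        label: label.to_string(),
        // Tab stops like ${1:name} are only interpreted when the item is
        // marked as a snippet; otherwise clients insert the text verbatim.
        insert_text_format: Some(InsertTextFormat::SNIPPET),
        text_edit: Some(CompletionTextEdit::Edit(TextEdit {
            range,
            new_text: snippet.to_string(),
        })),
        ..Default::default()
    }
}

fn main() {
    let range = Range::new(Position::new(0, 0), Position::new(0, 5));
    let item = snippet_completion("alias", "alias ${1:name} = ${2:initial_value}", range);
    println!("{:?}", item.text_edit);
}
```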
@@ -7,7 +7,7 @@ use miette::{miette, IntoDiagnostic, Result};

impl LanguageServer {
pub(crate) fn publish_diagnostics_for_file(&mut self, uri: Uri) -> Result<()> {
let mut engine_state = self.new_engine_state();
let mut engine_state = self.new_engine_state(Some(&uri));
engine_state.generate_nu_constant();

let Some((_, span, working_set)) = self.parse_file(&mut engine_state, &uri, true) else {
@@ -77,13 +77,12 @@ impl LanguageServer {
&mut self,
params: &GotoDefinitionParams,
) -> Option<GotoDefinitionResponse> {
let mut engine_state = self.new_engine_state();

let path_uri = params
.text_document_position_params
.text_document
.uri
.to_owned();
let mut engine_state = self.new_engine_state(Some(&path_uri));
let (working_set, id, _, _) = self
.parse_and_find(
&mut engine_state,
@@ -1,7 +1,10 @@
use lsp_types::{Hover, HoverContents, HoverParams, MarkupContent, MarkupKind};
use nu_protocol::engine::Command;
use nu_protocol::{engine::Command, PositionalArg};

use crate::{Id, LanguageServer};
use crate::{
signature::{display_flag, doc_for_arg, get_signature_label},
Id, LanguageServer,
};

impl LanguageServer {
pub(crate) fn get_decl_description(decl: &dyn Command, skip_description: bool) -> String {
@@ -19,35 +22,27 @@ impl LanguageServer {
// Usage
description.push_str("---\n### Usage \n```nu\n");
let signature = decl.signature();
description.push_str(&Self::get_signature_label(&signature));
description.push_str(&get_signature_label(&signature, true));
description.push_str("\n```\n");

// Flags
if !signature.named.is_empty() {
description.push_str("\n### Flags\n\n");
let mut first = true;
for named in &signature.named {
for named in signature.named {
if first {
first = false;
} else {
description.push('\n');
}
description.push_str(" ");
if let Some(short_flag) = &named.short {
description.push_str(&format!("`-{short_flag}`"));
}
if !named.long.is_empty() {
if named.short.is_some() {
description.push_str(", ");
}
description.push_str(&format!("`--{}`", named.long));
}
if let Some(arg) = &named.arg {
description.push_str(&format!(" `<{}>`", arg.to_type()));
}
if !named.desc.is_empty() {
description.push_str(&format!(" - {}", named.desc));
}
description.push_str(&display_flag(&named, true));
description.push_str(&doc_for_arg(
named.arg,
named.desc,
named.default_value,
false,
));
description.push('\n');
}
description.push('\n');
@@ -60,46 +55,38 @@ impl LanguageServer {
{
description.push_str("\n### Parameters\n\n");
let mut first = true;
for required_arg in &signature.required_positional {
let mut write_arg = |arg: PositionalArg, optional: bool| {
if first {
first = false;
} else {
description.push('\n');
}
description.push_str(&format!(
" `{}: {}`",
required_arg.name,
required_arg.shape.to_type()
description.push_str(&format!(" `{}`", arg.name));
description.push_str(&doc_for_arg(
Some(arg.shape),
arg.desc,
arg.default_value,
optional,
));
if !required_arg.desc.is_empty() {
description.push_str(&format!(" - {}", required_arg.desc));
}
description.push('\n');
};
for required_arg in signature.required_positional {
write_arg(required_arg, false);
}
for optional_arg in &signature.optional_positional {
if first {
first = false;
} else {
description.push('\n');
for optional_arg in signature.optional_positional {
write_arg(optional_arg, true);
}
description.push_str(&format!(
" `{}: {}`",
optional_arg.name,
optional_arg.shape.to_type()
));
if !optional_arg.desc.is_empty() {
description.push_str(&format!(" - {}", optional_arg.desc));
}
description.push('\n');
}
if let Some(arg) = &signature.rest_positional {
if let Some(arg) = signature.rest_positional {
if !first {
description.push('\n');
}
description.push_str(&format!(" `...{}: {}`", arg.name, arg.shape.to_type()));
if !arg.desc.is_empty() {
description.push_str(&format!(" - {}", arg.desc));
}
description.push_str(&format!(" `...{}`", arg.name));
description.push_str(&doc_for_arg(
Some(arg.shape),
arg.desc,
arg.default_value,
false,
));
description.push('\n');
}
description.push('\n');
@@ -129,13 +116,12 @@ impl LanguageServer {
}

pub(crate) fn hover(&mut self, params: &HoverParams) -> Option<Hover> {
let mut engine_state = self.new_engine_state();

let path_uri = params
.text_document_position_params
.text_document
.uri
.to_owned();
let mut engine_state = self.new_engine_state(Some(&path_uri));
let (working_set, id, _, _) = self
.parse_and_find(
&mut engine_state,
@@ -379,7 +365,7 @@ mod hover_tests {
serde_json::json!({
"contents": {
"kind": "markdown",
"value": "Concatenate multiple strings into a single string, with an optional separator between each.\n---\n### Usage \n```nu\n str join {flags} <separator?>\n```\n\n### Flags\n\n `-h`, `--help` - Display the help message for this command\n\n\n### Parameters\n\n `separator: string` - Optional separator to use when creating string.\n\n\n### Input/output types\n\n```nu\n list<any> | string\n string | string\n\n```\n### Example(s)\n Create a string from input\n```nu\n ['nu', 'shell'] | str join\n```\n Create a string from input with a separator\n```nu\n ['nu', 'shell'] | str join '-'\n```\n"
"value": "Concatenate multiple strings into a single string, with an optional separator between each.\n---\n### Usage \n```nu\n str join {flags} (separator)\n```\n\n### Flags\n\n `-h`, `--help` - Display the help message for this command\n\n\n### Parameters\n\n `separator`: `<string>` - Optional separator to use when creating string. (optional)\n\n\n### Input/output types\n\n```nu\n list<any> | string\n string | string\n\n```\n### Example(s)\n Create a string from input\n```nu\n ['nu', 'shell'] | str join\n```\n Create a string from input with a separator\n```nu\n ['nu', 'shell'] | str join '-'\n```\n"
}
})
);
@@ -13,7 +13,7 @@ use miette::{miette, IntoDiagnostic, Result};
use nu_protocol::{
ast::{Block, PathMember},
engine::{EngineState, StateDelta, StateWorkingSet},
DeclId, ModuleId, Span, Type, VarId,
DeclId, ModuleId, Span, Type, Value, VarId,
};
use std::{
collections::BTreeMap,
@@ -315,13 +315,26 @@ impl LanguageServer {
Ok(reset)
}

pub(crate) fn new_engine_state(&self) -> EngineState {
/// Create a clone of the initial_engine_state with:
///
/// * PWD set to the parent directory of given uri. Fallback to `$env.PWD` if None.
/// * `StateDelta` cache merged
pub(crate) fn new_engine_state(&self, uri: Option<&Uri>) -> EngineState {
let mut engine_state = self.initial_engine_state.clone();
let cwd = std::env::current_dir().expect("Could not get current working directory.");
engine_state.add_env_var(
"PWD".into(),
nu_protocol::Value::test_string(cwd.to_string_lossy()),
);
match uri {
Some(uri) => {
let path = uri_to_path(uri);
if let Some(path) = path.parent() {
engine_state
.add_env_var("PWD".into(), Value::test_string(path.to_string_lossy()))
};
}
None => {
let cwd =
std::env::current_dir().expect("Could not get current working directory.");
engine_state.add_env_var("PWD".into(), Value::test_string(cwd.to_string_lossy()));
}
}
// merge the cached `StateDelta` if text not changed
if !self.need_parse {
engine_state
@@ -350,7 +363,7 @@ impl LanguageServer {
engine_state: &'a mut EngineState,
uri: &Uri,
pos: Position,
) -> Result<(StateWorkingSet<'a>, Id, Span, usize)> {
) -> Result<(StateWorkingSet<'a>, Id, Span, Span)> {
let (block, file_span, working_set) = self
.parse_file(engine_state, uri, false)
.ok_or_else(|| miette!("\nFailed to parse current file"))?;
@@ -365,7 +378,7 @@ impl LanguageServer {
let location = file.offset_at(pos) as usize + file_span.start;
let (id, span) = ast::find_id(&block, &working_set, &location)
.ok_or_else(|| miette!("\nFailed to find current name"))?;
Ok((working_set, id, span, file_span.start))
Ok((working_set, id, span, file_span))
}

pub(crate) fn parse_file<'a>(
@@ -458,10 +471,7 @@ mod tests {
engine_state.generate_nu_constant();
assert!(load_standard_library(&mut engine_state).is_ok());
let cwd = std::env::current_dir().expect("Could not get current working directory.");
engine_state.add_env_var(
"PWD".into(),
nu_protocol::Value::test_string(cwd.to_string_lossy()),
);
engine_state.add_env_var("PWD".into(), Value::test_string(cwd.to_string_lossy()));
if let Some(code) = nu_config_code {
assert!(merge_input(code.as_bytes(), &mut engine_state, &mut Stack::new()).is_ok());
}
@@ -162,7 +162,7 @@ mod tests {
serde_json::json!({
"contents": {
"kind": "markdown",
"value": "Create a variable and give it a value.\n\nThis command is a parser keyword. For details, check:\n https://www.nushell.sh/book/thinking_in_nu.html\n---\n### Usage \n```nu\n let {flags} <var_name> <initial_value>\n```\n\n### Flags\n\n `-h`, `--help` - Display the help message for this command\n\n\n### Parameters\n\n `var_name: any` - Variable name.\n\n `initial_value: any` - Equals sign followed by value.\n\n\n### Input/output types\n\n```nu\n any | nothing\n\n```\n### Example(s)\n Set a variable to a value\n```nu\n let x = 10\n```\n Set a variable to the result of an expression\n```nu\n let x = 10 + 100\n```\n Set a variable based on the condition\n```nu\n let x = if false { -1 } else { 1 }\n```\n"
"value": "Create a variable and give it a value.\n\nThis command is a parser keyword. For details, check:\n https://www.nushell.sh/book/thinking_in_nu.html\n---\n### Usage \n```nu\n let {flags} <var_name> = <initial_value>\n```\n\n### Flags\n\n `-h`, `--help` - Display the help message for this command\n\n\n### Parameters\n\n `var_name`: `<vardecl>` - Variable name.\n\n `initial_value`: `<variable>` - Equals sign followed by value.\n\n\n### Input/output types\n\n```nu\n any | nothing\n\n```\n### Example(s)\n Set a variable to a value\n```nu\n let x = 10\n```\n Set a variable to the result of an expression\n```nu\n let x = 10 + 100\n```\n Set a variable based on the condition\n```nu\n let x = if false { -1 } else { 1 }\n```\n"
}
})
);
@@ -5,7 +5,7 @@ use lsp_types::{
use nu_protocol::{
ast::{Argument, Call, Expr, Expression, FindMapResult, Traverse},
engine::StateWorkingSet,
PositionalArg, Signature,
Flag, PositionalArg, Signature, SyntaxShape, Value,
};

use crate::{uri_to_path, LanguageServer};
@@ -35,34 +35,85 @@ fn find_active_internal_call<'a>(
}
}

impl LanguageServer {
pub(crate) fn get_signature_label(signature: &Signature) -> String {
pub(crate) fn display_flag(flag: &Flag, verbitam: bool) -> String {
let md_backtick = if verbitam { "`" } else { "" };
let mut text = String::new();
if let Some(short_flag) = flag.short {
text.push_str(&format!("{md_backtick}-{short_flag}{md_backtick}"));
}
if !flag.long.is_empty() {
if flag.short.is_some() {
text.push_str(", ");
}
text.push_str(&format!("{md_backtick}--{}{md_backtick}", flag.long));
}
text
}

pub(crate) fn doc_for_arg(
syntax_shape: Option<SyntaxShape>,
desc: String,
default_value: Option<Value>,
optional: bool,
) -> String {
let mut text = String::new();
if let Some(mut shape) = syntax_shape {
if let SyntaxShape::Keyword(_, inner_shape) = shape {
shape = *inner_shape;
}
text.push_str(&format!(": `<{}>`", shape));
}
if !(desc.is_empty() && default_value.is_none()) || optional {
text.push_str(" -")
};
if !desc.is_empty() {
text.push_str(&format!(" {}", desc));
};
if let Some(value) = default_value.as_ref().and_then(|v| v.coerce_str().ok()) {
text.push_str(&format!(
" ({}default: `{value}`)",
if optional { "optional, " } else { "" }
));
} else if optional {
text.push_str(" (optional)");
}
text
}

pub(crate) fn get_signature_label(signature: &Signature, indent: bool) -> String {
let expand_keyword = |arg: &PositionalArg, optional: bool| match &arg.shape {
SyntaxShape::Keyword(kwd, _) => {
format!("{} <{}>", String::from_utf8_lossy(kwd), arg.name)
}
_ => {
if optional {
arg.name.clone()
} else {
format!("<{}>", arg.name)
}
}
};
let mut label = String::new();
label.push_str(&format!(" {}", signature.name));
if indent {
label.push_str(" ");
}
label.push_str(&signature.name);
if !signature.named.is_empty() {
label.push_str(" {flags}");
}
for required_arg in &signature.required_positional {
label.push_str(&format!(" <{}>", required_arg.name));
label.push_str(&format!(" {}", expand_keyword(required_arg, false)));
}
for optional_arg in &signature.optional_positional {
let value_info = if let Some(value) = optional_arg
.default_value
.as_ref()
.and_then(|v| v.coerce_str().ok())
{
format!("={}", value)
} else {
String::new()
};
label.push_str(&format!(" <{}?{}>", optional_arg.name, value_info));
label.push_str(&format!(" ({})", expand_keyword(optional_arg, true)));
}
if let Some(arg) = &signature.rest_positional {
label.push_str(&format!(" <...{}>", arg.name));
label.push_str(&format!(" ...({})", arg.name));
}
label
}

impl LanguageServer {
pub(crate) fn get_signature_help(
&mut self,
params: &SignatureHelpParams,
@@ -78,7 +129,7 @@ impl LanguageServer {
let file_text = file.get_content(None).to_owned();
drop(docs);

let engine_state = self.new_engine_state();
let engine_state = self.new_engine_state(Some(&path_uri));
let mut working_set = StateWorkingSet::new(&engine_state);

// NOTE: in case the cursor is at the end of the call expression
@@ -120,6 +171,7 @@ impl LanguageServer {
find_active_internal_call(expr, &working_set, pos_to_search)
})?;
let active_signature = working_set.get_decl(active_call.decl_id).signature();
let label = get_signature_label(&active_signature, false);

let mut param_num_before_pos = 0;
for arg in active_call.arguments.iter() {
@@ -133,39 +185,51 @@ impl LanguageServer {
break;
}
}

let str_to_doc = |s: String| {
Some(Documentation::MarkupContent(MarkupContent {
kind: MarkupKind::Markdown,
value: s,
}))
};
let arg_to_param_info = |arg: &PositionalArg| ParameterInformation {
label: lsp_types::ParameterLabel::Simple(arg.name.to_owned()),
documentation: str_to_doc(format!(
": `<{}>` - {}",
arg.shape.to_type(),
arg.desc.to_owned()
let arg_to_param_info = |arg: PositionalArg, optional: bool| ParameterInformation {
label: lsp_types::ParameterLabel::Simple(arg.name),
documentation: str_to_doc(doc_for_arg(
Some(arg.shape),
arg.desc,
arg.default_value,
optional,
)),
};
let flag_to_param_info = |flag: Flag| ParameterInformation {
label: lsp_types::ParameterLabel::Simple(display_flag(&flag, false)),
documentation: str_to_doc(doc_for_arg(flag.arg, flag.desc, flag.default_value, false)),
};

// positional args
let mut parameters: Vec<ParameterInformation> = active_signature
.required_positional
.iter()
.map(arg_to_param_info)
.into_iter()
.map(|arg| arg_to_param_info(arg, false))
.chain(
active_signature
.optional_positional
.iter()
.map(arg_to_param_info),
.into_iter()
.map(|arg| arg_to_param_info(arg, true)),
)
.collect();
if let Some(rest_arg) = &active_signature.rest_positional {
parameters.push(arg_to_param_info(rest_arg));
if let Some(rest_arg) = active_signature.rest_positional {
parameters.push(arg_to_param_info(rest_arg, false));
}

let max_idx = parameters.len().saturating_sub(1) as u32;
let active_parameter = Some(param_num_before_pos.min(max_idx));
// also include flags in the end, just for documentation
parameters.extend(active_signature.named.into_iter().map(flag_to_param_info));

Some(SignatureHelp {
signatures: vec![SignatureInformation {
label: Self::get_signature_label(&active_signature),
label,
documentation: str_to_doc(active_signature.description),
parameters: Some(parameters),
active_parameter,
@@ -233,7 +297,7 @@ mod tests {
actual: result_from_message(resp),
expected: serde_json::json!({
"signatures": [{
"label": " str substring {flags} <range> <...rest>",
"label": "str substring {flags} <range> ...(rest)",
"parameters": [ ],
"activeParameter": 0
}],
@@ -263,7 +327,7 @@ mod tests {
assert_json_include!(
actual: result_from_message(resp),
expected: serde_json::json!({ "signatures": [{
"label": " str substring {flags} <range> <...rest>",
"label": "str substring {flags} <range> ...(rest)",
"activeParameter": 1
}]})
);
@@ -272,7 +336,7 @@ mod tests {
assert_json_include!(
actual: result_from_message(resp),
expected: serde_json::json!({ "signatures": [{
"label": " str substring {flags} <range> <...rest>",
"label": "str substring {flags} <range> ...(rest)",
"activeParameter": 0
}]})
);
@@ -281,7 +345,7 @@ mod tests {
assert_json_include!(
actual: result_from_message(resp),
expected: serde_json::json!({ "signatures": [{
"label": " echo {flags} <...rest>",
"label": "echo {flags} ...(rest)",
"activeParameter": 0
}]})
);
@@ -291,8 +355,8 @@ mod tests {
fn signature_help_on_custom_commands() {
let config_str = r#"export def "foo bar" [
p1: int
p2: string,
p3?: int = 1 # doc
p2: string, # doc
p3?: int = 1
] {}"#;
let (client_connection, _recv) = initialize_language_server(Some(config_str), None);

@@ -308,11 +372,11 @@ mod tests {
actual: result_from_message(resp),
expected: serde_json::json!({
"signatures": [{
"label": " foo bar {flags} <p1> <p2> <p3?=1>",
"label": "foo bar {flags} <p1> <p2> (p3)",
"parameters": [
{"label": "p1", "documentation": {"value": ": `<int>` - "}},
{"label": "p2", "documentation": {"value": ": `<string>` - "}},
{"label": "p3", "documentation": {"value": ": `<int>` - doc"}},
{"label": "p1", "documentation": {"value": ": `<int>`"}},
{"label": "p2", "documentation": {"value": ": `<string>` - doc"}},
{"label": "p3", "documentation": {"value": ": `<int>` - (optional, default: `1`)"}},
],
"activeParameter": 1
}],
@@ -326,11 +390,12 @@ mod tests {
actual: result_from_message(resp),
expected: serde_json::json!({
"signatures": [{
"label": " foo baz {flags} <p1> <p2> <p3?=1>",
"label": "foo baz {flags} <p1> <p2> (p3)",
"parameters": [
{"label": "p1", "documentation": {"value": ": `<int>` - "}},
{"label": "p2", "documentation": {"value": ": `<string>` - "}},
{"label": "p3", "documentation": {"value": ": `<int>` - doc"}},
{"label": "p1", "documentation": {"value": ": `<int>`"}},
{"label": "p2", "documentation": {"value": ": `<string>` - doc"}},
{"label": "p3", "documentation": {"value": ": `<int>` - (optional, default: `1`)"}},
{"label": "-h, --help", "documentation": {"value": " - Display the help message for this command"}},
],
"activeParameter": 2
}],
@ -270,8 +270,8 @@ impl LanguageServer {
|
||||
&mut self,
|
||||
params: &DocumentSymbolParams,
|
||||
) -> Option<DocumentSymbolResponse> {
|
||||
let engine_state = self.new_engine_state();
|
||||
let uri = params.text_document.uri.to_owned();
|
||||
let engine_state = self.new_engine_state(Some(&uri));
|
||||
let docs = self.docs.lock().ok()?;
|
||||
self.symbol_cache.update(&uri, &engine_state, &docs);
|
||||
self.symbol_cache
|
||||
@ -284,7 +284,7 @@ impl LanguageServer {
|
||||
params: &WorkspaceSymbolParams,
|
||||
) -> Option<WorkspaceSymbolResponse> {
|
||||
if self.symbol_cache.any_dirty() {
|
||||
let engine_state = self.new_engine_state();
|
||||
let engine_state = self.new_engine_state(None);
|
||||
let docs = self.docs.lock().ok()?;
|
||||
self.symbol_cache.update_all(&engine_state, &docs);
|
||||
}
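
The repeated `new_engine_state()` → `new_engine_state(Some(&uri))` / `new_engine_state(None)` changes suggest the helper now takes the URI of the document a request targets. A toy sketch of that shape, with entirely hypothetical types (this is not the nu-lsp implementation):

```rust
// Entirely hypothetical types; only the Option<&uri>-style parameter mirrors the diff.
#[derive(Clone, Default)]
struct EngineState {
    current_file: Option<String>,
}

struct Server {
    base: EngineState,
}

impl Server {
    // Clone the base state, optionally pinned to the document the request is about.
    fn new_engine_state(&self, uri: Option<&str>) -> EngineState {
        let mut state = self.base.clone();
        state.current_file = uri.map(|u| u.to_string());
        state
    }
}

fn main() {
    let server = Server { base: EngineState::default() };
    let per_doc = server.new_engine_state(Some("file:///tmp/foo.nu"));
    let global = server.new_engine_state(None);
    assert_eq!(per_doc.current_file.as_deref(), Some("file:///tmp/foo.nu"));
    assert!(global.current_file.is_none());
}
```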

@ -1,5 +1,5 @@
use crate::{
ast::{find_id, find_reference_by_id},
ast::{self, find_id, find_reference_by_id},
path_to_uri, span_to_range, uri_to_path, Id, LanguageServer,
};
use lsp_textdocument::FullTextDocument;
@ -46,6 +46,26 @@ fn find_nu_scripts_in_folder(folder_uri: &Uri) -> Result<nu_glob::Paths> {
nu_glob::glob(&pattern, Uninterruptible).into_diagnostic()
}

/// HACK: when current file is imported (use keyword) by others in the workspace,
/// it will get parsed a second time via `parse_module_block`, so that its definitions'
/// ids are renewed, making it harder to track the references.
///
/// FIXME: cross-file shadowing can still cause false-positive/false-negative cases
///
/// This is a workaround to track the new id
struct IDTracker {
/// ID to search, renewed on `parse_module_block`
pub id: Id,
/// Span of the original instance under the cursor
pub span: Span,
/// Name of the definition
pub name: String,
/// Span of the original file where the request comes from
pub file_span: Span,
/// The redundant parsing should only happen once
pub renewed: bool,
}

impl LanguageServer {
/// Get initial workspace folders from initialization response
pub(crate) fn initialize_workspace_folders(
@ -66,12 +86,12 @@ impl LanguageServer {
&mut self,
params: &DocumentHighlightParams,
) -> Option<Vec<DocumentHighlight>> {
let mut engine_state = self.new_engine_state();
let path_uri = params
.text_document_position_params
.text_document
.uri
.to_owned();
let mut engine_state = self.new_engine_state(Some(&path_uri));
let (block, file_span, working_set) =
self.parse_file(&mut engine_state, &path_uri, false)?;
let docs = &self.docs.lock().ok()?;
@ -137,31 +157,38 @@ impl LanguageServer {
timeout: u128,
) -> Option<Vec<Location>> {
self.occurrences = BTreeMap::new();
let mut engine_state = self.new_engine_state();
let path_uri = params.text_document_position.text_document.uri.to_owned();
let (_, id, span, _) = self
let mut engine_state = self.new_engine_state(Some(&path_uri));

let (working_set, id, span, file_span) = self
.parse_and_find(
&mut engine_state,
&path_uri,
params.text_document_position.position,
)
.ok()?;
// have to clone it again in order to move to another thread
let engine_state = self.new_engine_state();
let current_workspace_folder = self.get_workspace_folder_by_uri(&path_uri)?;
let token = params
.work_done_progress_params
.work_done_token
.to_owned()
.unwrap_or(ProgressToken::Number(1));

let id_tracker = IDTracker {
id,
span,
file_span,
name: String::from_utf8_lossy(working_set.get_span_contents(span)).to_string(),
renewed: false,
};

self.channels = self
.find_reference_in_workspace(
engine_state,
current_workspace_folder,
id,
span,
token.clone(),
"Finding references ...".to_string(),
id_tracker,
)
.ok();
// TODO: WorkDoneProgress -> PartialResults for quicker response
@ -200,10 +227,10 @@ impl LanguageServer {
serde_json::from_value(request.params).into_diagnostic()?;
self.occurrences = BTreeMap::new();

let mut engine_state = self.new_engine_state();
let path_uri = params.text_document.uri.to_owned();
let mut engine_state = self.new_engine_state(Some(&path_uri));

let (working_set, id, span, file_offset) =
let (working_set, id, span, file_span) =
self.parse_and_find(&mut engine_state, &path_uri, params.position)?;

if let Id::Value(_) = id {
@ -222,7 +249,7 @@ impl LanguageServer {
let file = docs
.get_document(&path_uri)
.ok_or_else(|| miette!("\nFailed to get document"))?;
let range = span_to_range(&span, file, file_offset);
let range = span_to_range(&span, file, file_span.start);
let response = PrepareRenameResponse::Range(range);
self.connection
.sender
@ -233,20 +260,24 @@ impl LanguageServer {
}))
.into_diagnostic()?;

// have to clone it again in order to move to another thread
let engine_state = self.new_engine_state();
let current_workspace_folder = self
.get_workspace_folder_by_uri(&path_uri)
.ok_or_else(|| miette!("\nCurrent file is not in any workspace"))?;
// now continue parsing on other files in the workspace
let id_tracker = IDTracker {
id,
span,
file_span,
name: String::from_utf8_lossy(working_set.get_span_contents(span)).to_string(),
renewed: false,
};
self.channels = self
.find_reference_in_workspace(
engine_state,
current_workspace_folder,
id,
span,
ProgressToken::Number(0),
"Preparing rename ...".to_string(),
id_tracker,
)
.ok();
Ok(())
@ -256,7 +287,7 @@ impl LanguageServer {
working_set: &mut StateWorkingSet,
file: &FullTextDocument,
fp: &Path,
id: &Id,
id_tracker: &mut IDTracker,
) -> Option<Vec<Span>> {
let block = nu_parser::parse(
working_set,
@ -264,7 +295,25 @@ impl LanguageServer {
file.get_content(None).as_bytes(),
false,
);
let references: Vec<Span> = find_reference_by_id(&block, working_set, id);
// NOTE: Renew the id if there's a module with the same span as the original file.
// This requires that the initial parsing results get merged in the engine_state,
// typically they're cached with diagnostics before the prepare_rename/references requests,
// so that we don't need to clone and merge delta again.
if (!id_tracker.renewed)
&& working_set
.find_module_by_span(id_tracker.file_span)
.is_some()
{
if let Some(new_block) = working_set.find_block_by_span(id_tracker.file_span) {
if let Some((new_id, _)) =
ast::find_id(&new_block, working_set, &id_tracker.span.start)
{
id_tracker.id = new_id;
}
}
id_tracker.renewed = true;
}
let references: Vec<Span> = find_reference_by_id(&block, working_set, &id_tracker.id);

// add_block to avoid repeated parsing
working_set.add_block(block);
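
The NOTE block above is the heart of the `IDTracker` workaround: the first time the original file shows up re-parsed as a module, the tracked ID is resolved again at the remembered cursor span and then frozen. A stripped-down, hypothetical model of that "renew once" pattern (the closure stands in for `find_block_by_span` + `find_id`; the types are illustrative only):

```rust
// A simplified model of the "renew once" idea: keep the id and a flag, let the
// first successful lookup replace the id, and never try again after that.
struct IdTracker {
    id: usize,
    renewed: bool,
}

impl IdTracker {
    fn renew_once<F: FnOnce() -> Option<usize>>(&mut self, lookup: F) {
        if !self.renewed {
            if let Some(new_id) = lookup() {
                self.id = new_id; // definition id changed after the module re-parse
            }
            self.renewed = true; // the redundant parsing should only happen once
        }
    }
}

fn main() {
    let mut tracker = IdTracker { id: 7, renewed: false };
    tracker.renew_once(|| Some(42)); // first re-parse of the original file
    tracker.renew_once(|| Some(99)); // ignored: already renewed
    assert_eq!(tracker.id, 42);
}
```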
@ -304,10 +353,9 @@ impl LanguageServer {
&self,
engine_state: EngineState,
current_workspace_folder: WorkspaceFolder,
id: Id,
span: Span,
token: ProgressToken,
message: String,
mut id_tracker: IDTracker,
) -> Result<(
crossbeam_channel::Sender<bool>,
Arc<crossbeam_channel::Receiver<InternalMessage>>,
@ -333,7 +381,7 @@ impl LanguageServer {
.filter_map(|p| p.ok())
.collect();
let len = scripts.len();
let definition_span = Self::find_definition_span_by_id(&working_set, &id);
let definition_span = Self::find_definition_span_by_id(&working_set, &id_tracker.id);

for (i, fp) in scripts.iter().enumerate() {
#[cfg(test)]
@ -363,9 +411,7 @@ impl LanguageServer {
};
// skip if the file does not contain what we're looking for
let content_string = String::from_utf8_lossy(&bytes);
let text_to_search =
String::from_utf8_lossy(working_set.get_span_contents(span));
if !content_string.contains(text_to_search.as_ref()) {
if !content_string.contains(&id_tracker.name) {
// progress without any data
data_sender
.send(InternalMessage::OnGoing(token.clone(), percentage))
@ -374,17 +420,17 @@ impl LanguageServer {
}
&FullTextDocument::new("nu".to_string(), 0, content_string.into())
};
let _ = Self::find_reference_in_file(&mut working_set, file, fp, &id).map(
|mut refs| {
let _ = Self::find_reference_in_file(&mut working_set, file, fp, &mut id_tracker)
.map(|mut refs| {
let file_span = working_set
.get_span_for_filename(fp.to_string_lossy().as_ref())
.unwrap_or(Span::unknown());
if let Some(extra_span) = Self::reference_not_in_ast(
&id,
&id_tracker.id,
&working_set,
definition_span,
file_span,
span,
id_tracker.span,
) {
if !refs.contains(&extra_span) {
refs.push(extra_span)
@ -400,8 +446,7 @@ impl LanguageServer {
data_sender
.send(InternalMessage::OnGoing(token.clone(), percentage))
.ok();
},
);
});
}
data_sender
.send(InternalMessage::Finished(token.clone()))
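
The `content_string.contains(&id_tracker.name)` check above is a cheap textual pre-filter: files that never mention the definition's name are skipped before any parsing happens. Roughly, and with made-up inputs:

```rust
// Made-up inputs; the point is only the order of checks: substring test first,
// full parse only for files that could possibly mention the definition by name.
fn worth_parsing(file_contents: &str, definition_name: &str) -> bool {
    file_contents.contains(definition_name)
}

fn main() {
    assert!(worth_parsing("use foo.nu *\nfoo bar", "foo"));
    assert!(!worth_parsing("let unrelated = 1", "foo"));
}
```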

@ -2678,7 +2678,7 @@ pub fn parse_unit_value<'res>(

if let Some((unit, name, convert)) = unit_groups.iter().find(|x| value.ends_with(x.1)) {
let lhs_len = value.len() - name.len();
let lhs = strip_underscores(value[..lhs_len].as_bytes());
let lhs = strip_underscores(&value.as_bytes()[..lhs_len]);
let lhs_span = Span::new(span.start, span.start + lhs_len);
let unit_span = Span::new(span.start + lhs_len, span.end);
if lhs.ends_with('$') {
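
One observable difference in the `strip_underscores` change above: `&value[..lhs_len]` slices a `&str` and panics if `lhs_len` is not a UTF-8 character boundary, while `&value.as_bytes()[..lhs_len]` only bounds-checks a byte slice. Whether that edge case is the actual motivation here is our assumption; the behavioral difference itself is easy to check:

```rust
fn main() {
    let value = "a\u{00B5}b"; // 'µ' takes two bytes, so byte index 2 falls inside a character
    assert!(!value.is_char_boundary(2));
    // Slicing the byte slice at 2 is merely a bounds check...
    assert_eq!(&value.as_bytes()[..2], b"a\xC2");
    // ...whereas `&value[..2]` would panic at runtime with
    // "byte index 2 is not a char boundary".
}
```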
@ -2784,7 +2784,7 @@ pub const FILESIZE_UNIT_GROUPS: &[UnitGroup] = &[
(
Unit::Filesize(FilesizeUnit::EiB),
"EIB",
Some((Unit::Filesize(FilesizeUnit::EiB), 1024)),
Some((Unit::Filesize(FilesizeUnit::PiB), 1024)),
),
(Unit::Filesize(FilesizeUnit::B), "B", None),
];

@ -46,7 +46,7 @@ pub fn expand_ndots(path: impl AsRef<Path>) -> PathBuf {
pub fn expand_dots(path: impl AsRef<Path>) -> PathBuf {
// Check if the last component of the path is a normal component.
fn last_component_is_normal(path: &Path) -> bool {
matches!(path.components().last(), Some(Component::Normal(_)))
matches!(path.components().next_back(), Some(Component::Normal(_)))
}

let path = path.as_ref();
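
Several hunks in this commit swap `.last()` for `.next_back()` on double-ended iterators, presumably to satisfy Clippy's `double_ended_iterator_last` lint. For a `DoubleEndedIterator` the two return the same element, but `next_back()` takes it directly from the back instead of walking the whole iterator. A quick check using `Path::components()`, as in the hunk above (illustrative path only):

```rust
use std::path::{Component, Path};

fn main() {
    let path = Path::new("/usr/local/bin");
    // For a DoubleEndedIterator, next_back() yields the same item as last()
    // without consuming the iterator from the front.
    let via_last = path.components().last();
    let via_next_back = path.components().next_back();
    assert_eq!(via_last, via_next_back);
    assert_eq!(via_next_back, Some(Component::Normal("bin".as_ref())));
}
```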
@ -61,7 +61,7 @@ pub fn expand_dots(path: impl AsRef<Path>) -> PathBuf {
// no-op
}
_ => {
let prev_component = result.components().last();
let prev_component = result.components().next_back();
if prev_component == Some(Component::RootDir) && component == Component::ParentDir {
continue;
}

@ -29,7 +29,7 @@ fn expand_tilde_with_home(path: impl AsRef<Path>, home: Option<PathBuf>) -> Path
};
}

let path_last_char = path.as_os_str().to_string_lossy().chars().last();
let path_last_char = path.as_os_str().to_string_lossy().chars().next_back();
let need_trailing_slash = path_last_char == Some('/') || path_last_char == Some('\\');

match home {
@ -94,7 +94,7 @@ fn user_home_dir(username: &str) -> PathBuf {
if !cfg!(target_os = "android")
&& expected_path
.components()
.last()
.next_back()
.map(|last| last != Component::Normal(username.as_ref()))
.unwrap_or(false)
{

@ -884,7 +884,7 @@ impl<'a> StateWorkingSet<'a> {
.active_overlay_names(&mut removed_overlays)
.iter()
.rev()
.last()
.next_back()
{
return last_name;
}
@ -900,7 +900,7 @@ impl<'a> StateWorkingSet<'a> {
if let Some(last_overlay) = scope_frame
.active_overlays(&mut removed_overlays)
.rev()
.last()
.next_back()
{
return last_overlay;
}

@ -17,6 +17,8 @@ use std::{
};

/// Create a Value for `$nu`.
// Note: When adding new constants to $nu, please update the doc at https://nushell.sh/book/special_variables.html
// or at least add a TODO/reminder issue in nushell.github.io so we don't lose track of it.
pub(crate) fn create_nu_constant(engine_state: &EngineState, span: Span) -> Value {
fn canonicalize_path(engine_state: &EngineState, path: &Path) -> PathBuf {
#[allow(deprecated)]

@ -88,7 +88,7 @@ pub fn lev_distance_with_substrings(a: &str, b: &str, limit: usize) -> Option<us
1 // Exact substring match, but not a total word match so return non-zero
} else if !big_len_diff {
// Not a big difference in length, discount cost of length difference
score + (len_diff + 1) / 2
score + len_diff.div_ceil(2)
} else {
// A big difference in length, add back the difference in length to the score
score + len_diff
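
`(len_diff + 1) / 2` and `len_diff.div_ceil(2)` agree for every `usize` value (both round the halved difference up), and `div_ceil` additionally cannot overflow the way `len_diff + 1` can at `usize::MAX`; a quick check:

```rust
fn main() {
    for len_diff in [0usize, 1, 2, 3, 10, 11] {
        assert_eq!((len_diff + 1) / 2, len_diff.div_ceil(2));
    }
    // div_ceil also avoids the overflow that `len_diff + 1` would hit here.
    assert_eq!(usize::MAX.div_ceil(2), usize::MAX / 2 + 1);
}
```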

@ -133,7 +133,7 @@ pub trait FromValue: Sized {
Type::Custom(
any::type_name::<Self>()
.split(':')
.last()
.next_back()
.expect("str::split returns an iterator with at least one element")
.to_string()
.into_boxed_str(),

@ -119,7 +119,7 @@ fn build_vertical_map(record: Record, config: &Config) -> TableValue {
fn string_append_to_width(key: &mut String, max: usize) {
let width = string_width(key);
let rest = max - width;
key.extend(std::iter::repeat(' ').take(rest));
key.extend(std::iter::repeat_n(' ', rest));
}

fn build_vertical_array(vals: Vec<Value>, config: &Config) -> TableValue {

@ -93,7 +93,7 @@

use std::cmp::max;
use std::fmt;
use std::iter::repeat;
use std::iter::repeat_n;
use unicode_width::UnicodeWidthStr;

fn unicode_width_strip_ansi(astring: &str) -> usize {
@ -290,7 +290,7 @@ impl Grid {
}

fn column_widths(&self, num_lines: usize, num_columns: usize) -> Dimensions {
let mut widths: Vec<Width> = repeat(0).take(num_columns).collect();
let mut widths: Vec<Width> = repeat_n(0, num_columns).collect();
for (index, cell) in self.cells.iter().enumerate() {
let index = match self.options.direction {
Direction::LeftToRight => index % num_columns,
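
`iter::repeat_n(x, n)` (recently stabilized in std) yields the same items as `iter::repeat(x).take(n)`, so the swaps in this commit, including the `string_append_to_width` padding and the `column_widths` initialization above, are behavior-preserving. A small demonstration:

```rust
use std::iter;

fn main() {
    // Both forms produce the same sequence of items.
    let padded: Vec<u8> = iter::repeat_n(0u8, 4).collect();
    let taken: Vec<u8> = iter::repeat(0u8).take(4).collect();
    assert_eq!(padded, taken);

    // The same swap works for padding a String, as in string_append_to_width.
    let mut key = String::from("name");
    key.extend(iter::repeat_n(' ', 3));
    assert_eq!(key, "name   ");
}
```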

@ -317,7 +317,7 @@ impl NuDataFrame {
let series = self.as_series(span)?;
let column = conversion::create_column_from_series(&series, row, row + 1, span)?;

if column.len() == 0 {
if column.is_empty() {
Err(ShellError::AccessEmptyContent { span })
} else {
let value = column

2
tests/fixtures/lsp/completion/command.nu
vendored
@ -1,6 +1,6 @@
config n
config n foo bar -
config n foo bar l --l
config n foo bar c --l

# detail
def "config n foo bar" [

2
tests/fixtures/lsp/completion/fallback.nu
vendored
@ -3,6 +3,6 @@ let greeting = "Hello"
echo $gre
| st

ls l
ls c

$greeting not-h

4
tests/fixtures/lsp/hints/signature.nu
vendored
@ -11,7 +11,7 @@ foo bar 1 2 3
foo baz 1 2 3
def "foo baz" [
p1: int
p2: string,
p3?: int = 1 # doc
p2: string, # doc
p3?: int = 1
] {}
echo