mirror of https://github.com/nushell/nushell.git, synced 2025-04-29 15:44:28 +02:00

Merge branch 'main' into polars_categorical_2
This commit is contained in: commit 4d5329b4d1
.github/workflows/typos.yml (2 changed lines, vendored)
@@ -10,4 +10,4 @@ jobs:
         uses: actions/checkout@v4.1.7

       - name: Check spelling
-        uses: crate-ci/typos@v1.31.0
+        uses: crate-ci/typos@v1.31.1
Cargo.lock (76 changed lines, generated)
Cargo.lock is generated; its 76 changed lines are the mechanical fallout of the manifest changes below (version bumps, checksum updates, and dependency-list updates). In summary:

  array-init-cursor  0.2.0  -> 0.2.1
  bytesize           1.3.2  -> 1.3.3
  human-date-parser  0.2.0  -> 0.3.1  (dependency list now includes pest_consume)
  is_debug           1.0.2  -> 1.1.0
  rand               0.8.5  -> 0.9.0  (in the nu-command dependency list)
  rand_chacha        0.3.1  -> 0.9.0  (in the nu-command dependency list)
  shadow-rs          0.38.0 -> 1.1.1  (dependencies: const_format, is_debug, time, tzdb)

New packages: pest_consume 1.1.3, pest_consume_macros 1.1.0, tz-rs 0.7.0, tzdb 0.7.2, tzdb_data 0.2.1.
Two nu-* dependency lists gain one entry each, matching the nu-std dev-dependency additions in the manifests below.
Cargo.toml (10 changed lines)
@@ -10,7 +10,7 @@ homepage = "https://www.nushell.sh"
 license = "MIT"
 name = "nu"
 repository = "https://github.com/nushell/nushell"
-rust-version = "1.83.0"
+rust-version = "1.84.1"
 version = "0.103.1"

 # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
@@ -70,7 +70,7 @@ bracoxide = "0.1.5"
 brotli = "7.0"
 byteorder = "1.5"
 bytes = "1"
-bytesize = "1.3.1"
+bytesize = "1.3.3"
 calamine = "0.26.1"
 chardetng = "0.1.17"
 chrono = { default-features = false, version = "0.4.34" }
@@ -91,7 +91,7 @@ fancy-regex = "0.14"
 filesize = "0.2"
 filetime = "0.2"
 heck = "0.5.0"
-human-date-parser = "0.2.0"
+human-date-parser = "0.3.0"
 indexmap = "2.8"
 indicatif = "0.17"
 interprocess = "2.2.0"
@@ -135,9 +135,9 @@ quick-xml = "0.37.0"
 quickcheck = "1.0"
 quickcheck_macros = "1.0"
 quote = "1.0"
-rand = "0.8"
+rand = "0.9"
 getrandom = "0.2" # pick same version that rand requires
-rand_chacha = "0.3.1"
+rand_chacha = "0.9"
 ratatui = "0.29"
 rayon = "1.10"
 reedline = "0.39.0"
@@ -13,6 +13,7 @@ bench = false

 [dev-dependencies]
 nu-cmd-lang = { path = "../nu-cmd-lang", version = "0.103.1" }
 nu-command = { path = "../nu-command", version = "0.103.1" }
+nu-std = { path = "../nu-std", version = "0.103.1" }
 nu-test-support = { path = "../nu-test-support", version = "0.103.1" }
 rstest = { workspace = true, default-features = false }
 tempfile = { workspace = true }
@ -1,21 +1,20 @@
|
||||
use crate::completions::{
|
||||
base::{SemanticSuggestion, SuggestionKind},
|
||||
AttributableCompletion, AttributeCompletion, CellPathCompletion, CommandCompletion, Completer,
|
||||
CompletionOptions, CustomCompletion, DirectoryCompletion, DotNuCompletion, FileCompletion,
|
||||
FlagCompletion, OperatorCompletion, VariableCompletion,
|
||||
CompletionOptions, CustomCompletion, DirectoryCompletion, DotNuCompletion,
|
||||
ExportableCompletion, FileCompletion, FlagCompletion, OperatorCompletion, VariableCompletion,
|
||||
};
|
||||
use nu_color_config::{color_record_to_nustyle, lookup_ansi_color_style};
|
||||
use nu_engine::eval_block;
|
||||
use nu_parser::{flatten_expression, parse};
|
||||
use nu_parser::{flatten_expression, parse, parse_module_file_or_dir};
|
||||
use nu_protocol::{
|
||||
ast::{Argument, Block, Expr, Expression, FindMapResult, Traverse},
|
||||
ast::{Argument, Block, Expr, Expression, FindMapResult, ListItem, Traverse},
|
||||
debugger::WithoutDebug,
|
||||
engine::{Closure, EngineState, Stack, StateWorkingSet},
|
||||
PipelineData, Span, Type, Value,
|
||||
};
|
||||
use reedline::{Completer as ReedlineCompleter, Suggestion};
|
||||
use std::{str, sync::Arc};
|
||||
|
||||
use super::base::{SemanticSuggestion, SuggestionKind};
|
||||
use std::sync::Arc;
|
||||
|
||||
/// Used as the function `f` in find_map Traverse
|
||||
///
|
||||
@ -57,8 +56,13 @@ fn find_pipeline_element_by_position<'a>(
|
||||
Expr::FullCellPath(fcp) => fcp
|
||||
.head
|
||||
.find_map(working_set, &closure)
|
||||
.or(Some(expr))
|
||||
.map(FindMapResult::Found)
|
||||
// e.g. use std/util [<tab>
|
||||
.or_else(|| {
|
||||
(fcp.head.span.contains(pos) && matches!(fcp.head.expr, Expr::List(_)))
|
||||
.then_some(FindMapResult::Continue)
|
||||
})
|
||||
.or(Some(FindMapResult::Found(expr)))
|
||||
.unwrap_or_default(),
|
||||
Expr::Var(_) => FindMapResult::Found(expr),
|
||||
Expr::AttributeBlock(ab) => ab
|
||||
@ -127,6 +131,18 @@ struct Context<'a> {
|
||||
offset: usize,
|
||||
}
|
||||
|
||||
/// For argument completion
|
||||
struct PositionalArguments<'a> {
|
||||
/// command name
|
||||
command_head: &'a str,
|
||||
/// indices of positional arguments
|
||||
positional_arg_indices: Vec<usize>,
|
||||
/// argument list
|
||||
arguments: &'a [Argument],
|
||||
/// expression of current argument
|
||||
expr: &'a Expression,
|
||||
}
|
||||
|
||||
impl Context<'_> {
|
||||
fn new<'a>(
|
||||
working_set: &'a StateWorkingSet,
|
||||
@ -328,7 +344,8 @@ impl NuCompleter {
|
||||
// NOTE: the argument to complete is not necessarily the last one
|
||||
// for lsp completion, we don't trim the text,
|
||||
// so that `def`s after pos can be completed
|
||||
for arg in call.arguments.iter() {
|
||||
let mut positional_arg_indices = Vec::new();
|
||||
for (arg_idx, arg) in call.arguments.iter().enumerate() {
|
||||
let span = arg.span();
|
||||
if span.contains(pos) {
|
||||
// if customized completion specified, it has highest priority
|
||||
@ -378,10 +395,16 @@ impl NuCompleter {
|
||||
Argument::Positional(_) if prefix == b"-" => flag_completion_helper(),
|
||||
// complete according to expression type and command head
|
||||
Argument::Positional(expr) => {
|
||||
let command_head = working_set.get_span_contents(call.head);
|
||||
let command_head = working_set.get_decl(call.decl_id).name();
|
||||
positional_arg_indices.push(arg_idx);
|
||||
self.argument_completion_helper(
|
||||
PositionalArguments {
|
||||
command_head,
|
||||
positional_arg_indices,
|
||||
arguments: &call.arguments,
|
||||
expr,
|
||||
},
|
||||
pos,
|
||||
&ctx,
|
||||
suggestions.is_empty(),
|
||||
)
|
||||
@ -389,6 +412,8 @@ impl NuCompleter {
|
||||
_ => vec![],
|
||||
});
|
||||
break;
|
||||
} else if !matches!(arg, Argument::Named(_)) {
|
||||
positional_arg_indices.push(arg_idx);
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -498,20 +523,97 @@ impl NuCompleter {
|
||||
|
||||
fn argument_completion_helper(
|
||||
&self,
|
||||
command_head: &[u8],
|
||||
expr: &Expression,
|
||||
argument_info: PositionalArguments,
|
||||
pos: usize,
|
||||
ctx: &Context,
|
||||
need_fallback: bool,
|
||||
) -> Vec<SemanticSuggestion> {
|
||||
let PositionalArguments {
|
||||
command_head,
|
||||
positional_arg_indices,
|
||||
arguments,
|
||||
expr,
|
||||
} = argument_info;
|
||||
// special commands
|
||||
match command_head {
|
||||
// complete module file/directory
|
||||
// TODO: if module file already specified,
|
||||
// should parse it to get modules/commands/consts to complete
|
||||
b"use" | b"export use" | b"overlay use" | b"source-env" => {
|
||||
return self.process_completion(&mut DotNuCompletion, ctx);
|
||||
"use" | "export use" | "overlay use" | "source-env"
|
||||
if positional_arg_indices.len() == 1 =>
|
||||
{
|
||||
return self.process_completion(
|
||||
&mut DotNuCompletion {
|
||||
std_virtual_path: command_head != "source-env",
|
||||
},
|
||||
ctx,
|
||||
);
|
||||
}
|
||||
b"which" => {
|
||||
// NOTE: if module file already specified,
|
||||
// should parse it to get modules/commands/consts to complete
|
||||
"use" | "export use" => {
|
||||
let Some(Argument::Positional(Expression {
|
||||
expr: Expr::String(module_name),
|
||||
span,
|
||||
..
|
||||
})) = positional_arg_indices
|
||||
.first()
|
||||
.and_then(|i| arguments.get(*i))
|
||||
else {
|
||||
return vec![];
|
||||
};
|
||||
let module_name = module_name.as_bytes();
|
||||
let (module_id, temp_working_set) = match ctx.working_set.find_module(module_name) {
|
||||
Some(module_id) => (module_id, None),
|
||||
None => {
|
||||
let mut temp_working_set =
|
||||
StateWorkingSet::new(ctx.working_set.permanent_state);
|
||||
let Some(module_id) = parse_module_file_or_dir(
|
||||
&mut temp_working_set,
|
||||
module_name,
|
||||
*span,
|
||||
None,
|
||||
) else {
|
||||
return vec![];
|
||||
};
|
||||
(module_id, Some(temp_working_set))
|
||||
}
|
||||
};
|
||||
let mut exportable_completion = ExportableCompletion {
|
||||
module_id,
|
||||
temp_working_set,
|
||||
};
|
||||
let mut complete_on_list_items = |items: &[ListItem]| -> Vec<SemanticSuggestion> {
|
||||
for item in items {
|
||||
let span = item.expr().span;
|
||||
if span.contains(pos) {
|
||||
let offset = span.start.saturating_sub(ctx.span.start);
|
||||
let end_offset =
|
||||
ctx.prefix.len().min(pos.min(span.end) - ctx.span.start + 1);
|
||||
let new_ctx = Context::new(
|
||||
ctx.working_set,
|
||||
Span::new(span.start, ctx.span.end.min(span.end)),
|
||||
ctx.prefix.get(offset..end_offset).unwrap_or_default(),
|
||||
ctx.offset,
|
||||
);
|
||||
return self.process_completion(&mut exportable_completion, &new_ctx);
|
||||
}
|
||||
}
|
||||
vec![]
|
||||
};
|
||||
|
||||
match &expr.expr {
|
||||
Expr::String(_) => {
|
||||
return self.process_completion(&mut exportable_completion, ctx);
|
||||
}
|
||||
Expr::FullCellPath(fcp) => match &fcp.head.expr {
|
||||
Expr::List(items) => {
|
||||
return complete_on_list_items(items);
|
||||
}
|
||||
_ => return vec![],
|
||||
},
|
||||
_ => return vec![],
|
||||
}
|
||||
}
|
||||
"which" => {
|
||||
let mut completer = CommandCompletion {
|
||||
internals: true,
|
||||
externals: true,
|
||||
|
@ -22,18 +22,22 @@ pub struct PathBuiltFromString {
|
||||
/// Recursively goes through paths that match a given `partial`.
|
||||
/// built: State struct for a valid matching path built so far.
|
||||
///
|
||||
/// `want_directory`: Whether we want only directories as completion matches.
|
||||
/// Some commands like `cd` can only be run on directories whereas others
|
||||
/// like `ls` can be run on regular files as well.
|
||||
///
|
||||
/// `isdir`: whether the current partial path has a trailing slash.
|
||||
/// Parsing a path string into a pathbuf loses that bit of information.
|
||||
///
|
||||
/// want_directory: Whether we want only directories as completion matches.
|
||||
/// Some commands like `cd` can only be run on directories whereas others
|
||||
/// like `ls` can be run on regular files as well.
|
||||
/// `enable_exact_match`: Whether match algorithm is Prefix and all previous components
|
||||
/// of the path matched a directory exactly.
|
||||
fn complete_rec(
|
||||
partial: &[&str],
|
||||
built_paths: &[PathBuiltFromString],
|
||||
options: &CompletionOptions,
|
||||
want_directory: bool,
|
||||
isdir: bool,
|
||||
enable_exact_match: bool,
|
||||
) -> Vec<PathBuiltFromString> {
|
||||
if let Some((&base, rest)) = partial.split_first() {
|
||||
if base.chars().all(|c| c == '.') && (isdir || !rest.is_empty()) {
|
||||
@ -46,7 +50,14 @@ fn complete_rec(
|
||||
built
|
||||
})
|
||||
.collect();
|
||||
return complete_rec(rest, &built_paths, options, want_directory, isdir);
|
||||
return complete_rec(
|
||||
rest,
|
||||
&built_paths,
|
||||
options,
|
||||
want_directory,
|
||||
isdir,
|
||||
enable_exact_match,
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
@ -86,27 +97,26 @@ fn complete_rec(
|
||||
// Serves as confirmation to ignore longer completions for
|
||||
// components in between.
|
||||
if !rest.is_empty() || isdir {
|
||||
// Don't show longer completions if we have an exact match (#13204, #14794)
|
||||
let exact_match = enable_exact_match
|
||||
&& (if options.case_sensitive {
|
||||
entry_name.eq(base)
|
||||
} else {
|
||||
entry_name.eq_ignore_case(base)
|
||||
});
|
||||
completions.extend(complete_rec(
|
||||
rest,
|
||||
&[built],
|
||||
options,
|
||||
want_directory,
|
||||
isdir,
|
||||
exact_match,
|
||||
));
|
||||
} else {
|
||||
completions.push(built);
|
||||
}
|
||||
|
||||
// For https://github.com/nushell/nushell/issues/13204
|
||||
if isdir && options.match_algorithm == MatchAlgorithm::Prefix {
|
||||
let exact_match = if options.case_sensitive {
|
||||
entry_name.eq(base)
|
||||
} else {
|
||||
entry_name.to_folded_case().eq(&base.to_folded_case())
|
||||
};
|
||||
if exact_match {
|
||||
break;
|
||||
}
|
||||
} else {
|
||||
completions.push(built);
|
||||
}
|
||||
}
|
||||
None => {
|
||||
@ -140,7 +150,7 @@ impl OriginalCwd {
|
||||
}
|
||||
}
|
||||
|
||||
fn surround_remove(partial: &str) -> String {
|
||||
pub fn surround_remove(partial: &str) -> String {
|
||||
for c in ['`', '"', '\''] {
|
||||
if partial.starts_with(c) {
|
||||
let ret = partial.strip_prefix(c).unwrap_or(partial);
|
||||
@ -255,6 +265,7 @@ pub fn complete_item(
|
||||
options,
|
||||
want_directory,
|
||||
isdir,
|
||||
options.match_algorithm == MatchAlgorithm::Prefix,
|
||||
)
|
||||
.into_iter()
|
||||
.map(|mut p| {
|
||||
|
@ -1,18 +1,23 @@
|
||||
use crate::completions::{file_path_completion, Completer, CompletionOptions};
|
||||
use crate::completions::{
|
||||
completion_common::{surround_remove, FileSuggestion},
|
||||
completion_options::NuMatcher,
|
||||
file_path_completion, Completer, CompletionOptions, SemanticSuggestion, SuggestionKind,
|
||||
};
|
||||
use nu_path::expand_tilde;
|
||||
use nu_protocol::{
|
||||
engine::{Stack, StateWorkingSet},
|
||||
engine::{Stack, StateWorkingSet, VirtualPath},
|
||||
Span,
|
||||
};
|
||||
use reedline::Suggestion;
|
||||
use std::{
|
||||
collections::HashSet,
|
||||
path::{is_separator, PathBuf, MAIN_SEPARATOR as SEP, MAIN_SEPARATOR_STR},
|
||||
path::{is_separator, PathBuf, MAIN_SEPARATOR_STR},
|
||||
};
|
||||
|
||||
use super::{SemanticSuggestion, SuggestionKind};
|
||||
|
||||
pub struct DotNuCompletion;
|
||||
pub struct DotNuCompletion {
|
||||
/// e.g. use std/a<tab>
|
||||
pub std_virtual_path: bool,
|
||||
}
|
||||
|
||||
impl Completer for DotNuCompletion {
|
||||
fn fetch(
|
||||
@ -102,7 +107,7 @@ impl Completer for DotNuCompletion {
|
||||
|
||||
// Fetch the files filtering the ones that ends with .nu
|
||||
// and transform them into suggestions
|
||||
let completions = file_path_completion(
|
||||
let mut completions = file_path_completion(
|
||||
span,
|
||||
partial,
|
||||
&search_dirs
|
||||
@ -113,17 +118,60 @@ impl Completer for DotNuCompletion {
|
||||
working_set.permanent_state,
|
||||
stack,
|
||||
);
|
||||
|
||||
if self.std_virtual_path {
|
||||
let mut matcher = NuMatcher::new(partial, options);
|
||||
let base_dir = surround_remove(&base_dir);
|
||||
if base_dir == "." {
|
||||
let surround_prefix = partial
|
||||
.chars()
|
||||
.take_while(|c| "`'\"".contains(*c))
|
||||
.collect::<String>();
|
||||
for path in ["std", "std-rfc"] {
|
||||
let path = format!("{}{}", surround_prefix, path);
|
||||
matcher.add(
|
||||
path.clone(),
|
||||
FileSuggestion {
|
||||
span,
|
||||
path,
|
||||
style: None,
|
||||
is_dir: true,
|
||||
},
|
||||
);
|
||||
}
|
||||
} else if let Some(VirtualPath::Dir(sub_paths)) =
|
||||
working_set.find_virtual_path(&base_dir)
|
||||
{
|
||||
for sub_vp_id in sub_paths {
|
||||
let (path, sub_vp) = working_set.get_virtual_path(*sub_vp_id);
|
||||
let path = path
|
||||
.strip_prefix(&format!("{}/", base_dir))
|
||||
.unwrap_or(path)
|
||||
.to_string();
|
||||
matcher.add(
|
||||
path.clone(),
|
||||
FileSuggestion {
|
||||
path,
|
||||
span,
|
||||
style: None,
|
||||
is_dir: matches!(sub_vp, VirtualPath::Dir(_)),
|
||||
},
|
||||
);
|
||||
}
|
||||
}
|
||||
completions.extend(matcher.results());
|
||||
}
|
||||
|
||||
completions
|
||||
.into_iter()
|
||||
// Different base dir, so we list the .nu files or folders
|
||||
.filter(|it| {
|
||||
// for paths with spaces in them
|
||||
let path = it.path.trim_end_matches('`');
|
||||
path.ends_with(".nu") || path.ends_with(SEP)
|
||||
path.ends_with(".nu") || it.is_dir
|
||||
})
|
||||
.map(|x| {
|
||||
let append_whitespace =
|
||||
x.path.ends_with(".nu") && (!start_with_backquote || end_with_backquote);
|
||||
let append_whitespace = !x.is_dir && (!start_with_backquote || end_with_backquote);
|
||||
// Re-calculate the span to replace
|
||||
let mut span_offset = 0;
|
||||
let mut value = x.path.to_string();
|
||||
|
crates/nu-cli/src/completions/exportable_completions.rs (111 changed lines, new file)
@@ -0,0 +1,111 @@
|
||||
use crate::completions::{
|
||||
completion_common::surround_remove, completion_options::NuMatcher, Completer,
|
||||
CompletionOptions, SemanticSuggestion, SuggestionKind,
|
||||
};
|
||||
use nu_protocol::{
|
||||
engine::{Stack, StateWorkingSet},
|
||||
ModuleId, Span,
|
||||
};
|
||||
use reedline::Suggestion;
|
||||
|
||||
pub struct ExportableCompletion<'a> {
|
||||
pub module_id: ModuleId,
|
||||
pub temp_working_set: Option<StateWorkingSet<'a>>,
|
||||
}
|
||||
|
||||
/// If name contains space, wrap it in quotes
|
||||
fn wrapped_name(name: String) -> String {
|
||||
if !name.contains(' ') {
|
||||
return name;
|
||||
}
|
||||
if name.contains('\'') {
|
||||
format!("\"{}\"", name.replace('"', r#"\""#))
|
||||
} else {
|
||||
format!("'{name}'")
|
||||
}
|
||||
}
|
||||
|
||||
impl Completer for ExportableCompletion<'_> {
|
||||
fn fetch(
|
||||
&mut self,
|
||||
working_set: &StateWorkingSet,
|
||||
_stack: &Stack,
|
||||
prefix: impl AsRef<str>,
|
||||
span: Span,
|
||||
offset: usize,
|
||||
options: &CompletionOptions,
|
||||
) -> Vec<SemanticSuggestion> {
|
||||
let mut matcher = NuMatcher::<()>::new(surround_remove(prefix.as_ref()), options);
|
||||
let mut results = Vec::new();
|
||||
let span = reedline::Span {
|
||||
start: span.start - offset,
|
||||
end: span.end - offset,
|
||||
};
|
||||
// TODO: use matcher.add_lazy to lazy evaluate an item if it matches the prefix
|
||||
let mut add_suggestion = |value: String,
|
||||
description: Option<String>,
|
||||
extra: Option<Vec<String>>,
|
||||
kind: SuggestionKind| {
|
||||
results.push(SemanticSuggestion {
|
||||
suggestion: Suggestion {
|
||||
value,
|
||||
span,
|
||||
description,
|
||||
extra,
|
||||
..Suggestion::default()
|
||||
},
|
||||
kind: Some(kind),
|
||||
});
|
||||
};
|
||||
|
||||
let working_set = self.temp_working_set.as_ref().unwrap_or(working_set);
|
||||
let module = working_set.get_module(self.module_id);
|
||||
|
||||
for (name, decl_id) in &module.decls {
|
||||
let name = String::from_utf8_lossy(name).to_string();
|
||||
if matcher.matches(&name) {
|
||||
let cmd = working_set.get_decl(*decl_id);
|
||||
add_suggestion(
|
||||
wrapped_name(name),
|
||||
Some(cmd.description().to_string()),
|
||||
None,
|
||||
SuggestionKind::Command(cmd.command_type()),
|
||||
);
|
||||
}
|
||||
}
|
||||
for (name, module_id) in &module.submodules {
|
||||
let name = String::from_utf8_lossy(name).to_string();
|
||||
if matcher.matches(&name) {
|
||||
let comments = working_set.get_module_comments(*module_id).map(|spans| {
|
||||
spans
|
||||
.iter()
|
||||
.map(|sp| {
|
||||
String::from_utf8_lossy(working_set.get_span_contents(*sp)).into()
|
||||
})
|
||||
.collect::<Vec<String>>()
|
||||
});
|
||||
add_suggestion(
|
||||
wrapped_name(name),
|
||||
Some("Submodule".into()),
|
||||
comments,
|
||||
SuggestionKind::Module,
|
||||
);
|
||||
}
|
||||
}
|
||||
for (name, var_id) in &module.constants {
|
||||
let name = String::from_utf8_lossy(name).to_string();
|
||||
if matcher.matches(&name) {
|
||||
let var = working_set.get_variable(*var_id);
|
||||
add_suggestion(
|
||||
wrapped_name(name),
|
||||
var.const_val
|
||||
.as_ref()
|
||||
.and_then(|v| v.clone().coerce_into_string().ok()),
|
||||
None,
|
||||
SuggestionKind::Variable,
|
||||
);
|
||||
}
|
||||
}
|
||||
results
|
||||
}
|
||||
}
|
@@ -1,12 +1,12 @@
-use crate::completions::{completion_options::NuMatcher, Completer, CompletionOptions};
+use crate::completions::{
+    completion_options::NuMatcher, Completer, CompletionOptions, SemanticSuggestion, SuggestionKind,
+};
 use nu_protocol::{
     engine::{Stack, StateWorkingSet},
     DeclId, Span,
 };
 use reedline::Suggestion;

-use super::{SemanticSuggestion, SuggestionKind};
-
 #[derive(Clone)]
 pub struct FlagCompletion {
     pub decl_id: DeclId,

@@ -8,6 +8,7 @@ mod completion_options;
 mod custom_completions;
 mod directory_completions;
 mod dotnu_completions;
+mod exportable_completions;
 mod file_completions;
 mod flag_completions;
 mod operator_completions;
@@ -22,6 +23,7 @@ pub use completion_options::{CompletionOptions, MatchAlgorithm};
 pub use custom_completions::CustomCompletion;
 pub use directory_completions::DirectoryCompletion;
 pub use dotnu_completions::DotNuCompletion;
+pub use exportable_completions::ExportableCompletion;
 pub use file_completions::{file_path_completion, FileCompletion};
 pub use flag_completions::FlagCompletion;
 pub use operator_completions::OperatorCompletion;
@ -11,6 +11,7 @@ use nu_engine::eval_block;
|
||||
use nu_parser::parse;
|
||||
use nu_path::expand_tilde;
|
||||
use nu_protocol::{debugger::WithoutDebug, engine::StateWorkingSet, Config, PipelineData};
|
||||
use nu_std::load_standard_library;
|
||||
use reedline::{Completer, Suggestion};
|
||||
use rstest::{fixture, rstest};
|
||||
use support::{
|
||||
@ -513,7 +514,7 @@ fn dotnu_completions() {
|
||||
|
||||
match_suggestions(&vec!["sub.nu`"], &suggestions);
|
||||
|
||||
let expected = vec![
|
||||
let mut expected = vec![
|
||||
"asdf.nu",
|
||||
"bar.nu",
|
||||
"bat.nu",
|
||||
@ -546,6 +547,8 @@ fn dotnu_completions() {
|
||||
match_suggestions(&expected, &suggestions);
|
||||
|
||||
// Test use completion
|
||||
expected.push("std");
|
||||
expected.push("std-rfc");
|
||||
let completion_str = "use ";
|
||||
let suggestions = completer.complete(completion_str, completion_str.len());
|
||||
|
||||
@ -577,6 +580,66 @@ fn dotnu_completions() {
|
||||
match_dir_content_for_dotnu(dir_content, &suggestions);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn dotnu_stdlib_completions() {
|
||||
let (_, _, mut engine, stack) = new_dotnu_engine();
|
||||
assert!(load_standard_library(&mut engine).is_ok());
|
||||
let mut completer = NuCompleter::new(Arc::new(engine), Arc::new(stack));
|
||||
|
||||
// `export use` should be recognized as command `export use`
|
||||
let completion_str = "export use std/ass";
|
||||
let suggestions = completer.complete(completion_str, completion_str.len());
|
||||
match_suggestions(&vec!["assert"], &suggestions);
|
||||
|
||||
let completion_str = "use `std-rfc/cli";
|
||||
let suggestions = completer.complete(completion_str, completion_str.len());
|
||||
match_suggestions(&vec!["clip"], &suggestions);
|
||||
|
||||
let completion_str = "use \"std";
|
||||
let suggestions = completer.complete(completion_str, completion_str.len());
|
||||
match_suggestions(&vec!["\"std", "\"std-rfc"], &suggestions);
|
||||
|
||||
let completion_str = "overlay use \'std-rfc/cli";
|
||||
let suggestions = completer.complete(completion_str, completion_str.len());
|
||||
match_suggestions(&vec!["clip"], &suggestions);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn exportable_completions() {
|
||||
let (_, _, mut engine, mut stack) = new_dotnu_engine();
|
||||
let code = r#"export module "🤔🐘" {
|
||||
export const foo = "🤔🐘";
|
||||
}"#;
|
||||
assert!(support::merge_input(code.as_bytes(), &mut engine, &mut stack).is_ok());
|
||||
assert!(load_standard_library(&mut engine).is_ok());
|
||||
|
||||
let mut completer = NuCompleter::new(Arc::new(engine), Arc::new(stack));
|
||||
|
||||
let completion_str = "use std null";
|
||||
let suggestions = completer.complete(completion_str, completion_str.len());
|
||||
match_suggestions(&vec!["null-device", "null_device"], &suggestions);
|
||||
|
||||
let completion_str = "export use std/assert eq";
|
||||
let suggestions = completer.complete(completion_str, completion_str.len());
|
||||
match_suggestions(&vec!["equal"], &suggestions);
|
||||
|
||||
let completion_str = "use std/assert \"not eq";
|
||||
let suggestions = completer.complete(completion_str, completion_str.len());
|
||||
match_suggestions(&vec!["'not equal'"], &suggestions);
|
||||
|
||||
let completion_str = "use std-rfc/clip ['prefi";
|
||||
let suggestions = completer.complete(completion_str, completion_str.len());
|
||||
match_suggestions(&vec!["prefix"], &suggestions);
|
||||
|
||||
let completion_str = "use std/math [E, `TAU";
|
||||
let suggestions = completer.complete(completion_str, completion_str.len());
|
||||
match_suggestions(&vec!["TAU"], &suggestions);
|
||||
|
||||
let completion_str = "use 🤔🐘 'foo";
|
||||
let suggestions = completer.complete(completion_str, completion_str.len());
|
||||
match_suggestions(&vec!["foo"], &suggestions);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn dotnu_completions_const_nu_lib_dirs() {
|
||||
let (_, _, engine, stack) = new_dotnu_engine();
|
||||
@ -951,10 +1014,11 @@ fn partial_completions() {
|
||||
// Create the expected values
|
||||
let expected_paths = [
|
||||
file(dir.join("partial").join("hello.txt")),
|
||||
folder(dir.join("partial").join("hol")),
|
||||
file(dir.join("partial-a").join("have_ext.exe")),
|
||||
file(dir.join("partial-a").join("have_ext.txt")),
|
||||
file(dir.join("partial-a").join("hello")),
|
||||
file(dir.join("partial-a").join("hola")),
|
||||
folder(dir.join("partial-a").join("hola")),
|
||||
file(dir.join("partial-b").join("hello_b")),
|
||||
file(dir.join("partial-b").join("hi_b")),
|
||||
file(dir.join("partial-c").join("hello_c")),
|
||||
@ -971,11 +1035,12 @@ fn partial_completions() {
|
||||
// Create the expected values
|
||||
let expected_paths = [
|
||||
file(dir.join("partial").join("hello.txt")),
|
||||
folder(dir.join("partial").join("hol")),
|
||||
file(dir.join("partial-a").join("anotherfile")),
|
||||
file(dir.join("partial-a").join("have_ext.exe")),
|
||||
file(dir.join("partial-a").join("have_ext.txt")),
|
||||
file(dir.join("partial-a").join("hello")),
|
||||
file(dir.join("partial-a").join("hola")),
|
||||
folder(dir.join("partial-a").join("hola")),
|
||||
file(dir.join("partial-b").join("hello_b")),
|
||||
file(dir.join("partial-b").join("hi_b")),
|
||||
file(dir.join("partial-c").join("hello_c")),
|
||||
@ -2215,15 +2280,43 @@ fn exact_match() {
|
||||
|
||||
let mut completer = NuCompleter::new(Arc::new(engine), Arc::new(stack));
|
||||
|
||||
// Troll case to test if exact match logic works case insensitively
|
||||
let target_dir = format!("open {}", folder(dir.join("pArTiAL")));
|
||||
let suggestions = completer.complete(&target_dir, target_dir.len());
|
||||
|
||||
// Since it's an exact match, only 'partial' should be suggested, not
|
||||
// 'partial-a' and stuff. Implemented in #13302
|
||||
match_suggestions(
|
||||
&vec![file(dir.join("partial").join("hello.txt")).as_str()],
|
||||
&vec![
|
||||
file(dir.join("partial").join("hello.txt")).as_str(),
|
||||
folder(dir.join("partial").join("hol")).as_str(),
|
||||
],
|
||||
&suggestions,
|
||||
);
|
||||
|
||||
let target_dir = format!("open {}", file(dir.join("partial").join("h")));
|
||||
let suggestions = completer.complete(&target_dir, target_dir.len());
|
||||
match_suggestions(
|
||||
&vec![
|
||||
file(dir.join("partial").join("hello.txt")).as_str(),
|
||||
folder(dir.join("partial").join("hol")).as_str(),
|
||||
],
|
||||
&suggestions,
|
||||
);
|
||||
|
||||
// Even though "hol" is an exact match, the first component ("part") wasn't an
|
||||
// exact match, so we include partial-a/hola
|
||||
let target_dir = format!("open {}", file(dir.join("part").join("hol")));
|
||||
let suggestions = completer.complete(&target_dir, target_dir.len());
|
||||
match_suggestions(
|
||||
&vec![
|
||||
folder(dir.join("partial").join("hol")).as_str(),
|
||||
folder(dir.join("partial-a").join("hola")).as_str(),
|
||||
],
|
||||
&suggestions,
|
||||
);
|
||||
|
||||
// Exact match behavior shouldn't be enabled if the path has no slashes
|
||||
let target_dir = format!("open {}", file(dir.join("partial")));
|
||||
let suggestions = completer.complete(&target_dir, target_dir.len());
|
||||
assert!(suggestions.len() > 1);
|
||||
}
|
||||
|
||||
#[ignore = "was reverted, still needs fixing"]
|
||||
|
@@ -21,10 +21,10 @@ nu-protocol = { path = "../nu-protocol", version = "0.103.1", default-features = false }
 nu-utils = { path = "../nu-utils", version = "0.103.1", default-features = false }

 itertools = { workspace = true }
-shadow-rs = { version = "0.38", default-features = false }
+shadow-rs = { version = "1.1", default-features = false }

 [build-dependencies]
-shadow-rs = { version = "0.38", default-features = false }
+shadow-rs = { version = "1.1", default-features = false, features = ["build"] }

 [dev-dependencies]
 quickcheck = { workspace = true }
@ -31,16 +31,6 @@ impl Command for Do {
|
||||
"ignore errors as the closure runs",
|
||||
Some('i'),
|
||||
)
|
||||
.switch(
|
||||
"ignore-shell-errors",
|
||||
"ignore shell errors as the closure runs",
|
||||
Some('s'),
|
||||
)
|
||||
.switch(
|
||||
"ignore-program-errors",
|
||||
"ignore external program errors as the closure runs",
|
||||
Some('p'),
|
||||
)
|
||||
.switch(
|
||||
"capture-errors",
|
||||
"catch errors as the closure runs, and return them",
|
||||
@ -71,36 +61,6 @@ impl Command for Do {
|
||||
let rest: Vec<Value> = call.rest(engine_state, caller_stack, 1)?;
|
||||
let ignore_all_errors = call.has_flag(engine_state, caller_stack, "ignore-errors")?;
|
||||
|
||||
if call.has_flag(engine_state, caller_stack, "ignore-shell-errors")? {
|
||||
nu_protocol::report_shell_warning(
|
||||
engine_state,
|
||||
&ShellError::GenericError {
|
||||
error: "Deprecated option".into(),
|
||||
msg: "`--ignore-shell-errors` is deprecated and will be removed in 0.102.0."
|
||||
.into(),
|
||||
span: Some(call.head),
|
||||
help: Some("Please use the `--ignore-errors(-i)`".into()),
|
||||
inner: vec![],
|
||||
},
|
||||
);
|
||||
}
|
||||
if call.has_flag(engine_state, caller_stack, "ignore-program-errors")? {
|
||||
nu_protocol::report_shell_warning(
|
||||
engine_state,
|
||||
&ShellError::GenericError {
|
||||
error: "Deprecated option".into(),
|
||||
msg: "`--ignore-program-errors` is deprecated and will be removed in 0.102.0."
|
||||
.into(),
|
||||
span: Some(call.head),
|
||||
help: Some("Please use the `--ignore-errors(-i)`".into()),
|
||||
inner: vec![],
|
||||
},
|
||||
);
|
||||
}
|
||||
let ignore_shell_errors = ignore_all_errors
|
||||
|| call.has_flag(engine_state, caller_stack, "ignore-shell-errors")?;
|
||||
let ignore_program_errors = ignore_all_errors
|
||||
|| call.has_flag(engine_state, caller_stack, "ignore-program-errors")?;
|
||||
let capture_errors = call.has_flag(engine_state, caller_stack, "capture-errors")?;
|
||||
let has_env = call.has_flag(engine_state, caller_stack, "env")?;
|
||||
|
||||
@ -206,7 +166,7 @@ impl Command for Do {
|
||||
}
|
||||
}
|
||||
Ok(PipelineData::ByteStream(mut stream, metadata))
|
||||
if ignore_program_errors
|
||||
if ignore_all_errors
|
||||
&& !matches!(
|
||||
caller_stack.stdout(),
|
||||
OutDest::Pipe | OutDest::PipeSeparate | OutDest::Value
|
||||
@ -218,10 +178,10 @@ impl Command for Do {
|
||||
}
|
||||
Ok(PipelineData::ByteStream(stream, metadata))
|
||||
}
|
||||
Ok(PipelineData::Value(Value::Error { .. }, ..)) | Err(_) if ignore_shell_errors => {
|
||||
Ok(PipelineData::Value(Value::Error { .. }, ..)) | Err(_) if ignore_all_errors => {
|
||||
Ok(PipelineData::empty())
|
||||
}
|
||||
Ok(PipelineData::ListStream(stream, metadata)) if ignore_shell_errors => {
|
||||
Ok(PipelineData::ListStream(stream, metadata)) if ignore_all_errors => {
|
||||
let stream = stream.map(move |value| {
|
||||
if let Value::Error { .. } = value {
|
||||
Value::nothing(head)
|
||||
|
@@ -294,7 +294,7 @@ fn action(input: &Value, args: &Arguments, head: Span) -> Value {
             match parse_date_from_string(&input_val, span) {
                 Ok(date) => return Value::date(date, span),
                 Err(_) => {
-                    if let Ok(date) = from_human_time(&input_val) {
+                    if let Ok(date) = from_human_time(&input_val, Local::now().naive_local()) {
                         match date {
                             ParseResult::Date(date) => {
                                 let time = Local::now().time();
@@ -307,7 +307,29 @@ fn action(input: &Value, args: &Arguments, head: Span) -> Value {
                                 return Value::date(dt_fixed, span);
                             }
                             ParseResult::DateTime(date) => {
-                                return Value::date(date.fixed_offset(), span)
+                                let local_offset = *Local::now().offset();
+                                let dt_fixed = match local_offset.from_local_datetime(&date) {
+                                    chrono::LocalResult::Single(dt) => dt,
+                                    chrono::LocalResult::Ambiguous(_, _) => {
+                                        return Value::error(
+                                            ShellError::DatetimeParseError {
+                                                msg: "Ambiguous datetime".to_string(),
+                                                span,
+                                            },
+                                            span,
+                                        );
+                                    }
+                                    chrono::LocalResult::None => {
+                                        return Value::error(
+                                            ShellError::DatetimeParseError {
+                                                msg: "Invalid datetime".to_string(),
+                                                span,
+                                            },
+                                            span,
+                                        );
+                                    }
+                                };
+                                return Value::date(dt_fixed, span);
                             }
                             ParseResult::Time(time) => {
                                 let date = Local::now().date_naive();
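A minimal sketch of the human-date-parser 0.3 call shape used in the hunk above. The second argument is what changed in 0.3: a reference datetime that relative phrases are resolved against. The phrase, error handling, and main() here are illustrative, not taken from the diff:

use chrono::Local;
use human_date_parser::{from_human_time, ParseResult};

fn main() {
    // 0.2 took only the phrase; 0.3 also takes an explicit reference time.
    let reference = Local::now().naive_local();
    match from_human_time("in 2 hours", reference) {
        Ok(ParseResult::DateTime(dt)) => println!("datetime: {dt}"),
        Ok(ParseResult::Date(d)) => println!("date: {d}"),
        Ok(ParseResult::Time(t)) => println!("time: {t}"),
        Err(_) => eprintln!("could not parse the phrase"),
    }
}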
@@ -15,6 +15,7 @@ impl Command for IntoDuration {
         Signature::build("into duration")
             .input_output_types(vec![
                 (Type::Int, Type::Duration),
+                (Type::Float, Type::Duration),
                 (Type::String, Type::Duration),
                 (Type::Duration, Type::Duration),
                 (Type::table(), Type::table()),
@@ -109,6 +110,11 @@ impl Command for IntoDuration {
                 example: "1_234 | into duration --unit ms",
                 result: Some(Value::test_duration(1_234 * 1_000_000)),
             },
+            Example {
+                description: "Convert a floating point number of an arbitrary unit to duration",
+                example: "1.234 | into duration --unit sec",
+                result: Some(Value::test_duration(1_234 * 1_000_000)),
+            },
         ]
     }
 }
@@ -236,22 +242,22 @@ fn action(input: &Value, unit: &str, span: Span) -> Value {
     let value_span = input.span();
     match input {
         Value::Duration { .. } => input.clone(),
-        Value::String { val, .. } => match compound_to_duration(val, value_span) {
+        Value::String { val, .. } => {
+            if let Ok(num) = val.parse::<f64>() {
+                let ns = unit_to_ns_factor(unit);
+                return Value::duration((num * (ns as f64)) as i64, span);
+            }
+            match compound_to_duration(val, value_span) {
                 Ok(val) => Value::duration(val, span),
                 Err(error) => Value::error(error, span),
-        },
+            }
+        }
+        Value::Float { val, .. } => {
+            let ns = unit_to_ns_factor(unit);
+            Value::duration((*val * (ns as f64)) as i64, span)
+        }
         Value::Int { val, .. } => {
-            let ns = match unit {
-                "ns" => 1,
-                "us" | "µs" => 1_000,
-                "ms" => 1_000_000,
-                "sec" => NS_PER_SEC,
-                "min" => NS_PER_SEC * 60,
-                "hr" => NS_PER_SEC * 60 * 60,
-                "day" => NS_PER_SEC * 60 * 60 * 24,
-                "wk" => NS_PER_SEC * 60 * 60 * 24 * 7,
-                _ => 0,
-            };
+            let ns = unit_to_ns_factor(unit);
             Value::duration(*val * ns, span)
         }
         // Propagate errors by explicitly matching them before the final case.
@@ -268,6 +274,20 @@ fn action(input: &Value, unit: &str, span: Span) -> Value {
     }
 }

+fn unit_to_ns_factor(unit: &str) -> i64 {
+    match unit {
+        "ns" => 1,
+        "us" | "µs" => 1_000,
+        "ms" => 1_000_000,
+        "sec" => NS_PER_SEC,
+        "min" => NS_PER_SEC * 60,
+        "hr" => NS_PER_SEC * 60 * 60,
+        "day" => NS_PER_SEC * 60 * 60 * 24,
+        "wk" => NS_PER_SEC * 60 * 60 * 24 * 7,
+        _ => 0,
+    }
+}
+
 #[cfg(test)]
 mod test {
     use super::*;
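A small sketch of the arithmetic behind the new float and float-string handling above; the nanosecond factor is the "sec" entry of unit_to_ns_factor from the hunk, and the assert mirrors the new example (1.234 | into duration --unit sec):

fn main() {
    // Parse the bare number, then scale by the unit's nanosecond factor.
    let val = "1.234";
    let ns_per_sec: i64 = 1_000_000_000; // unit_to_ns_factor("sec")
    let duration_ns = (val.parse::<f64>().unwrap() * ns_per_sec as f64) as i64;
    assert_eq!(duration_ns, 1_234_000_000);
    println!("{duration_ns} ns");
}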
@@ -30,6 +30,11 @@ impl Command for All {

     fn examples(&self) -> Vec<Example> {
         vec![
+            Example {
+                description: "Check if a list contains only true values",
+                example: "[false true true false] | all {}",
+                result: Some(Value::test_bool(false)),
+            },
             Example {
                 description: "Check if each row's status is the string 'UP'",
                 example: "[[status]; [UP] [UP]] | all {|el| $el.status == UP }",
@@ -30,6 +30,11 @@ impl Command for Any {

     fn examples(&self) -> Vec<Example> {
         vec![
+            Example {
+                description: "Check if a list contains any true values",
+                example: "[false true true false] | any {}",
+                result: Some(Value::test_bool(true)),
+            },
             Example {
                 description: "Check if any row's status is the string 'DOWN'",
                 example: "[[status]; [UP] [DOWN] [UP]] | any {|el| $el.status == DOWN }",
@@ -1,5 +1,5 @@
 use nu_engine::command_prelude::*;
-use rand::{prelude::SliceRandom, thread_rng};
+use rand::{prelude::SliceRandom, rng};

 #[derive(Clone)]
 pub struct Shuffle;
@@ -31,7 +31,7 @@ impl Command for Shuffle {
     ) -> Result<PipelineData, ShellError> {
         let metadata = input.metadata();
         let mut values = input.into_iter_strict(call.head)?.collect::<Vec<_>>();
-        values.shuffle(&mut thread_rng());
+        values.shuffle(&mut rng());
         let iter = values.into_iter();
         Ok(iter.into_pipeline_data_with_metadata(
             call.head,
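The same rand 0.9 renames recur in the remaining random commands below: thread_rng becomes rng, gen/gen_bool/gen_range become random/random_bool/random_range (also available as free functions), distributions becomes distr, and Standard becomes StandardUniform. A hedged sketch of the new API surface, with illustrative values:

use rand::prelude::SliceRandom; // shuffle() comes from this trait
use rand::{random_bool, random_range, rng, Rng};

fn main() {
    // Free functions replace one-off `thread_rng().gen_*` calls.
    let roll: i64 = random_range(1..=6);
    let coin = random_bool(0.5);

    // The thread-local generator is now `rng()`, and `gen()` is `random()`.
    let mut r = rng();
    let x: u64 = r.random();

    let mut values = vec![1, 2, 3, 4];
    values.shuffle(&mut r);

    println!("{roll} {coin} {x} {values:?}");
}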
@@ -1,6 +1,5 @@
 use nu_engine::command_prelude::*;
-
-use rand::prelude::{thread_rng, Rng};
+use rand::random_bool;

 #[derive(Clone)]
 pub struct RandomBool;
@@ -77,8 +76,7 @@ fn bool(
         }
     }

-    let mut rng = thread_rng();
-    let bool_result: bool = rng.gen_bool(probability);
+    let bool_result: bool = random_bool(probability);

     Ok(PipelineData::Value(Value::bool(bool_result, span), None))
 }
@@ -1,8 +1,8 @@
 use nu_engine::command_prelude::*;
 use nu_protocol::Signals;
 use rand::{
-    distributions::{Alphanumeric, Standard},
-    thread_rng, Rng,
+    distr::{Alphanumeric, StandardUniform},
+    rng, Rng,
 };

 pub(super) enum RandomDistribution {
@@ -31,9 +31,9 @@ pub(super) fn random_byte_stream(

         let bytes_to_write = std::cmp::min(remaining_bytes, OUTPUT_CHUNK_SIZE);

-        let rng = thread_rng();
+        let rng = rng();
         let byte_iter: Box<dyn Iterator<Item = u8>> = match distribution {
-            RandomDistribution::Binary => Box::new(rng.sample_iter(Standard)),
+            RandomDistribution::Binary => Box::new(rng.sample_iter(StandardUniform)),
             RandomDistribution::Alphanumeric => Box::new(rng.sample_iter(Alphanumeric)),
         };
         out.extend(byte_iter.take(bytes_to_write));
@@ -1,6 +1,6 @@
 use nu_engine::command_prelude::*;
 use nu_protocol::ListStream;
-use rand::prelude::{thread_rng, Rng};
+use rand::random_range;

 #[derive(Clone)]
 pub struct RandomDice;
@@ -73,10 +73,7 @@ fn dice(
     let dice: usize = call.get_flag(engine_state, stack, "dice")?.unwrap_or(1);
     let sides: usize = call.get_flag(engine_state, stack, "sides")?.unwrap_or(6);

-    let iter = (0..dice).map(move |_| {
-        let mut thread_rng = thread_rng();
-        Value::int(thread_rng.gen_range(1..sides + 1) as i64, span)
-    });
+    let iter = (0..dice).map(move |_| Value::int(random_range(1..sides + 1) as i64, span));

     Ok(ListStream::new(iter, span, engine_state.signals().clone()).into())
 }
@@ -1,6 +1,6 @@
 use nu_engine::command_prelude::*;
 use nu_protocol::{FloatRange, Range};
-use rand::prelude::{thread_rng, Rng};
+use rand::random_range;
 use std::ops::Bound;

 #[derive(Clone)]
@@ -71,8 +71,6 @@ fn float(
     let span = call.head;
     let range: Option<Spanned<Range>> = call.opt(engine_state, stack, 0)?;

-    let mut thread_rng = thread_rng();
-
     match range {
         Some(range) => {
             let range_span = range.span;
@@ -90,15 +88,15 @@ fn float(
             }

             let value = match range.end() {
-                Bound::Included(end) => thread_rng.gen_range(range.start()..=end),
-                Bound::Excluded(end) => thread_rng.gen_range(range.start()..end),
-                Bound::Unbounded => thread_rng.gen_range(range.start()..f64::INFINITY),
+                Bound::Included(end) => random_range(range.start()..=end),
+                Bound::Excluded(end) => random_range(range.start()..end),
+                Bound::Unbounded => random_range(range.start()..f64::INFINITY),
             };

             Ok(PipelineData::Value(Value::float(value, span), None))
         }
         None => Ok(PipelineData::Value(
-            Value::float(thread_rng.gen_range(0.0..1.0), span),
+            Value::float(random_range(0.0..1.0), span),
             None,
         )),
     }
@@ -1,6 +1,6 @@
 use nu_engine::command_prelude::*;
 use nu_protocol::Range;
-use rand::prelude::{thread_rng, Rng};
+use rand::random_range;
 use std::ops::Bound;

 #[derive(Clone)]
@@ -75,8 +75,6 @@ fn integer(
     let span = call.head;
     let range: Option<Spanned<Range>> = call.opt(engine_state, stack, 0)?;

-    let mut thread_rng = thread_rng();
-
     match range {
         Some(range) => {
             let range_span = range.span;
@@ -94,9 +92,9 @@ fn integer(
             }

             let value = match range.end() {
-                Bound::Included(end) => thread_rng.gen_range(range.start()..=end),
-                Bound::Excluded(end) => thread_rng.gen_range(range.start()..end),
-                Bound::Unbounded => thread_rng.gen_range(range.start()..=i64::MAX),
+                Bound::Included(end) => random_range(range.start()..=end),
+                Bound::Excluded(end) => random_range(range.start()..end),
+                Bound::Unbounded => random_range(range.start()..=i64::MAX),
             };

             Ok(PipelineData::Value(Value::int(value, span), None))
@@ -110,7 +108,7 @@ fn integer(
             }
         }
         None => Ok(PipelineData::Value(
-            Value::int(thread_rng.gen_range(0..=i64::MAX), span),
+            Value::int(random_range(0..=i64::MAX), span),
             None,
         )),
     }
@@ -78,6 +78,8 @@ impl Command for External {
             _ => Path::new(&*name_str).to_owned(),
         };

+        let paths = nu_engine::env::path_str(engine_state, stack, call.head)?;
+
         // On Windows, the user could have run the cmd.exe built-in "assoc" command
         // Example: "assoc .nu=nuscript" and then run the cmd.exe built-in "ftype" command
         // Example: "ftype nuscript=C:\path\to\nu.exe '%1' %*" and then added the nushell
@@ -88,7 +90,7 @@ impl Command for External {
         // easy way to do this is to run cmd.exe with the script as an argument.
         let potential_nuscript_in_windows = if cfg!(windows) {
             // let's make sure it's a .nu script
-            if let Some(executable) = which(&expanded_name, "", cwd.as_ref()) {
+            if let Some(executable) = which(&expanded_name, &paths, cwd.as_ref()) {
                 let ext = executable
                     .extension()
                     .unwrap_or_default()
@@ -133,7 +135,6 @@ impl Command for External {
         } else {
             // Determine the PATH to be used and then use `which` to find it - though this has no
             // effect if it's an absolute path already
-            let paths = nu_engine::env::path_str(engine_state, stack, call.head)?;
             let Some(executable) = which(&expanded_name, &paths, cwd.as_ref()) else {
                 return Err(command_not_found(
                     &name_str,
@@ -1,5 +1,6 @@
 use data_encoding::HEXUPPER;
 use rand::prelude::*;
+use rand::random_range;
 use rand_chacha::ChaCha8Rng;

 use nu_test_support::nu;
@@ -16,7 +17,7 @@ fn random_bytes() -> Vec<String> {

     (0..NUM)
         .map(|_| {
-            let length = rng.gen_range(0..512);
+            let length = random_range(0..512);
             let mut bytes = vec![0u8; length];
             rng.fill_bytes(&mut bytes);
             HEXUPPER.encode(&bytes)
@ -7,8 +7,9 @@ use nu_test_support::{
|
||||
playground::{Dirs, Playground},
|
||||
};
|
||||
use rand::{
|
||||
distributions::{Alphanumeric, DistString, Standard},
|
||||
distr::{Alphanumeric, SampleString, StandardUniform},
|
||||
prelude::Distribution,
|
||||
random_range,
|
||||
rngs::StdRng,
|
||||
Rng, SeedableRng,
|
||||
};
|
||||
@ -382,7 +383,7 @@ struct TestRow(
|
||||
|
||||
impl TestRow {
|
||||
pub fn random() -> Self {
|
||||
StdRng::from_entropy().sample(Standard)
|
||||
StdRng::from_os_rng().sample(StandardUniform)
|
||||
}
|
||||
}
|
||||
|
||||
@ -433,12 +434,12 @@ impl TryFrom<&rusqlite::Row<'_>> for TestRow {
|
||||
}
|
||||
}
|
||||
|
||||
impl Distribution<TestRow> for Standard {
|
||||
impl Distribution<TestRow> for StandardUniform {
|
||||
fn sample<R>(&self, rng: &mut R) -> TestRow
|
||||
where
|
||||
R: rand::Rng + ?Sized,
|
||||
{
|
||||
let dt = DateTime::from_timestamp_millis(rng.gen_range(0..2324252554000))
|
||||
let dt = DateTime::from_timestamp_millis(random_range(0..2324252554000))
|
||||
.unwrap()
|
||||
.fixed_offset();
|
||||
|
||||
@ -446,18 +447,18 @@ impl Distribution<TestRow> for Standard {
|
||||
|
||||
// limit the size of the numbers to work around
|
||||
// https://github.com/nushell/nushell/issues/10612
|
||||
let filesize = rng.gen_range(-1024..=1024);
|
||||
let duration = rng.gen_range(-1024..=1024);
|
||||
let filesize = random_range(-1024..=1024);
|
||||
let duration = random_range(-1024..=1024);
|
||||
|
||||
TestRow(
|
||||
rng.gen(),
|
||||
rng.gen(),
|
||||
rng.gen(),
|
||||
rng.random(),
|
||||
rng.random(),
|
||||
rng.random(),
|
||||
filesize,
|
||||
duration,
|
||||
dt,
|
||||
rand_string,
|
||||
rng.gen::<u64>().to_be_bytes().to_vec(),
|
||||
rng.random::<u64>().to_be_bytes().to_vec(),
|
||||
rusqlite::types::Value::Null,
|
||||
)
|
||||
}
|
||||
|
@ -40,22 +40,6 @@ fn do_with_semicolon_break_on_failed_external() {
|
||||
assert_eq!(actual.out, "");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn ignore_shell_errors_works_for_external_with_semicolon() {
|
||||
let actual = nu!(r#"do -s { open asdfasdf.txt }; "text""#);
|
||||
|
||||
assert!(actual.err.contains("Deprecated option"));
|
||||
assert_eq!(actual.out, "text");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn ignore_program_errors_works_for_external_with_semicolon() {
|
||||
let actual = nu!(r#"do -p { nu -n -c 'exit 1' }; "text""#);
|
||||
|
||||
assert!(actual.err.contains("Deprecated option"));
|
||||
assert_eq!(actual.out, "text");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn ignore_error_should_work_for_external_command() {
|
||||
let actual = nu!(r#"do -i { nu --testbin fail asdf }; echo post"#);
|
||||
@ -76,11 +60,3 @@ fn run_closure_with_it_using() {
|
||||
assert!(actual.err.is_empty());
|
||||
assert_eq!(actual.out, "3");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn waits_for_external() {
|
||||
let actual = nu!(r#"do -p { nu -c 'sleep 1sec; print before; exit 1'}; print after"#);
|
||||
|
||||
assert!(actual.err.contains("Deprecated option"));
|
||||
assert_eq!(actual.out, "beforeafter");
|
||||
}
|
||||
|
@@ -28,6 +28,7 @@ url = { workspace = true }
 nu-cmd-lang = { path = "../nu-cmd-lang", version = "0.103.1" }
 nu-command = { path = "../nu-command", version = "0.103.1" }
 nu-engine = { path = "../nu-engine", version = "0.103.1" }
+nu-std = { path = "../nu-std", version = "0.103.1" }
 nu-test-support = { path = "../nu-test-support", version = "0.103.1" }

 assert-json-diff = "2.0"
@ -25,14 +25,14 @@ fn try_find_id_in_misc(
|
||||
location: Option<&usize>,
|
||||
id_ref: Option<&Id>,
|
||||
) -> Option<(Id, Span)> {
|
||||
let call_name = working_set.get_span_contents(call.head);
|
||||
let call_name = working_set.get_decl(call.decl_id).name();
|
||||
match call_name {
|
||||
b"def" | b"export def" => try_find_id_in_def(call, working_set, location, id_ref),
|
||||
b"module" | b"export module" => try_find_id_in_mod(call, working_set, location, id_ref),
|
||||
b"use" | b"export use" | b"hide" => {
|
||||
"def" | "export def" => try_find_id_in_def(call, working_set, location, id_ref),
|
||||
"module" | "export module" => try_find_id_in_mod(call, working_set, location, id_ref),
|
||||
"use" | "export use" | "hide" => {
|
||||
try_find_id_in_use(call, working_set, location, id_ref, call_name)
|
||||
}
|
||||
b"overlay use" | b"overlay hide" => {
|
||||
"overlay use" | "overlay hide" => {
|
||||
try_find_id_in_overlay(call, working_set, location, id_ref)
|
||||
}
|
||||
_ => None,
|
||||
@ -141,7 +141,7 @@ fn try_find_id_in_use(
|
||||
working_set: &StateWorkingSet,
|
||||
location: Option<&usize>,
|
||||
id: Option<&Id>,
|
||||
call_name: &[u8],
|
||||
call_name: &str,
|
||||
) -> Option<(Id, Span)> {
|
||||
// TODO: for keyword `hide`, the decl/var is already hidden in working_set,
|
||||
// this function will always return None.
|
||||
@ -176,7 +176,7 @@ fn try_find_id_in_use(
|
||||
if let Some(pos) = location {
|
||||
// first argument of `use` should always be module name
|
||||
// while it is optional in `hide`
|
||||
if span.contains(*pos) && call_name != b"hide" {
|
||||
if span.contains(*pos) && call_name != "hide" {
|
||||
return get_matched_module_id(working_set, span, id);
|
||||
}
|
||||
}
|
||||
@ -196,7 +196,7 @@ fn try_find_id_in_use(
|
||||
})
|
||||
};
|
||||
|
||||
let arguments = if call_name != b"hide" {
|
||||
let arguments = if call_name != "hide" {
|
||||
call.arguments.get(1..)?
|
||||
} else {
|
||||
call.arguments.as_slice()
|
||||
|
@ -28,22 +28,15 @@ impl LanguageServer {
|
||||
.and_then(|s| s.chars().next())
|
||||
.is_some_and(|c| c.is_whitespace() || "|(){}[]<>,:;".contains(c));
|
||||
|
||||
let (results, engine_state) = if need_fallback {
|
||||
let engine_state = Arc::new(self.initial_engine_state.clone());
|
||||
let completer = NuCompleter::new(engine_state.clone(), Arc::new(Stack::new()));
|
||||
(
|
||||
completer.fetch_completions_at(&file_text[..location], location),
|
||||
engine_state,
|
||||
)
|
||||
} else {
|
||||
self.need_parse |= need_fallback;
|
||||
let engine_state = Arc::new(self.new_engine_state());
|
||||
let completer = NuCompleter::new(engine_state.clone(), Arc::new(Stack::new()));
|
||||
let results = if need_fallback {
|
||||
completer.fetch_completions_at(&file_text[..location], location)
|
||||
} else {
|
||||
let file_path = uri_to_path(&path_uri);
|
||||
let filename = file_path.to_str()?;
|
||||
(
|
||||
completer.fetch_completions_within_file(filename, location, &file_text),
|
||||
engine_state,
|
||||
)
|
||||
completer.fetch_completions_within_file(filename, location, &file_text)
|
||||
};
|
||||
|
||||
let docs = self.docs.lock().ok()?;
|
||||
@ -63,10 +56,8 @@ impl LanguageServer {
|
||||
}
|
||||
|
||||
let span = r.suggestion.span;
|
||||
let range = span_to_range(&Span::new(span.start, span.end), file, 0);
|
||||
|
||||
let text_edit = Some(CompletionTextEdit::Edit(TextEdit {
|
||||
range,
|
||||
range: span_to_range(&Span::new(span.start, span.end), file, 0),
|
||||
new_text: label_value.clone(),
|
||||
}));
|
||||
|
||||
@@ -236,7 +227,7 @@ mod tests {
"detail": "Edit nu configurations.",
"textEdit": { "range": { "start": { "line": 0, "character": 0 }, "end": { "line": 0, "character": 8 }, },
"newText": "config nu "
}
},
},
])
);
@@ -549,4 +540,96 @@ mod tests {
])
);
}

#[test]
fn complete_use_arguments() {
let (client_connection, _recv) = initialize_language_server(None, None);

let mut script = fixtures();
script.push("lsp");
script.push("completion");
script.push("use.nu");
let script = path_to_uri(&script);

open_unchecked(&client_connection, script.clone());
let resp = send_complete_request(&client_connection, script.clone(), 4, 17);
assert_json_include!(
actual: result_from_message(resp),
expected: serde_json::json!([
{
"label": "std-rfc",
"labelDetails": { "description": "module" },
"textEdit": {
"newText": "std-rfc",
"range": { "start": { "character": 11, "line": 4 }, "end": { "character": 17, "line": 4 } }
},
"kind": 9 // module kind
}
])
);

let resp = send_complete_request(&client_connection, script.clone(), 5, 22);
assert_json_include!(
actual: result_from_message(resp),
expected: serde_json::json!([
{
"label": "clip",
"labelDetails": { "description": "module" },
"textEdit": {
"newText": "clip",
"range": { "start": { "character": 19, "line": 5 }, "end": { "character": 23, "line": 5 } }
},
"kind": 9 // module kind
}
])
);

let resp = send_complete_request(&client_connection, script.clone(), 5, 35);
assert_json_include!(
actual: result_from_message(resp),
expected: serde_json::json!([
{
"label": "paste",
"labelDetails": { "description": "custom" },
"textEdit": {
"newText": "paste",
"range": { "start": { "character": 32, "line": 5 }, "end": { "character": 37, "line": 5 } }
},
"kind": 2
}
])
);

let resp = send_complete_request(&client_connection, script.clone(), 6, 14);
assert_json_include!(
actual: result_from_message(resp),
expected: serde_json::json!([
{
"label": "null_device",
"labelDetails": { "description": "variable" },
"textEdit": {
"newText": "null_device",
"range": { "start": { "character": 8, "line": 6 }, "end": { "character": 14, "line": 6 } }
},
"kind": 6 // variable kind
}
])
);

let resp = send_complete_request(&client_connection, script, 7, 13);
assert_json_include!(
actual: result_from_message(resp),
expected: serde_json::json!([
{
"label": "foo",
"labelDetails": { "description": "variable" },
"textEdit": {
"newText": "foo",
"range": { "start": { "character": 11, "line": 7 }, "end": { "character": 14, "line": 7 } }
},
"kind": 6 // variable kind
}
])
);
}
}

@@ -440,6 +440,7 @@ mod tests {
TextDocumentPositionParams, WorkDoneProgressParams,
};
use nu_protocol::{debugger::WithoutDebug, engine::Stack, PipelineData, ShellError, Value};
use nu_std::load_standard_library;
use std::sync::mpsc::{self, Receiver};
use std::time::Duration;

@@ -455,6 +456,7 @@ mod tests {
let engine_state = nu_cmd_lang::create_default_context();
let mut engine_state = nu_command::add_shell_command_context(engine_state);
engine_state.generate_nu_constant();
assert!(load_standard_library(&mut engine_state).is_ok());
let cwd = std::env::current_dir().expect("Could not get current working directory.");
engine_state.add_env_var(
"PWD".into(),

@@ -658,7 +658,7 @@ mod tests {

let message_num = 5;
let messages =
send_reference_request(&client_connection, script.clone(), 6, 11, message_num);
send_reference_request(&client_connection, script.clone(), 6, 12, message_num);
assert_eq!(messages.len(), message_num);
for message in messages {
match message {
@@ -676,7 +676,7 @@ mod tests {
assert!(array.contains(&serde_json::json!(
{
"uri": script.to_string(),
"range": { "start": { "line": 6, "character": 12 }, "end": { "line": 6, "character": 19 } }
"range": { "start": { "line": 6, "character": 13 }, "end": { "line": 6, "character": 20 } }
}
)
));
@@ -712,7 +712,7 @@ mod tests {
&client_connection,
script.clone(),
6,
11,
12,
message_num,
false,
);
@@ -723,8 +723,8 @@ mod tests {
Message::Response(r) => assert_json_eq!(
r.result,
serde_json::json!({
"start": { "line": 6, "character": 12 },
"end": { "line": 6, "character": 19 }
"start": { "line": 6, "character": 13 },
"end": { "line": 6, "character": 20 }
}),
),
_ => panic!("unexpected message type"),
@@ -738,7 +738,7 @@ mod tests {
changes[script.to_string()],
serde_json::json!([
{
"range": { "start": { "line": 6, "character": 12 }, "end": { "line": 6, "character": 19 } },
"range": { "start": { "line": 6, "character": 13 }, "end": { "line": 6, "character": 20 } },
"newText": "new"
}
])
@@ -860,7 +860,7 @@ mod tests {
&client_connection,
script.clone(),
6,
11,
12,
message_num,
true,
);

@@ -40,7 +40,7 @@ pub fn has_trailing_slash(path: &Path) -> bool {
#[cfg(target_arch = "wasm32")]
pub fn has_trailing_slash(path: &Path) -> bool {
// on the web, paths are often just URLs separated by forward slashes
path.to_str().map_or(false, |s| s.ends_with('/'))
path.to_str().is_some_and(|s| s.ends_with('/'))
}

#[cfg(test)]

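Aside (not part of this commit): `Option::is_some_and` is the Clippy-preferred replacement for the `map_or(false, ...)` form removed above. A minimal standalone sketch:

use std::path::Path;

fn has_trailing_slash(path: &Path) -> bool {
    // Returns false when `to_str()` is None (a non-UTF-8 path) and otherwise
    // applies the predicate to the contained &str, exactly like the old
    // `map_or(false, ...)` form but without spelling out the default.
    path.to_str().is_some_and(|s| s.ends_with('/'))
}
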
@@ -295,7 +295,8 @@ $env.config.display_errors.termination_signal = true
$env.config.footer_mode = 25

# table.*
# table_mode (string):
# mode (string):
# Specifies the visual display style of a table
# One of: "default", "basic", "compact", "compact_double", "heavy", "light", "none", "reinforced",
# "rounded", "thin", "with_love", "psql", "markdown", "dots", "restructured", "ascii_rounded",
# or "basic_compact"

@@ -39,14 +39,14 @@ impl PluginCommand for ToRepr {
result: Some(Value::string(
r#"
shape: (2, 2)
┌─────────────────────┬─────┐
┌─────────────────────────┬─────┐
│ a ┆ b │
│ --- ┆ --- │
│ datetime[ns] ┆ i64 │
╞═════════════════════╪═════╡
│ 2025-01-01 00:00:00 ┆ 2 │
│ 2025-01-02 00:00:00 ┆ 4 │
└─────────────────────┴─────┘"#
│ datetime[ns, UTC] ┆ i64 │
╞═════════════════════════╪═════╡
│ 2025-01-01 00:00:00 UTC ┆ 2 │
│ 2025-01-02 00:00:00 UTC ┆ 4 │
└─────────────────────────┴─────┘"#
.trim(),
Span::test_data(),
)),
@@ -54,18 +54,18 @@ shape: (2, 2)
Example {
description: "Shows lazy dataframe in repr format",
example:
"[[a b]; [2025-01-01 2] [2025-01-02 4]] | polars into-df | polars into-lazy | polars into-repr",
"[[a b]; [2025-01-01 2] [2025-01-02 4]] | polars into-lazy | polars into-repr",
result: Some(Value::string(
r#"
shape: (2, 2)
┌─────────────────────┬─────┐
┌─────────────────────────┬─────┐
│ a ┆ b │
│ --- ┆ --- │
│ datetime[ns] ┆ i64 │
╞═════════════════════╪═════╡
│ 2025-01-01 00:00:00 ┆ 2 │
│ 2025-01-02 00:00:00 ┆ 4 │
└─────────────────────┴─────┘"#
│ datetime[ns, UTC] ┆ i64 │
╞═════════════════════════╪═════╡
│ 2025-01-01 00:00:00 UTC ┆ 2 │
│ 2025-01-02 00:00:00 UTC ┆ 4 │
└─────────────────────────┴─────┘"#
.trim(),
Span::test_data(),
)),

@@ -1,6 +1,7 @@
use crate::{values::CustomValueSupport, PolarsPlugin};
use std::sync::Arc;

use super::super::super::values::{Column, NuDataFrame};
use super::super::super::values::{Column, NuDataFrame, NuSchema};

use chrono::DateTime;
use nu_plugin::{EngineInterface, EvaluatedCall, PluginCommand};
@@ -8,7 +9,7 @@ use nu_protocol::{
Category, Example, LabeledError, PipelineData, ShellError, Signature, Span, SyntaxShape, Type,
Value,
};
use polars::prelude::{IntoSeries, StringMethods, TimeUnit};
use polars::prelude::{DataType, Field, IntoSeries, Schema, StringMethods, TimeUnit};

#[derive(Clone)]
pub struct AsDateTime;
@@ -43,6 +44,7 @@ impl PluginCommand for AsDateTime {
Signature::build(self.name())
.required("format", SyntaxShape::String, "formatting date time string")
.switch("not-exact", "the format string may be contained in the date (e.g. foo-2021-01-01-bar could match 2021-01-01)", Some('n'))
.switch("naive", "the input datetimes should be parsed as naive (i.e., not timezone-aware)", None)
.input_output_type(
Type::Custom("dataframe".into()),
Type::Custom("dataframe".into()),
@@ -54,7 +56,7 @@ impl PluginCommand for AsDateTime {
vec![
Example {
description: "Converts string to datetime",
example: r#"["2021-12-30 00:00:00" "2021-12-31 00:00:00"] | polars into-df | polars as-datetime "%Y-%m-%d %H:%M:%S""#,
example: r#"["2021-12-30 00:00:00 -0400" "2021-12-31 00:00:00 -0400"] | polars into-df | polars as-datetime "%Y-%m-%d %H:%M:%S %z""#,
result: Some(
NuDataFrame::try_from_columns(
vec![Column::new(
@@ -62,7 +64,7 @@ impl PluginCommand for AsDateTime {
vec![
Value::date(
DateTime::parse_from_str(
"2021-12-30 00:00:00 +0000",
"2021-12-30 00:00:00 -0400",
"%Y-%m-%d %H:%M:%S %z",
)
.expect("date calculation should not fail in test"),
@@ -70,7 +72,7 @@ impl PluginCommand for AsDateTime {
),
Value::date(
DateTime::parse_from_str(
"2021-12-31 00:00:00 +0000",
"2021-12-31 00:00:00 -0400",
"%Y-%m-%d %H:%M:%S %z",
)
.expect("date calculation should not fail in test"),
@@ -86,7 +88,7 @@ impl PluginCommand for AsDateTime {
},
Example {
description: "Converts string to datetime with high resolutions",
example: r#"["2021-12-30 00:00:00.123456789" "2021-12-31 00:00:00.123456789"] | polars into-df | polars as-datetime "%Y-%m-%d %H:%M:%S.%9f""#,
example: r#"["2021-12-30 00:00:00.123456789" "2021-12-31 00:00:00.123456789"] | polars into-df | polars as-datetime "%Y-%m-%d %H:%M:%S.%9f" --naive"#,
result: Some(
NuDataFrame::try_from_columns(
vec![Column::new(
@@ -110,7 +112,15 @@ impl PluginCommand for AsDateTime {
),
],
)],
None,
Some(NuSchema::new(Arc::new(Schema::from_iter(vec![
Field::new(
"datetime".into(),
DataType::Datetime(
TimeUnit::Nanoseconds,
None
),
),
])))),
)
.expect("simple df for test should not fail")
.into_value(Span::test_data()),
@@ -118,7 +128,7 @@ impl PluginCommand for AsDateTime {
},
Example {
description: "Converts string to datetime using the `--not-exact` flag even with excessive symbols",
example: r#"["2021-12-30 00:00:00 GMT+4"] | polars into-df | polars as-datetime "%Y-%m-%d %H:%M:%S" --not-exact"#,
example: r#"["2021-12-30 00:00:00 GMT+4"] | polars into-df | polars as-datetime "%Y-%m-%d %H:%M:%S" --not-exact --naive"#,
result: Some(
NuDataFrame::try_from_columns(
vec![Column::new(
@@ -134,7 +144,15 @@ impl PluginCommand for AsDateTime {
),
],
)],
None,
Some(NuSchema::new(Arc::new(Schema::from_iter(vec![
Field::new(
"datetime".into(),
DataType::Datetime(
TimeUnit::Nanoseconds,
None
),
),
])))),
)
.expect("simple df for test should not fail")
.into_value(Span::test_data()),
@@ -162,6 +180,7 @@ fn command(
) -> Result<PipelineData, ShellError> {
let format: String = call.req(0)?;
let not_exact = call.has_flag("not-exact")?;
let tz_aware = !call.has_flag("naive")?;

let df = NuDataFrame::try_from_pipeline_coerce(plugin, input, call.head)?;
let series = df.as_series(call.head)?;
@@ -177,7 +196,7 @@ fn command(
casted.as_datetime_not_exact(
Some(format.as_str()),
TimeUnit::Nanoseconds,
false,
tz_aware,
None,
&Default::default(),
)
@@ -186,7 +205,7 @@ fn command(
Some(format.as_str()),
TimeUnit::Nanoseconds,
false,
false,
tz_aware,
None,
&Default::default(),
)

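Aside (not part of this commit): the new `--naive` switch simply toggles `tz_aware`, i.e. whether the parsed column carries a timezone. A minimal chrono sketch of the two parsing modes the examples above exercise:

use chrono::{DateTime, NaiveDateTime};

fn demo() {
    // Timezone-aware: the format carries an offset (`%z`), so the column ends
    // up with a datetime[ns, TZ]-style dtype.
    let aware = DateTime::parse_from_str("2021-12-30 00:00:00 -0400", "%Y-%m-%d %H:%M:%S %z");
    assert!(aware.is_ok());

    // Naive (`--naive`): no offset in the input or format; the column stays
    // datetime[ns] with no timezone attached.
    let naive = NaiveDateTime::parse_from_str("2021-12-30 00:00:00", "%Y-%m-%d %H:%M:%S");
    assert!(naive.is_ok());
}
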
@@ -1,7 +1,8 @@
use crate::values::NuExpression;
use std::sync::Arc;

use crate::{
dataframe::values::{Column, NuDataFrame},
dataframe::values::{Column, NuDataFrame, NuSchema},
values::CustomValueSupport,
PolarsPlugin,
};
@@ -13,7 +14,7 @@ use nu_protocol::{
};
use polars::{
datatypes::{DataType, TimeUnit},
prelude::NamedFrom,
prelude::{Field, NamedFrom, Schema},
series::Series,
};

@@ -54,14 +55,20 @@ impl PluginCommand for ExprDatePart {
vec![
Example {
description: "Creates an expression to capture the year date part",
example: r#"[["2021-12-30T01:02:03.123456789"]] | polars into-df | polars as-datetime "%Y-%m-%dT%H:%M:%S.%9f" | polars with-column [(polars col datetime | polars datepart year | polars as datetime_year )]"#,
example: r#"[["2021-12-30T01:02:03.123456789"]] | polars into-df | polars as-datetime "%Y-%m-%dT%H:%M:%S.%9f" --naive | polars with-column [(polars col datetime | polars datepart year | polars as datetime_year )]"#,
result: Some(
NuDataFrame::try_from_columns(
vec![
Column::new("datetime".to_string(), vec![Value::test_date(dt)]),
Column::new("datetime_year".to_string(), vec![Value::test_int(2021)]),
],
None,
Some(NuSchema::new(Arc::new(Schema::from_iter(vec![
Field::new(
"datetime".into(),
DataType::Datetime(TimeUnit::Nanoseconds, None),
),
Field::new("datetime_year".into(), DataType::Int64),
])))),
)
.expect("simple df for test should not fail")
.into_value(Span::test_data()),
@@ -69,7 +76,7 @@ impl PluginCommand for ExprDatePart {
},
Example {
description: "Creates an expression to capture multiple date parts",
example: r#"[["2021-12-30T01:02:03.123456789"]] | polars into-df | polars as-datetime "%Y-%m-%dT%H:%M:%S.%9f" |
example: r#"[["2021-12-30T01:02:03.123456789"]] | polars into-df | polars as-datetime "%Y-%m-%dT%H:%M:%S.%9f" --naive |
polars with-column [ (polars col datetime | polars datepart year | polars as datetime_year ),
(polars col datetime | polars datepart month | polars as datetime_month ),
(polars col datetime | polars datepart day | polars as datetime_day ),

@@ -245,7 +245,10 @@ fn value_to_data_type(value: &Value) -> Option<DataType> {
Value::Float { .. } => Some(DataType::Float64),
Value::String { .. } => Some(DataType::String),
Value::Bool { .. } => Some(DataType::Boolean),
Value::Date { .. } => Some(DataType::Date),
Value::Date { .. } => Some(DataType::Datetime(
TimeUnit::Nanoseconds,
Some(PlSmallStr::from_static("UTC")),
)),
Value::Duration { .. } => Some(DataType::Duration(TimeUnit::Nanoseconds)),
Value::Filesize { .. } => Some(DataType::Int64),
Value::Binary { .. } => Some(DataType::Binary),
@@ -447,13 +450,13 @@ fn typed_column_to_series(name: PlSmallStr, column: TypedColumn) -> Result<Serie
.values
.iter()
.map(|v| {
if let Value::Date { val, .. } = &v {
match (maybe_tz, &v) {
(Some(tz), Value::Date { val, .. }) => {
// If there is a timezone specified, make sure
// the value is converted to it
Ok(maybe_tz
.as_ref()
.map(|tz| tz.parse::<Tz>().map(|tz| val.with_timezone(&tz)))
.transpose()
Ok(tz
.parse::<Tz>()
.map(|tz| val.with_timezone(&tz))
.map_err(|e| ShellError::GenericError {
error: "Error parsing timezone".into(),
msg: "".into(),
@@ -461,10 +464,14 @@ fn typed_column_to_series(name: PlSmallStr, column: TypedColumn) -> Result<Serie
help: Some(e.to_string()),
inner: vec![],
})?
.and_then(|dt| dt.timestamp_nanos_opt())
.timestamp_nanos_opt()
.map(|nanos| nanos_from_timeunit(nanos, *tu)))
} else {
Ok(None)
}
(None, Value::Date { val, .. }) => Ok(val
.timestamp_nanos_opt()
.map(|nanos| nanos_from_timeunit(nanos, *tu))),

_ => Ok(None),
}
})
.collect::<Result<Vec<Option<i64>>, ShellError>>()?;

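Aside (not part of this commit): the rewritten match above converts a date value into the requested timezone before taking its epoch nanoseconds. A standalone chrono/chrono-tz sketch of that step (the helper name is made up for illustration):

use chrono::{DateTime, FixedOffset};
use chrono_tz::Tz;

fn to_nanos_in_tz(val: &DateTime<FixedOffset>, tz_name: &str) -> Option<i64> {
    // Parse the timezone name (e.g. "UTC"), convert the value into it, then
    // take the timestamp in nanoseconds, as the series builder above does.
    let tz: Tz = tz_name.parse().ok()?;
    val.with_timezone(&tz).timestamp_nanos_opt()
}
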
@@ -71,7 +71,7 @@ impl Default for DataFrameValue {

impl PartialEq for DataFrameValue {
fn eq(&self, other: &Self) -> bool {
self.0.partial_cmp(&other.0).map_or(false, Ordering::is_eq)
self.0.partial_cmp(&other.0).is_some_and(Ordering::is_eq)
}
}
impl Eq for DataFrameValue {}

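Aside (not part of this commit): the same `is_some_and` idiom applied to an `Ordering`; the `Wrapper` type here is hypothetical and only illustrates the pattern:

use std::cmp::Ordering;

struct Wrapper(f64);

impl PartialEq for Wrapper {
    fn eq(&self, other: &Self) -> bool {
        // An incomparable pair (partial_cmp == None) counts as "not equal",
        // exactly like the old map_or(false, ...) form.
        self.0.partial_cmp(&other.0).is_some_and(Ordering::is_eq)
    }
}
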
@@ -169,6 +169,67 @@ pub fn str_to_dtype(dtype: &str, span: Span) -> Result<DataType, ShellError> {
let time_unit = str_to_time_unit(next, span)?;
Ok(DataType::Duration(time_unit))
}
_ if dtype.starts_with("decimal") => {
let dtype = dtype
.trim_start_matches("decimal")
.trim_start_matches('<')
.trim_end_matches('>');
let mut split = dtype.split(',');
let next = split
.next()
.ok_or_else(|| ShellError::GenericError {
error: "Invalid polars data type".into(),
msg: "Missing decimal precision".into(),
span: Some(span),
help: None,
inner: vec![],
})?
.trim();
let precision = match next {
"*" => None, // infer
_ => Some(
next.parse::<usize>()
.map_err(|e| ShellError::GenericError {
error: "Invalid polars data type".into(),
msg: format!("Error in parsing decimal precision: {e}"),
span: Some(span),
help: None,
inner: vec![],
})?,
),
};

let next = split
.next()
.ok_or_else(|| ShellError::GenericError {
error: "Invalid polars data type".into(),
msg: "Missing decimal scale".into(),
span: Some(span),
help: None,
inner: vec![],
})?
.trim();
let scale = match next {
"*" => Err(ShellError::GenericError {
error: "Invalid polars data type".into(),
msg: "`*` is not a permitted value for scale".into(),
span: Some(span),
help: None,
inner: vec![],
}),
_ => next
.parse::<usize>()
.map(Some)
.map_err(|e| ShellError::GenericError {
error: "Invalid polars data type".into(),
msg: format!("Error in parsing decimal scale: {e}"),
span: Some(span),
help: None,
inner: vec![],
}),
}?;
Ok(DataType::Decimal(precision, scale))
}
_ => Err(ShellError::GenericError {
error: "Invalid polars data type".into(),
msg: format!("Unknown type: {dtype}"),
@@ -367,6 +428,24 @@ mod test {
assert_eq!(schema, expected);
}

#[test]
fn test_dtype_str_schema_decimal() {
let dtype = "decimal<7,2>";
let schema = str_to_dtype(dtype, Span::unknown()).unwrap();
let expected = DataType::Decimal(Some(7usize), Some(2usize));
assert_eq!(schema, expected);

// "*" is not a permitted value for scale
let dtype = "decimal<7,*>";
let schema = str_to_dtype(dtype, Span::unknown());
assert!(matches!(schema, Err(ShellError::GenericError { .. })));

let dtype = "decimal<*,2>";
let schema = str_to_dtype(dtype, Span::unknown()).unwrap();
let expected = DataType::Decimal(None, Some(2usize));
assert_eq!(schema, expected);
}

#[test]
fn test_dtype_str_to_schema_list_types() {
let dtype = "list<i32>";
@@ -383,5 +462,19 @@ mod test {
let schema = str_to_dtype(dtype, Span::unknown()).unwrap();
let expected = DataType::List(Box::new(DataType::Datetime(TimeUnit::Milliseconds, None)));
assert_eq!(schema, expected);

let dtype = "list<decimal<7,2>>";
let schema = str_to_dtype(dtype, Span::unknown()).unwrap();
let expected = DataType::List(Box::new(DataType::Decimal(Some(7usize), Some(2usize))));
assert_eq!(schema, expected);

let dtype = "list<decimal<*,2>>";
let schema = str_to_dtype(dtype, Span::unknown()).unwrap();
let expected = DataType::List(Box::new(DataType::Decimal(None, Some(2usize))));
assert_eq!(schema, expected);

let dtype = "list<decimal<7,*>>";
let schema = str_to_dtype(dtype, Span::unknown());
assert!(matches!(schema, Err(ShellError::GenericError { .. })));
}
}

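Aside (not part of this commit): a simplified, self-contained sketch of the `decimal<precision,scale>` rule that the new match arm and tests above cover; `parse_decimal` is a made-up name used only for illustration:

fn parse_decimal(dtype: &str) -> Result<(Option<usize>, Option<usize>), String> {
    // `*` means "infer" and is only accepted for the precision.
    let inner = dtype
        .trim_start_matches("decimal")
        .trim_start_matches('<')
        .trim_end_matches('>');
    let mut parts = inner.split(',').map(str::trim);
    let precision = match parts.next().ok_or("missing precision")? {
        "*" => None,
        p => Some(p.parse::<usize>().map_err(|e| e.to_string())?),
    };
    let scale = match parts.next().ok_or("missing scale")? {
        "*" => return Err("`*` is not a permitted value for scale".into()),
        s => Some(s.parse::<usize>().map_err(|e| e.to_string())?),
    };
    Ok((precision, scale)) // e.g. "decimal<7,2>" -> (Some(7), Some(2))
}
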
@@ -66,7 +66,7 @@ impl WebTable {
let mut tables = html
.select(&sel_table)
.filter(|table| {
table.select(&sel_tr).next().map_or(false, |tr| {
table.select(&sel_tr).next().is_some_and(|tr| {
let cells = select_cells(tr, &sel_th, true);
if inspect_mode {
eprintln!("Potential HTML Headers = {:?}\n", &cells);

@@ -16,4 +16,4 @@ profile = "default"
# use in nushell, we may opt to use the bleeding edge stable version of rust.
# I believe rust is on a 6 week release cycle and nushell is on a 4 week release cycle.
# So, every two nushell releases, this version number should be bumped by one.
channel = "1.83.0"
channel = "1.84.1"

tests/fixtures/lsp/completion/use.nu (vendored, new file, 8 lines)
@@ -0,0 +1,8 @@
export module "🤔🐘" {
export const foo = "🤔🐘";
}

export use std-rf
export use std-rfc/clip [ copy, paste ]
use std null_d
use 🤔🐘 [ foo, ]

tests/fixtures/partial_completions/partial/hol/foo.txt (vendored, new empty file)