Mirror of https://github.com/nushell/nushell.git (synced 2025-01-24 07:09:02 +01:00)
Merge branch 'main' into polars_s3
Commit: 4c5467d595
.github/workflows/typos.yml (vendored, 2 lines changed)

@@ -10,4 +10,4 @@ jobs:
         uses: actions/checkout@v4.1.7

       - name: Check spelling
-        uses: crate-ci/typos@v1.28.1
+        uses: crate-ci/typos@v1.28.2
Cargo.lock (generated, 34 lines changed)

@@ -907,9 +907,9 @@ checksum = "1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b"

 [[package]]
 name = "bytes"
-version = "1.8.0"
+version = "1.9.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9ac0150caa2ae65ca5bd83f25c7de183dea78d4d366469f148435e2acfbad0da"
+checksum = "325918d6fe32f23b19878fe4b34794ae41fc19ddbe53b10571a4874d44ffd39b"

 [[package]]
 name = "bytes-utils"

@@ -2784,9 +2784,9 @@ dependencies = [

 [[package]]
 name = "indexmap"
-version = "2.6.0"
+version = "2.7.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "707907fe3c25f5424cce2cb7e1cbcafee6bdbe735ca90ef77c29e84591e5b9da"
+checksum = "62f822373a4fe84d4bb149bf54e584a7f4abec90e072ed49cda0edea5b95471f"
 dependencies = [
  "equivalent",
  "hashbrown 0.15.1",

@@ -2962,12 +2962,6 @@ dependencies = [
  "libc",
 ]

-[[package]]
-name = "joinery"
-version = "2.1.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "72167d68f5fce3b8655487b8038691a3c9984ee769590f93f2a631f4ad64e4f5"
-
 [[package]]
 name = "js-sys"
 version = "0.3.72"

@@ -3454,9 +3448,9 @@ dependencies = [

 [[package]]
 name = "multipart-rs"
-version = "0.1.11"
+version = "0.1.13"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "22ea34e5c0fa65ba84707cfaf5bf43d500f1c5a4c6c36327bf5541c5bcd17e98"
+checksum = "64cae00e7e52aa5072342ef9a2ccd71669be913c2176a81a665b1f9cd79345f2"
 dependencies = [
  "bytes",
  "futures-core",

@@ -6034,7 +6028,7 @@ dependencies = [
 [[package]]
 name = "reedline"
 version = "0.37.0"
-source = "git+https://github.com/nushell/reedline?branch=main#3c46dc2c0c69476a625611a556e67ddb8439629c"
+source = "git+https://github.com/nushell/reedline?branch=main#9eb3c2dd1375119c7f6bb8ecac07b715e72fe692"
 dependencies = [
  "arboard",
  "chrono",

@@ -6266,9 +6260,9 @@ dependencies = [

 [[package]]
 name = "roxmltree"
-version = "0.19.0"
+version = "0.20.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "3cd14fd5e3b777a7422cca79358c57a8f6e3a703d9ac187448d0daf220c2407f"
+checksum = "6c20b6793b5c2fa6553b250154b78d6d0db37e72700ae35fad9387a46f487c97"

 [[package]]
 name = "rstest"

@@ -7475,12 +7469,10 @@ checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20"

 [[package]]
 name = "titlecase"
-version = "2.2.1"
+version = "3.3.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "38397a8cdb017cfeb48bf6c154d6de975ac69ffeed35980fde199d2ee0842042"
+checksum = "e0e20e744fbec1913fa168f3ffbef64324bbcb152c6cda8394baa79fa5ec9142"
 dependencies = [
- "joinery",
- "lazy_static",
  "regex",
 ]

@@ -7825,9 +7817,9 @@ checksum = "8ecb6da28b8a351d773b68d5825ac39017e680750f980f3a1a85cd8dd28a47c1"

 [[package]]
 name = "ureq"
-version = "2.10.1"
+version = "2.12.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b74fc6b57825be3373f7054754755f03ac3a8f5d70015ccad699ba2029956f4a"
+checksum = "3193f92e105038f98ae68af40c008e3c94f2f046926e0f95e6c835dc6459bac8"
 dependencies = [
  "base64 0.22.1",
  "encoding_rs",
Cargo.toml (10 lines changed)

@@ -92,7 +92,7 @@ filetime = "0.2"
 fuzzy-matcher = "0.3"
 heck = "0.5.0"
 human-date-parser = "0.2.0"
-indexmap = "2.6"
+indexmap = "2.7"
 indicatif = "0.17"
 interprocess = "2.2.0"
 is_executable = "1.0"

@@ -110,7 +110,7 @@ miette = "7.3"
 mime = "0.3.17"
 mime_guess = "2.0"
 mockito = { version = "1.6", default-features = false }
-multipart-rs = "0.1.11"
+multipart-rs = "0.1.13"
 native-tls = "0.2"
 nix = { version = "0.29", default-features = false }
 notify-debouncer-full = { version = "0.3", default-features = false }

@@ -143,7 +143,7 @@ regex = "1.9.5"
 rmp = "0.8"
 rmp-serde = "1.3"
 ropey = "1.6.1"
-roxmltree = "0.19"
+roxmltree = "0.20"
 rstest = { version = "0.23", default-features = false }
 rusqlite = "0.31"
 rust-embed = "8.5.0"

@@ -159,13 +159,13 @@ sysinfo = "0.32"
 tabled = { version = "0.16.0", default-features = false }
 tempfile = "3.14"
 terminal_size = "0.4"
-titlecase = "2.0"
+titlecase = "3.0"
 toml = "0.8"
 trash = "5.2"
 umask = "2.1"
 unicode-segmentation = "1.12"
 unicode-width = "0.2"
-ureq = { version = "2.10", default-features = false }
+ureq = { version = "2.12", default-features = false }
 url = "2.2"
 uu_cp = "0.0.28"
 uu_mkdir = "0.0.28"
@@ -41,8 +41,7 @@ impl CommandCompletion {
     ) -> HashMap<String, SemanticSuggestion> {
         let mut suggs = HashMap::new();

-        // os agnostic way to get the PATH env var
-        let paths = working_set.permanent_state.get_path_env_var();
+        let paths = working_set.permanent_state.get_env_var_insensitive("path");

         if let Some(paths) = paths {
             if let Ok(paths) = paths.as_list() {
@@ -297,7 +297,7 @@ impl NuCompleter {
                         let mut completer =
                             OperatorCompletion::new(pipeline_element.expr.clone());

-                        return self.process_completion(
+                        let operator_suggestion = self.process_completion(
                             &mut completer,
                             &working_set,
                             prefix,

@@ -305,6 +305,9 @@ impl NuCompleter {
                             fake_offset,
                             pos,
                         );
+                        if !operator_suggestion.is_empty() {
+                            return operator_suggestion;
+                        }
                     }
                 }
             }
@@ -357,6 +357,39 @@ fn file_completions() {
     // Match the results
     match_suggestions(&expected_paths, &suggestions);

+    // Test completions for the current folder even with parts before the autocompletion
+    let target_dir = format!("cp somefile.txt {dir_str}{MAIN_SEPARATOR}");
+    let suggestions = completer.complete(&target_dir, target_dir.len());
+
+    // Create the expected values
+    let expected_paths: Vec<String> = vec![
+        folder(dir.join("another")),
+        file(dir.join("custom_completion.nu")),
+        folder(dir.join("directory_completion")),
+        file(dir.join("nushell")),
+        folder(dir.join("test_a")),
+        folder(dir.join("test_b")),
+        file(dir.join(".hidden_file")),
+        folder(dir.join(".hidden_folder")),
+    ];
+
+    #[cfg(windows)]
+    {
+        let separator = '/';
+        let target_dir = format!("cp somefile.txt {dir_str}{separator}");
+        let slash_suggestions = completer.complete(&target_dir, target_dir.len());
+
+        let expected_slash_paths: Vec<String> = expected_paths
+            .iter()
+            .map(|s| s.replace('\\', "/"))
+            .collect();
+
+        match_suggestions(&expected_slash_paths, &slash_suggestions);
+    }
+
+    // Match the results
+    match_suggestions(&expected_paths, &suggestions);
+
     // Test completions for a file
     let target_dir = format!("cp {}", folder(dir.join("another")));
     let suggestions = completer.complete(&target_dir, target_dir.len());

@@ -391,6 +424,75 @@ fn file_completions() {
     match_suggestions(&expected_paths, &suggestions);
 }

+#[test]
+fn custom_command_rest_any_args_file_completions() {
+    // Create a new engine
+    let (dir, dir_str, mut engine, mut stack) = new_engine();
+    let command = r#"def list [ ...args: any ] {}"#;
+    assert!(support::merge_input(command.as_bytes(), &mut engine, &mut stack).is_ok());
+
+    // Instantiate a new completer
+    let mut completer = NuCompleter::new(Arc::new(engine), Arc::new(stack));
+
+    // Test completions for the current folder
+    let target_dir = format!("list {dir_str}{MAIN_SEPARATOR}");
+    let suggestions = completer.complete(&target_dir, target_dir.len());
+
+    // Create the expected values
+    let expected_paths: Vec<String> = vec![
+        folder(dir.join("another")),
+        file(dir.join("custom_completion.nu")),
+        folder(dir.join("directory_completion")),
+        file(dir.join("nushell")),
+        folder(dir.join("test_a")),
+        folder(dir.join("test_b")),
+        file(dir.join(".hidden_file")),
+        folder(dir.join(".hidden_folder")),
+    ];
+
+    // Match the results
+    match_suggestions(&expected_paths, &suggestions);
+
+    // Test completions for the current folder even with parts before the autocompletion
+    let target_dir = format!("list somefile.txt {dir_str}{MAIN_SEPARATOR}");
+    let suggestions = completer.complete(&target_dir, target_dir.len());
+
+    // Create the expected values
+    let expected_paths: Vec<String> = vec![
+        folder(dir.join("another")),
+        file(dir.join("custom_completion.nu")),
+        folder(dir.join("directory_completion")),
+        file(dir.join("nushell")),
+        folder(dir.join("test_a")),
+        folder(dir.join("test_b")),
+        file(dir.join(".hidden_file")),
+        folder(dir.join(".hidden_folder")),
+    ];
+
+    // Match the results
+    match_suggestions(&expected_paths, &suggestions);
+
+    // Test completions for a file
+    let target_dir = format!("list {}", folder(dir.join("another")));
+    let suggestions = completer.complete(&target_dir, target_dir.len());
+
+    // Create the expected values
+    let expected_paths: Vec<String> = vec![file(dir.join("another").join("newfile"))];
+
+    // Match the results
+    match_suggestions(&expected_paths, &suggestions);
+
+    // Test completions for hidden files
+    let target_dir = format!("list {}", file(dir.join(".hidden_folder").join(".")));
+    let suggestions = completer.complete(&target_dir, target_dir.len());
+
+    let expected_paths: Vec<String> =
+        vec![file(dir.join(".hidden_folder").join(".hidden_subfile"))];
+
+    // Match the results
+    match_suggestions(&expected_paths, &suggestions);
+}
+
 #[cfg(windows)]
 #[test]
 fn file_completions_with_mixed_separators() {

@@ -1657,13 +1759,3 @@ fn alias_offset_bug_7754() {
     // This crashes before PR #7756
     let _suggestions = completer.complete("ll -a | c", 9);
 }
-
-#[test]
-fn get_path_env_var_8003() {
-    // Create a new engine
-    let (_, _, engine, _) = new_engine();
-    // Get the path env var in a platform agnostic way
-    let the_path = engine.get_path_env_var();
-    // Make sure it's not empty
-    assert!(the_path.is_some());
-}
@@ -2,4 +2,4 @@ mod from;
 mod to;

 pub(crate) use from::url::FromUrl;
-pub(crate) use to::html::ToHtml;
+pub use to::html::ToHtml;
@@ -9,6 +9,7 @@ mod strings;
 pub use bits::{
     Bits, BitsAnd, BitsInto, BitsNot, BitsOr, BitsRol, BitsRor, BitsShl, BitsShr, BitsXor,
 };
+pub use formats::ToHtml;
 pub use math::{MathArcCos, MathArcCosH, MathArcSin, MathArcSinH, MathArcTan, MathArcTanH};
 pub use math::{MathCos, MathCosH, MathSin, MathSinH, MathTan, MathTanH};
 pub use math::{MathExp, MathLn};

@@ -54,7 +55,8 @@ pub fn add_extra_command_context(mut engine_state: EngineState) -> EngineState {
         strings::str_::case::StrTitleCase
     );

-    bind_command!(formats::ToHtml, formats::FromUrl);
+    bind_command!(ToHtml, formats::FromUrl);
+
     // Bits
     bind_command! {
         Bits,
@@ -106,6 +106,7 @@ pub fn add_shell_command_context(mut engine_state: EngineState) -> EngineState {
     bind_command! {
         Path,
         PathBasename,
+        PathSelf,
         PathDirname,
         PathExists,
         PathExpand,
@@ -1,5 +1,5 @@
 use nu_engine::command_prelude::*;
-use nu_protocol::Signals;
+use nu_protocol::{ListStream, Signals};
 use wax::{Glob as WaxGlob, WalkBehavior, WalkEntry};

 #[derive(Clone)]

@@ -223,6 +223,7 @@ impl Command for Glob {
                 ..Default::default()
             },
         )
+        .into_owned()
         .not(np)
         .map_err(|err| ShellError::GenericError {
             error: "error with glob's not pattern".into(),

@@ -249,6 +250,7 @@ impl Command for Glob {
                 ..Default::default()
             },
         )
+        .into_owned()
         .flatten();
         glob_to_value(
             engine_state.signals(),

@@ -258,11 +260,9 @@ impl Command for Glob {
             no_symlinks,
             span,
         )
-    }?;
+    };

-    Ok(result
-        .into_iter()
-        .into_pipeline_data(span, engine_state.signals().clone()))
+    Ok(result.into_pipeline_data(span, engine_state.signals().clone()))
     }
 }

@@ -281,29 +281,33 @@ fn convert_patterns(columns: &[Value]) -> Result<Vec<String>, ShellError> {
     Ok(res)
 }

-fn glob_to_value<'a>(
+fn glob_to_value(
     signals: &Signals,
-    glob_results: impl Iterator<Item = WalkEntry<'a>>,
+    glob_results: impl Iterator<Item = WalkEntry<'static>> + Send + 'static,
     no_dirs: bool,
     no_files: bool,
     no_symlinks: bool,
     span: Span,
-) -> Result<Vec<Value>, ShellError> {
-    let mut result: Vec<Value> = Vec::new();
-    for entry in glob_results {
-        signals.check(span)?;
+) -> ListStream {
+    let map_signals = signals.clone();
+    let result = glob_results.filter_map(move |entry| {
+        if let Err(err) = map_signals.check(span) {
+            return Some(Value::error(err, span));
+        };
         let file_type = entry.file_type();

         if !(no_dirs && file_type.is_dir()
             || no_files && file_type.is_file()
             || no_symlinks && file_type.is_symlink())
         {
-            result.push(Value::string(
+            Some(Value::string(
                 entry.into_path().to_string_lossy().to_string(),
                 span,
-            ));
+            ))
+        } else {
+            None
         }
-    }
+    });

-    Ok(result)
+    ListStream::new(result, span, signals.clone())
 }
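Both this `glob` change and the `select` change further down follow the same refactor: an eager loop that pushes results into a Vec is replaced by a lazy iterator adapter, so values are produced only as the consumer pulls them. A minimal, self-contained sketch of that pattern in plain Rust (the function names and the hidden-file filter are illustrative, not Nushell APIs):

```rust
// Eager: every entry is visited and stored before anything is returned.
fn collect_visible(entries: Vec<String>) -> Vec<String> {
    let mut out = Vec::new();
    for e in entries {
        if !e.starts_with('.') {
            out.push(e);
        }
    }
    out
}

// Lazy: filtering happens only as the caller pulls items, which is roughly
// what returning a ListStream instead of a Vec<Value> buys the glob command.
fn stream_visible(entries: Vec<String>) -> impl Iterator<Item = String> {
    entries.into_iter().filter(|e| !e.starts_with('.'))
}

fn main() {
    let entries = vec![".hidden".to_string(), "visible.txt".to_string()];
    assert_eq!(collect_visible(entries.clone()), vec!["visible.txt"]);
    assert_eq!(stream_visible(entries).collect::<Vec<_>>(), vec!["visible.txt"]);
}
```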
@@ -15,7 +15,7 @@ impl Command for UTouch {
     }

     fn search_terms(&self) -> Vec<&str> {
-        vec!["create", "file"]
+        vec!["create", "file", "coreutils"]
     }

     fn signature(&self) -> Signature {
@@ -206,7 +206,6 @@ fn select(
     let columns = new_columns;

     let input = if !unique_rows.is_empty() {
-        // let skip = call.has_flag(engine_state, stack, "skip")?;
         let metadata = input.metadata();
         let pipeline_iter: PipelineIterator = input.into_iter();

@@ -231,37 +230,31 @@ fn select(
         Value::List {
             vals: input_vals, ..
         } => {
-            let mut output = vec![];
-            let mut columns_with_value = Vec::new();
-            for input_val in input_vals {
-                if !columns.is_empty() {
-                    let mut record = Record::new();
-                    for path in &columns {
-                        //FIXME: improve implementation to not clone
-                        match input_val.clone().follow_cell_path(&path.members, false) {
-                            Ok(fetcher) => {
-                                record.push(path.to_column_name(), fetcher);
-                                if !columns_with_value.contains(&path) {
-                                    columns_with_value.push(path);
-                                }
-                            }
-                            Err(e) => {
-                                return Err(e);
-                            }
-                        }
-                    }
-
-                    output.push(Value::record(record, span))
-                } else {
-                    output.push(input_val)
-                }
-            }
-
-            Ok(output.into_iter().into_pipeline_data_with_metadata(
-                call_span,
-                engine_state.signals().clone(),
-                metadata,
-            ))
+            Ok(input_vals
+                .into_iter()
+                .map(move |input_val| {
+                    if !columns.is_empty() {
+                        let mut record = Record::new();
+                        for path in &columns {
+                            //FIXME: improve implementation to not clone
+                            match input_val.clone().follow_cell_path(&path.members, false) {
+                                Ok(fetcher) => {
+                                    record.push(path.to_column_name(), fetcher);
+                                }
+                                Err(e) => return Value::error(e, call_span),
+                            }
+                        }
+
+                        Value::record(record, span)
+                    } else {
+                        input_val.clone()
+                    }
+                })
+                .into_pipeline_data_with_metadata(
+                    call_span,
+                    engine_state.signals().clone(),
+                    metadata,
+                ))
         }
         _ => {
             if !columns.is_empty() {

@@ -286,31 +279,29 @@ fn select(
             }
         }
         PipelineData::ListStream(stream, metadata, ..) => {
-            let mut values = vec![];
-
-            for x in stream {
-                if !columns.is_empty() {
-                    let mut record = Record::new();
-                    for path in &columns {
-                        //FIXME: improve implementation to not clone
-                        match x.clone().follow_cell_path(&path.members, false) {
-                            Ok(value) => {
-                                record.push(path.to_column_name(), value);
-                            }
-                            Err(e) => return Err(e),
-                        }
-                    }
-                    values.push(Value::record(record, call_span));
-                } else {
-                    values.push(x);
-                }
-            }
-
-            Ok(values.into_pipeline_data_with_metadata(
-                call_span,
-                engine_state.signals().clone(),
-                metadata,
-            ))
+            Ok(stream
+                .map(move |x| {
+                    if !columns.is_empty() {
+                        let mut record = Record::new();
+                        for path in &columns {
+                            //FIXME: improve implementation to not clone
+                            match x.clone().follow_cell_path(&path.members, false) {
+                                Ok(value) => {
+                                    record.push(path.to_column_name(), value);
+                                }
+                                Err(e) => return Value::error(e, call_span),
+                            }
+                        }
+                        Value::record(record, call_span)
+                    } else {
+                        x
+                    }
+                })
+                .into_pipeline_data_with_metadata(
+                    call_span,
+                    engine_state.signals().clone(),
+                    metadata,
+                ))
         }
         _ => Ok(PipelineData::empty()),
     }
@@ -6,6 +6,7 @@ mod join;
 mod parse;
 pub mod path_;
 mod relative_to;
+mod self_;
 mod split;
 mod r#type;

@@ -18,6 +19,7 @@ pub use parse::SubCommand as PathParse;
 pub use path_::PathCommand as Path;
 pub use r#type::SubCommand as PathType;
 pub use relative_to::SubCommand as PathRelativeTo;
+pub use self_::SubCommand as PathSelf;
 pub use split::SubCommand as PathSplit;

 use nu_protocol::{ShellError, Span, Value};
crates/nu-command/src/path/self_.rs (new file, 129 lines)

@@ -0,0 +1,129 @@
use nu_engine::command_prelude::*;
use nu_path::expand_path_with;
use nu_protocol::engine::StateWorkingSet;

#[derive(Clone)]
pub struct SubCommand;

impl Command for SubCommand {
    fn name(&self) -> &str {
        "path self"
    }

    fn signature(&self) -> Signature {
        Signature::build("path self")
            .input_output_type(Type::Nothing, Type::String)
            .allow_variants_without_examples(true)
            .optional(
                "path",
                SyntaxShape::Filepath,
                "Path to get instead of the current file.",
            )
            .category(Category::Path)
    }

    fn description(&self) -> &str {
        "Get the absolute path of the script or module containing this command at parse time."
    }

    fn is_const(&self) -> bool {
        true
    }

    fn run(
        &self,
        _engine_state: &EngineState,
        _stack: &mut Stack,
        call: &Call,
        _input: PipelineData,
    ) -> Result<PipelineData, ShellError> {
        Err(ShellError::GenericError {
            error: "this command can only run during parse-time".into(),
            msg: "can't run after parse-time".into(),
            span: Some(call.head),
            help: Some("try assigning this command's output to a const variable".into()),
            inner: vec![],
        })
    }

    fn run_const(
        &self,
        working_set: &StateWorkingSet,
        call: &Call,
        _input: PipelineData,
    ) -> Result<PipelineData, ShellError> {
        let path: Option<String> = call.opt_const(working_set, 0)?;
        let cwd = working_set.permanent_state.cwd(None)?;
        let current_file =
            working_set
                .files
                .top()
                .ok_or_else(|| ShellError::FileNotFoundCustom {
                    msg: "Couldn't find current file".into(),
                    span: call.head,
                })?;

        let out = if let Some(path) = path {
            let dir = expand_path_with(
                current_file
                    .parent()
                    .ok_or_else(|| ShellError::FileNotFoundCustom {
                        msg: "Couldn't find current file's parent.".into(),
                        span: call.head,
                    })?,
                &cwd,
                true,
            );
            Value::string(
                expand_path_with(path, dir, false).to_string_lossy(),
                call.head,
            )
        } else {
            Value::string(
                expand_path_with(current_file, &cwd, true).to_string_lossy(),
                call.head,
            )
        };

        Ok(out.into_pipeline_data())
    }

    fn examples(&self) -> Vec<Example> {
        vec![
            Example {
                description: "Get the path of the current file",
                example: r#"const current_file = path self"#,
                result: None,
            },
            Example {
                description: "Get the path of the directory containing the current file",
                example: r#"const current_file = path self ."#,
                result: None,
            },
            #[cfg(windows)]
            Example {
                description: "Get the absolute form of a path relative to the current file",
                example: r#"const current_file = path self ..\foo"#,
                result: None,
            },
            #[cfg(not(windows))]
            Example {
                description: "Get the absolute form of a path relative to the current file",
                example: r#"const current_file = path self ../foo"#,
                result: None,
            },
        ]
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_examples() {
        use crate::test_examples;

        test_examples(SubCommand {})
    }
}
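For context on what the new `path self` command computes when given an argument: a relative path is resolved against the directory that contains the current script, not against the process working directory. A plain-std sketch of that resolution (names are illustrative; Nushell itself goes through `nu_path::expand_path_with`, which also normalizes the result):

```rust
use std::path::{Path, PathBuf};

// Resolve `rel` against the parent directory of the file being parsed,
// roughly what `const p = path self ../foo` evaluates at parse time.
fn resolve_relative_to_file(current_file: &Path, rel: &str) -> Option<PathBuf> {
    current_file.parent().map(|dir| dir.join(rel))
}

fn main() {
    let file = Path::new("/repo/scripts/build.nu");
    let resolved = resolve_relative_to_file(file, "../foo").unwrap();
    // A real implementation would also collapse the `..` component.
    assert_eq!(resolved, PathBuf::from("/repo/scripts/../foo"));
}
```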
@@ -70,8 +70,8 @@ impl Command for Sleep {
             result: Some(Value::nothing(Span::test_data())),
         },
         Example {
-            description: "Sleep for 3sec",
-            example: "sleep 1sec 1sec 1sec",
+            description: "Use multiple arguments to write a duration with multiple units, which is unsupported by duration literals",
+            example: "sleep 1min 30sec",
             result: None,
         },
         Example {
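The updated example works because `sleep` accepts several duration arguments and sleeps for their sum, which lets users express mixed units that a single duration literal cannot. A small illustrative sketch of the same summing idea with std durations (not the command's actual implementation):

```rust
use std::time::Duration;

// Summing per-argument durations is what makes `sleep 1min 30sec`
// equivalent to sleeping 90 seconds.
fn total_sleep(args: &[Duration]) -> Duration {
    args.iter().copied().sum()
}

fn main() {
    let args = [Duration::from_secs(60), Duration::from_secs(30)];
    assert_eq!(total_sleep(&args), Duration::from_secs(90));
}
```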
@@ -131,14 +131,21 @@ fn http_delete_timeout() {
     let _mock = server
         .mock("DELETE", "/")
         .with_chunked_body(|w| {
-            thread::sleep(Duration::from_secs(1));
+            thread::sleep(Duration::from_secs(10));
             w.write_all(b"Delayed response!")
         })
         .create();

     let actual = nu!(pipeline(
-        format!("http delete --max-time 500ms {url}", url = server.url()).as_str()
+        format!("http delete --max-time 100ms {url}", url = server.url()).as_str()
     ));

-    assert!(&actual.err.contains("nu::shell::io_error"));
+    assert!(&actual.err.contains("nu::shell::network_failure"));
+
+    #[cfg(not(target_os = "windows"))]
+    assert!(&actual.err.contains("timed out reading response"));
+    #[cfg(target_os = "windows")]
+    assert!(&actual
+        .err
+        .contains("did not properly respond after a period of time"));
 }

The same change is applied to the GET, OPTIONS, PATCH, POST, and PUT timeout tests: the mock body now sleeps 10 seconds instead of 1, `--max-time` drops from 500ms to 100ms, and the `nu::shell::io_error` assertion is replaced by the `nu::shell::network_failure` assertion plus the platform-specific timeout-message assertions shown above. Only the mocked verb and the command line differ:

@@ -325,14 +325,21 @@ fn http_get_timeout() {
-        format!("http get --max-time 500ms {url}", url = server.url()).as_str()
+        format!("http get --max-time 100ms {url}", url = server.url()).as_str()

@@ -50,14 +50,21 @@ fn http_options_timeout() {
-        format!("http options --max-time 500ms {url}", url = server.url()).as_str()
+        format!("http options --max-time 100ms {url}", url = server.url()).as_str()

@@ -171,18 +171,25 @@ fn http_patch_timeout() {
-            "http patch --max-time 500ms {url} patchbody",
+            "http patch --max-time 100ms {url} patchbody",

@@ -285,18 +285,25 @@ fn http_post_timeout() {
-            "http post --max-time 500ms {url} postbody",
+            "http post --max-time 100ms {url} postbody",

@@ -171,18 +171,25 @@ fn http_put_timeout() {
-            "http put --max-time 500ms {url} putbody",
+            "http put --max-time 100ms {url} putbody",
@@ -63,7 +63,7 @@ fn complex_nested_columns() {
 fn fails_if_given_unknown_column_name() {
     let actual = nu!(pipeline(
         r#"
-        echo [
+        [
            [first_name, last_name, rusty_at, type];

            [Andrés Robalino '10/11/2013' A]

@@ -71,7 +71,6 @@ fn fails_if_given_unknown_column_name() {
            [Yehuda Katz '10/11/2013' A]
        ]
        | select rrusty_at first_name
-       | length
        "#
    ));

@@ -284,9 +284,9 @@ fn use_main_def_known_external() {
 #[test]
 fn use_main_not_exported() {
     let inp = &[
-        r#"module spam { def main [] { "spam" } }"#,
-        r#"use spam"#,
-        r#"spam"#,
+        r#"module my-super-cool-and-unique-module-name { def main [] { "hi" } }"#,
+        r#"use my-super-cool-and-unique-module-name"#,
+        r#"my-super-cool-and-unique-module-name"#,
     ];

     let actual = nu!(&inp.join("; "));
@@ -11,13 +11,6 @@ use std::{
     sync::Arc,
 };

-#[cfg(windows)]
-const ENV_PATH_NAME: &str = "Path";
-#[cfg(windows)]
-const ENV_PATH_NAME_SECONDARY: &str = "PATH";
-#[cfg(not(windows))]
-const ENV_PATH_NAME: &str = "PATH";
-
 const ENV_CONVERSIONS: &str = "ENV_CONVERSIONS";

 enum ConversionResult {

@@ -53,14 +46,14 @@ pub fn convert_env_values(engine_state: &mut EngineState, stack: &Stack) -> Resu

     #[cfg(not(windows))]
     {
-        error = error.or_else(|| ensure_path(&mut new_scope, ENV_PATH_NAME));
+        error = error.or_else(|| ensure_path(&mut new_scope, "PATH"));
     }

     #[cfg(windows)]
     {
-        let first_result = ensure_path(&mut new_scope, ENV_PATH_NAME);
+        let first_result = ensure_path(&mut new_scope, "Path");
         if first_result.is_some() {
-            let second_result = ensure_path(&mut new_scope, ENV_PATH_NAME_SECONDARY);
+            let second_result = ensure_path(&mut new_scope, "PATH");

             if second_result.is_some() {
                 error = error.or(first_result);

@@ -107,7 +100,7 @@ pub fn env_to_string(
         ConversionResult::CellPathError => match value.coerce_string() {
             Ok(s) => Ok(s),
             Err(_) => {
-                if env_name == ENV_PATH_NAME {
+                if env_name.to_lowercase() == "path" {
                     // Try to convert PATH/Path list to a string
                     match value {
                         Value::List { vals, .. } => {

@@ -216,31 +209,21 @@ pub fn current_dir_const(working_set: &StateWorkingSet) -> Result<PathBuf, Shell
 }

 /// Get the contents of path environment variable as a list of strings
-///
-/// On non-Windows: It will fetch PATH
-/// On Windows: It will try to fetch Path first but if not present, try PATH
 pub fn path_str(
     engine_state: &EngineState,
     stack: &Stack,
     span: Span,
 ) -> Result<String, ShellError> {
-    let (pathname, pathval) = match stack.get_env_var(engine_state, ENV_PATH_NAME) {
-        Some(v) => Ok((ENV_PATH_NAME, v)),
-        None => {
-            #[cfg(windows)]
-            match stack.get_env_var(engine_state, ENV_PATH_NAME_SECONDARY) {
-                Some(v) => Ok((ENV_PATH_NAME_SECONDARY, v)),
-                None => Err(ShellError::EnvVarNotFoundAtRuntime {
-                    envvar_name: ENV_PATH_NAME_SECONDARY.to_string(),
-                    span,
-                }),
-            }
-            #[cfg(not(windows))]
-            Err(ShellError::EnvVarNotFoundAtRuntime {
-                envvar_name: ENV_PATH_NAME.to_string(),
-                span,
-            })
-        }
+    let (pathname, pathval) = match stack.get_env_var_insensitive(engine_state, "path") {
+        Some(v) => Ok((if cfg!(windows) { "Path" } else { "PATH" }, v)),
+        None => Err(ShellError::EnvVarNotFoundAtRuntime {
+            envvar_name: if cfg!(windows) {
+                "Path".to_string()
+            } else {
+                "PATH".to_string()
+            },
+            span,
+        }),
     }?;

     env_to_string(pathname, pathval, engine_state, stack)
@@ -488,6 +488,7 @@ impl<'e, 's> ScopeData<'e, 's> {
                 "description" => Value::string(module_desc, span),
                 "extra_description" => Value::string(module_extra_desc, span),
                 "module_id" => Value::int(module_id.get() as i64, span),
+                "file" => Value::string(module.file.clone().map_or("unknown".to_string(), |(p, _)| p.path().to_string_lossy().to_string()), span),
             },
             span,
         )
@@ -7,7 +7,7 @@ use miette::{IntoDiagnostic, Result};
 use nu_parser::parse;
 use nu_protocol::{
     engine::{EngineState, StateWorkingSet},
-    Value,
+    Span, Value,
 };

 impl LanguageServer {

@@ -28,6 +28,7 @@ impl LanguageServer {

         let contents = rope_of_file.bytes().collect::<Vec<u8>>();
         let offset = working_set.next_span_start();
+        working_set.files.push(file_path.into(), Span::unknown())?;
         parse(
             &mut working_set,
             Some(&file_path.to_string_lossy()),
@@ -274,6 +274,9 @@ impl LanguageServer {

         // TODO: think about passing down the rope into the working_set
         let contents = file.bytes().collect::<Vec<u8>>();
+        let _ = working_set
+            .files
+            .push(file_path.as_ref().into(), Span::unknown());
         let block = parse(working_set, Some(&file_path), &contents, false);
         let flattened = flatten_block(working_set, &block);

@@ -2122,7 +2122,21 @@ pub fn parse_variable_expr(working_set: &mut StateWorkingSet, span: Span) -> Exp
         String::from_utf8_lossy(contents).to_string()
     };

-    if let Some(id) = parse_variable(working_set, span) {
+    let bytes = working_set.get_span_contents(span);
+    let suggestion = || {
+        DidYouMean::new(
+            &working_set.list_variables(),
+            working_set.get_span_contents(span),
+        )
+    };
+    if !is_variable(bytes) {
+        working_set.error(ParseError::ExpectedWithDidYouMean(
+            "valid variable name",
+            suggestion(),
+            span,
+        ));
+        garbage(working_set, span)
+    } else if let Some(id) = working_set.find_variable(bytes) {
         Expression::new(
             working_set,
             Expr::Var(id),

@@ -2133,9 +2147,7 @@ pub fn parse_variable_expr(working_set: &mut StateWorkingSet, span: Span) -> Exp
         working_set.error(ParseError::EnvVarNotVar(name, span));
         garbage(working_set, span)
     } else {
-        let ws = &*working_set;
-        let suggestion = DidYouMean::new(&ws.list_variables(), ws.get_span_contents(span));
-        working_set.error(ParseError::VariableNotFound(suggestion, span));
+        working_set.error(ParseError::VariableNotFound(suggestion(), span));
         garbage(working_set, span)
     }
 }

@@ -3349,26 +3361,66 @@ pub fn parse_input_output_types(
 }

 pub fn parse_full_signature(working_set: &mut StateWorkingSet, spans: &[Span]) -> Expression {
-    let arg_signature = working_set.get_span_contents(spans[0]);
-
-    if arg_signature.ends_with(b":") {
-        let mut arg_signature =
-            parse_signature(working_set, Span::new(spans[0].start, spans[0].end - 1));
-
-        let input_output_types = parse_input_output_types(working_set, &spans[1..]);
-
-        if let Expression {
-            expr: Expr::Signature(sig),
-            span: expr_span,
-            ..
-        } = &mut arg_signature
-        {
-            sig.input_output_types = input_output_types;
-            expr_span.end = Span::concat(&spans[1..]).end;
-        }
-        arg_signature
-    } else {
-        parse_signature(working_set, spans[0])
-    }
+    match spans.len() {
+        // This case should never happen. It corresponds to declarations like `def foo {}`,
+        // which should throw a 'Missing required positional argument.' before getting to this point
+        0 => {
+            working_set.error(ParseError::InternalError(
+                "failed to catch missing positional arguments".to_string(),
+                Span::concat(spans),
+            ));
+            garbage(working_set, Span::concat(spans))
+        }
+
+        // e.g. `[ b"[foo: string]" ]`
+        1 => parse_signature(working_set, spans[0]),
+
+        // This case is needed to distinguish between e.g.
+        // `[ b"[]", b"{ true }" ]` vs `[ b"[]:", b"int" ]`
+        2 if working_set.get_span_contents(spans[1]).starts_with(b"{") => {
+            parse_signature(working_set, spans[0])
+        }
+
+        // This should handle every other case, e.g.
+        // `[ b"[]:", b"int" ]`
+        // `[ b"[]", b":", b"int" ]`
+        // `[ b"[]", b":", b"int", b"->", b"bool" ]`
+        _ => {
+            let (mut arg_signature, input_output_types_pos) =
+                if working_set.get_span_contents(spans[0]).ends_with(b":") {
+                    (
+                        parse_signature(working_set, Span::new(spans[0].start, spans[0].end - 1)),
+                        1,
+                    )
+                } else if working_set.get_span_contents(spans[1]) == b":" {
+                    (parse_signature(working_set, spans[0]), 2)
+                } else {
+                    // This should be an error case, but we call parse_signature anyway
+                    // so it can handle the various possible errors
+                    // e.g. `[ b"[]", b"int" ]` or `[
+                    working_set.error(ParseError::Expected(
+                        "colon (:) before type signature",
+                        Span::concat(&spans[1..]),
+                    ));
+                    // (garbage(working_set, Span::concat(spans)), 1)
+
+                    (parse_signature(working_set, spans[0]), 1)
+                };
+
+            let input_output_types =
+                parse_input_output_types(working_set, &spans[input_output_types_pos..]);
+
+            if let Expression {
+                expr: Expr::Signature(sig),
+                span: expr_span,
+                ..
+            } = &mut arg_signature
+            {
+                sig.input_output_types = input_output_types;
+                expr_span.end = Span::concat(&spans[input_output_types_pos..]).end;
+            }
+            arg_signature
+        }
+    }
 }

@@ -5612,18 +5664,6 @@ pub fn parse_expression(working_set: &mut StateWorkingSet, spans: &[Span]) -> Ex
     }
 }

-pub fn parse_variable(working_set: &mut StateWorkingSet, span: Span) -> Option<VarId> {
-    let bytes = working_set.get_span_contents(span);
-
-    if is_variable(bytes) {
-        working_set.find_variable(bytes)
-    } else {
-        working_set.error(ParseError::Expected("valid variable name", span));
-
-        None
-    }
-}
-
 pub fn parse_builtin_commands(
     working_set: &mut StateWorkingSet,
     lite_command: &LiteCommand,
@@ -2460,6 +2460,7 @@ mod input_types {

     #[rstest]
     #[case::input_output(b"def q []: int -> int {1}", false)]
+    #[case::input_output(b"def q [x: bool]: int -> int {2}", false)]
     #[case::input_output(b"def q []: string -> string {'qwe'}", false)]
     #[case::input_output(b"def q []: nothing -> nothing {null}", false)]
     #[case::input_output(b"def q []: list<string> -> list<string> {[]}", false)]

@@ -2479,6 +2480,42 @@ mod input_types {
     #[case::input_output(b"def q []: nothing -> record<c: int e: int {{c: 1 e: 1}}", true)]
     #[case::input_output(b"def q []: record<c: int e: int -> record<a: int> {{a: 1}}", true)]
     #[case::input_output(b"def q []: nothing -> record<a: record<a: int> {{a: {a: 1}}}", true)]
+    #[case::input_output(b"def q []: int []}", true)]
+    #[case::input_output(b"def q []: bool {[]", true)]
+    // Type signature variants with whitespace between inputs and `:`
+    #[case::input_output(b"def q [] : int -> int {1}", false)]
+    #[case::input_output(b"def q [x: bool] : int -> int {2}", false)]
+    #[case::input_output(b"def q []\t : string -> string {'qwe'}", false)]
+    #[case::input_output(b"def q [] \t : nothing -> nothing {null}", false)]
+    #[case::input_output(b"def q [] \t: list<string> -> list<string> {[]}", false)]
+    #[case::input_output(
+        b"def q []\t: record<a: int b: int> -> record<c: int e: int> {{c: 1 e: 1}}",
+        false
+    )]
+    #[case::input_output(
+        b"def q [] : table<a: int b: int> -> table<c: int e: int> {[{c: 1 e: 1}]}",
+        false
+    )]
+    #[case::input_output(
+        b"def q [] : nothing -> record<c: record<a: int b: int> e: int> {{c: {a: 1 b: 2} e: 1}}",
+        false
+    )]
+    #[case::input_output(b"def q [] : nothing -> list<string {[]}", true)]
+    #[case::input_output(b"def q [] : nothing -> record<c: int e: int {{c: 1 e: 1}}", true)]
+    #[case::input_output(b"def q [] : record<c: int e: int -> record<a: int> {{a: 1}}", true)]
+    #[case::input_output(b"def q [] : nothing -> record<a: record<a: int> {{a: {a: 1}}}", true)]
+    #[case::input_output(b"def q [] : int []}", true)]
+    #[case::input_output(b"def q [] : bool {[]", true)]
+    // No input-output type signature
+    #[case::input_output(b"def qq [] {[]}", false)]
+    #[case::input_output(b"def q [] []}", true)]
+    #[case::input_output(b"def q [] {", true)]
+    #[case::input_output(b"def q []: []}", true)]
+    #[case::input_output(b"def q [] int {}", true)]
+    #[case::input_output(b"def q [x: string, y: int] {{c: 1 e: 1}}", false)]
+    #[case::input_output(b"def q [x: string, y: int]: {}", true)]
+    #[case::input_output(b"def q [x: string, y: int] {a: {a: 1}}", true)]
+    #[case::input_output(b"def foo {3}", true)]
     #[case::vardecl(b"let a: int = 1", false)]
     #[case::vardecl(b"let a: string = 'qwe'", false)]
     #[case::vardecl(b"let a: nothing = null", false)]
@@ -126,7 +126,7 @@ impl<'a> PluginExecutionContext for PluginExecutionCommandContext<'a> {
     }

     fn get_env_var(&self, name: &str) -> Result<Option<&Value>, ShellError> {
-        Ok(self.stack.get_env_var(&self.engine_state, name))
+        Ok(self.stack.get_env_var_insensitive(&self.engine_state, name))
     }

     fn get_env_vars(&self) -> Result<HashMap<String, Value>, ShellError> {
@@ -14,6 +14,7 @@ use crate::{
 use fancy_regex::Regex;
 use lru::LruCache;
 use nu_path::AbsolutePathBuf;
+use nu_utils::IgnoreCaseExt;
 use std::{
     collections::HashMap,
     num::NonZeroUsize,

@@ -465,20 +466,12 @@ impl EngineState {
         None
     }

-    // Get the path environment variable in a platform agnostic way
-    pub fn get_path_env_var(&self) -> Option<&Value> {
-        let env_path_name_windows: &str = "Path";
-        let env_path_name_nix: &str = "PATH";
-
+    pub fn get_env_var_insensitive(&self, name: &str) -> Option<&Value> {
         for overlay_id in self.scope.active_overlays.iter().rev() {
             let overlay_name = String::from_utf8_lossy(self.get_overlay_name(*overlay_id));
             if let Some(env_vars) = self.env_vars.get(overlay_name.as_ref()) {
-                if let Some(val) = env_vars.get(env_path_name_nix) {
-                    return Some(val);
-                } else if let Some(val) = env_vars.get(env_path_name_windows) {
-                    return Some(val);
-                } else {
-                    return None;
+                if let Some(v) = env_vars.iter().find(|(k, _)| k.eq_ignore_case(name)) {
+                    return Some(v.1);
                 }
             }
         }
@@ -5,6 +5,7 @@ use crate::{
     },
     Config, IntoValue, OutDest, ShellError, Span, Value, VarId, ENV_VARIABLE_ID, NU_VARIABLE_ID,
 };
+use nu_utils::IgnoreCaseExt;
 use std::{
     collections::{HashMap, HashSet},
     fs::File,

@@ -494,6 +495,40 @@ impl Stack {
         None
     }

+    // Case-Insensitive version of get_env_var
+    pub fn get_env_var_insensitive<'a>(
+        &'a self,
+        engine_state: &'a EngineState,
+        name: &str,
+    ) -> Option<&'a Value> {
+        for scope in self.env_vars.iter().rev() {
+            for active_overlay in self.active_overlays.iter().rev() {
+                if let Some(env_vars) = scope.get(active_overlay) {
+                    if let Some(v) = env_vars.iter().find(|(k, _)| k.eq_ignore_case(name)) {
+                        return Some(v.1);
+                    }
+                }
+            }
+        }
+
+        for active_overlay in self.active_overlays.iter().rev() {
+            let is_hidden = if let Some(env_hidden) = self.env_hidden.get(active_overlay) {
+                env_hidden.iter().any(|k| k.eq_ignore_case(name))
+            } else {
+                false
+            };
+
+            if !is_hidden {
+                if let Some(env_vars) = engine_state.env_vars.get(active_overlay) {
+                    if let Some(v) = env_vars.iter().find(|(k, _)| k.eq_ignore_case(name)) {
+                        return Some(v.1);
+                    }
+                }
+            }
+        }
+        None
+    }
+
     pub fn has_env_var(&self, engine_state: &EngineState, name: &str) -> bool {
         for scope in self.env_vars.iter().rev() {
             for active_overlay in self.active_overlays.iter().rev() {
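A plain-std sketch of the lookup idea behind the new `get_env_var_insensitive` methods: scan key/value pairs and compare keys ignoring case, so "PATH", "Path", and "path" all resolve to the same entry. Nushell uses nu_utils's `IgnoreCaseExt` (case folding); std's `eq_ignore_ascii_case` is the closest standard-library stand-in used here, and the flat slice stands in for the overlay/scope structures above:

```rust
// Case-insensitive key lookup over simple (name, value) pairs.
fn get_insensitive<'a>(vars: &'a [(String, String)], name: &str) -> Option<&'a str> {
    vars.iter()
        .find(|(k, _)| k.eq_ignore_ascii_case(name))
        .map(|(_, v)| v.as_str())
}

fn main() {
    let vars = vec![("Path".to_string(), r"C:\Windows".to_string())];
    assert_eq!(get_insensitive(&vars, "PATH"), Some(r"C:\Windows"));
    assert_eq!(get_insensitive(&vars, "path"), Some(r"C:\Windows"));
    assert_eq!(get_insensitive(&vars, "HOME"), None);
}
```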
@@ -55,6 +55,10 @@ pub enum ParseError {
     #[diagnostic(code(nu::parser::parse_mismatch_with_full_string_msg))]
     ExpectedWithStringMsg(String, #[label("expected {0}")] Span),

+    #[error("Parse mismatch during operation.")]
+    #[diagnostic(code(nu::parser::parse_mismatch_with_did_you_mean))]
+    ExpectedWithDidYouMean(&'static str, DidYouMean, #[label("expected {0}. {1}")] Span),
+
     #[error("Command does not support {0} input.")]
     #[diagnostic(code(nu::parser::input_type_mismatch))]
     InputMismatch(Type, #[label("command doesn't support {0} input")] Span),

@@ -551,6 +555,7 @@ impl ParseError {
             ParseError::Unbalanced(_, _, s) => *s,
             ParseError::Expected(_, s) => *s,
             ParseError::ExpectedWithStringMsg(_, s) => *s,
+            ParseError::ExpectedWithDidYouMean(_, _, s) => *s,
             ParseError::Mismatch(_, _, s) => *s,
             ParseError::UnsupportedOperationLHS(_, _, s, _) => *s,
             ParseError::UnsupportedOperationRHS(_, _, _, _, s, _) => *s,
@@ -1220,10 +1220,10 @@ pub enum ShellError {
         span: Span,
     },

-    /// Return event, which may become an error if used outside of a function
-    #[error("Return used outside of function")]
+    /// Return event, which may become an error if used outside of a custom command or closure
+    #[error("Return used outside of custom command or closure")]
     Return {
-        #[label("used outside of function")]
+        #[label("used outside of custom command or closure")]
         span: Span,
         value: Box<Value>,
     },

@@ -1544,8 +1544,8 @@ impl From<io::Error> for ShellError {
 impl From<Spanned<io::Error>> for ShellError {
     fn from(error: Spanned<io::Error>) -> Self {
         let Spanned { item: error, span } = error;
-        if error.kind() == io::ErrorKind::Other {
-            match error.into_inner() {
+        match error.kind() {
+            io::ErrorKind::Other => match error.into_inner() {
                 Some(err) => match err.downcast() {
                     Ok(err) => *err,
                     Err(err) => Self::IOErrorSpanned {

@@ -1557,12 +1557,15 @@ impl From<Spanned<io::Error>> for ShellError {
                     msg: "unknown error".into(),
                     span,
                 },
-            }
-        } else {
-            Self::IOErrorSpanned {
+            },
+            io::ErrorKind::TimedOut => Self::NetworkFailure {
                 msg: error.to_string(),
                 span,
-            }
+            },
+            _ => Self::IOErrorSpanned {
+                msg: error.to_string(),
+                span,
+            },
         }
     }
 }
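The `From<Spanned<io::Error>>` change above classifies I/O errors by kind so that timeouts surface as a network failure rather than a generic I/O error, which is what the HTTP timeout tests now assert. A self-contained sketch of that classification pattern (the enum here is illustrative, not Nushell's ShellError):

```rust
use std::io;

// Illustrative error type, standing in for the real ShellError variants.
#[derive(Debug)]
enum AppError {
    NetworkFailure(String),
    Io(String),
}

// Branch on io::ErrorKind so timeouts get a dedicated variant.
fn classify(err: io::Error) -> AppError {
    match err.kind() {
        io::ErrorKind::TimedOut => AppError::NetworkFailure(err.to_string()),
        _ => AppError::Io(err.to_string()),
    }
}

fn main() {
    let timeout = io::Error::new(io::ErrorKind::TimedOut, "timed out reading response");
    assert!(matches!(classify(timeout), AppError::NetworkFailure(_)));
}
```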
@@ -167,7 +167,7 @@ impl Module {
             vec![]
         } else {
             vec![(
-                final_name.clone(),
+                normalize_module_name(&final_name),
                 Value::record(
                     const_rows
                         .into_iter()

@@ -425,3 +425,32 @@ impl Module {
         result
     }
 }
+
+/// normalize module names for exporting as record constant
+fn normalize_module_name(bytes: &[u8]) -> Vec<u8> {
+    bytes
+        .iter()
+        .map(|x| match is_identifier_byte(*x) {
+            true => *x,
+            false => b'_',
+        })
+        .collect()
+}
+
+fn is_identifier_byte(b: u8) -> bool {
+    b != b'.'
+        && b != b'['
+        && b != b'('
+        && b != b'{'
+        && b != b'+'
+        && b != b'-'
+        && b != b'*'
+        && b != b'^'
+        && b != b'/'
+        && b != b'='
+        && b != b'!'
+        && b != b'<'
+        && b != b'>'
+        && b != b'&'
+        && b != b'|'
+}
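To make the effect of `normalize_module_name` concrete, here is a self-contained sketch that applies the same byte mapping (rewritten with `matches!`; the functions in the hunk above are the authoritative version) to the module names exercised by the new tests:

```rust
// Bytes that are not valid in an identifier (operators, brackets, and so on)
// are replaced with '_', which is why `use spam-mod` exposes `$spam_mod`.
fn is_identifier_byte(b: u8) -> bool {
    !matches!(
        b,
        b'.' | b'[' | b'(' | b'{' | b'+' | b'-' | b'*' | b'^' | b'/' | b'=' | b'!' | b'<'
            | b'>' | b'&' | b'|'
    )
}

fn normalize_module_name(bytes: &[u8]) -> Vec<u8> {
    bytes
        .iter()
        .map(|&x| if is_identifier_byte(x) { x } else { b'_' })
        .collect()
}

fn main() {
    for name in ["spam-mod", "spam/mod", "spam=mod"] {
        let normalized = String::from_utf8(normalize_module_name(name.as_bytes())).unwrap();
        println!("{name} -> {normalized}"); // prints spam_mod in every case
    }
}
```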
@@ -1,4 +1,7 @@
-use nu_protocol::{Config, Span, Value};
+use nu_protocol::{
+    engine::{EngineState, Stack},
+    Config, Span, Value,
+};
 use rstest::rstest;

 #[test]

@@ -46,3 +49,27 @@ fn test_duration_to_string(#[case] in_ns: i64, #[case] expected: &str) {
         "expected != observed"
     );
 }
+
+#[test]
+fn test_case_insensitive_env_var() {
+    let mut engine_state = EngineState::new();
+    let stack = Stack::new();
+
+    for (name, value) in std::env::vars() {
+        engine_state.add_env_var(name, Value::test_string(value));
+    }
+
+    let path_lower = engine_state.get_env_var_insensitive("path");
+    let path_upper = engine_state.get_env_var_insensitive("PATH");
+    let path_mixed = engine_state.get_env_var_insensitive("PaTh");
+
+    assert_eq!(path_lower, path_upper);
+    assert_eq!(path_lower, path_mixed);
+
+    let stack_path_lower = stack.get_env_var_insensitive(&engine_state, "path");
+    let stack_path_upper = stack.get_env_var_insensitive(&engine_state, "PATH");
+    let stack_path_mixed = stack.get_env_var_insensitive(&engine_state, "PaTh");
+
+    assert_eq!(stack_path_lower, stack_path_upper);
+    assert_eq!(stack_path_lower, stack_path_mixed);
+}
@@ -36,5 +36,5 @@ export def "from ndnuon" []: [string -> any] {

 # Convert structured data to NDNUON, i.e. newline-delimited NUON
 export def "to ndnuon" []: [any -> string] {
-    each { to nuon --raw } | to text
+    each { to nuon --raw | str replace --all "\n" '\n' } | to text
 }
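The `to ndnuon` fix escapes embedded newlines inside each record's NUON text so that one record always occupies exactly one line of the newline-delimited output. A plain-Rust sketch of the same idea for any newline-delimited format (function name and inputs are illustrative):

```rust
// Escape embedded newlines per record, then join records with real newlines.
// This mirrors the role of `str replace --all "\n" '\n'` in the stdlib change.
fn to_newline_delimited(records: &[String]) -> String {
    records
        .iter()
        .map(|r| r.replace('\n', "\\n"))
        .collect::<Vec<_>>()
        .join("\n")
}

fn main() {
    let records = vec!["\"foo\nbar\"".to_string(), "{a: 1}".to_string()];
    let out = to_newline_delimited(&records);
    assert_eq!(out.lines().count(), 2); // the embedded newline no longer splits a record
}
```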
@@ -28,7 +28,7 @@ def valid-annotations [] {
 # Returns a table containing the list of function names together with their annotations (comments above the declaration)
 def get-annotated [
     file: path
-] path -> table<function_name: string, annotation: string> {
+]: path -> table<function_name: string, annotation: string> {
     let raw_file = (
         open $file
         | lines

@@ -59,7 +59,7 @@ def get-annotated [
 # Annotations that allow multiple functions are of type list<string>
 # Other annotations are of type string
 # Result gets merged with the template record so that the output shape remains consistent regardless of the table content
-def create-test-record [] nothing -> record<before-each: string, after-each: string, before-all: string, after-all: string, test: list<string>, test-skip: list<string>> {
+def create-test-record []: nothing -> record<before-each: string, after-each: string, before-all: string, after-all: string, test: list<string>, test-skip: list<string>> {
     let input = $in

     let template_record = {

@@ -187,7 +187,7 @@ export def ($test_function_name) [] {
 def run-tests-for-module [
     module: record<file: path name: string before-each: string after-each: string before-all: string after-all: string test: list test-skip: list>
     threads: int
-] -> table<file: path, name: string, test: string, result: string> {
+]: nothing -> table<file: path, name: string, test: string, result: string> {
     let global_context = if not ($module.before-all|is-empty) {
         log info $"Running before-all for module ($module.name)"
         run-test {
@@ -128,3 +128,17 @@ def to_ndnuon_single_object [] {
     let expect = "{a: 1}"
     assert equal $result $expect "could not convert to NDNUON"
 }
+
+#[test]
+def to_ndnuon_multiline_strings [] {
+    let result = "foo\n\\n\nbar" | to ndnuon
+    let expect = '"foo\n\\n\nbar"'
+    assert equal $result $expect "could not convert multiline string to NDNUON"
+}
+
+#[test]
+def from_ndnuon_multiline_strings [] {
+    let result = '"foo\n\\n\nbar"' | from ndnuon
+    let expect = ["foo\n\\n\nbar"]
+    assert equal $result $expect "could not convert multiline string from NDNUON"
+}

The same pair of tests is added in a second test file, where the commands are invoked through the `formats` module:

@@ -128,3 +128,17 @@ def to_ndnuon_single_object [] {
     let expect = "{a: 1}"
     assert equal $result $expect "could not convert to NDNUON"
 }
+
+#[test]
+def to_ndnuon_multiline_strings [] {
+    let result = "foo\n\\n\nbar" | formats to ndnuon
+    let expect = '"foo\n\\n\nbar"'
+    assert equal $result $expect "could not convert multiline string to NDNUON"
+}
+
+#[test]
+def from_ndnuon_multiline_strings [] {
+    let result = '"foo\n\\n\nbar"' | formats from ndnuon
+    let expect = ["foo\n\\n\nbar"]
+    assert equal $result $expect "could not convert multiline string from NDNUON"
+}
@@ -26,7 +26,7 @@ nu-utils = { path = "../nu-utils", version = "0.100.1" }
 chrono = { workspace = true, features = ["std", "unstable-locales"], default-features = false }
 chrono-tz = "0.10"
 fancy-regex = { workspace = true }
-indexmap = { version = "2.6" }
+indexmap = { version = "2.7" }
 mimalloc = { version = "0.1.42" }
 num = {version = "0.4"}
 serde = { version = "1.0", features = ["derive"] }
@@ -6,8 +6,8 @@ use crate::{
 };
 use nu_plugin::{EngineInterface, EvaluatedCall, PluginCommand};
 use nu_protocol::{
-    Category, Example, LabeledError, PipelineData, ShellError, Signature, Span, SyntaxShape, Type,
-    Value,
+    Category, Example, LabeledError, PipelineData, ShellError, Signature, Span, Spanned,
+    SyntaxShape, Type, Value,
 };

 #[derive(Clone)]

@@ -26,7 +26,7 @@ impl PluginCommand for WithColumn {

     fn signature(&self) -> Signature {
         Signature::build(self.name())
-            .named("name", SyntaxShape::String, "new column name", Some('n'))
+            .named("name", SyntaxShape::String, "New column name. For lazy dataframes and expressions syntax, use a `polars as` expression to name a column.", Some('n'))
             .rest(
                 "series or expressions",
                 SyntaxShape::Any,

@@ -138,6 +138,15 @@ fn command_eager(
     let column_span = new_column.span();

     if NuExpression::can_downcast(&new_column) {
+        if let Some(name) = call.get_flag::<Spanned<String>>("name")? {
+            return Err(ShellError::GenericError {
+                error: "Flag 'name' is unsuppored when used with expressions. Please use the `polars as` expression to name a column".into(),
+                msg: "".into(),
+                span: Some(name.span),
+                help: Some("Use a `polars as` expression to name a column".into()),
+                inner: vec![],
+            });
+        }
         let vals: Vec<Value> = call.rest(0)?;
         let value = Value::list(vals, call.head);
         let expressions = NuExpression::extract_exprs(plugin, value)?;

@@ -177,6 +186,16 @@ fn command_lazy(
     call: &EvaluatedCall,
     lazy: NuLazyFrame,
 ) -> Result<PipelineData, ShellError> {
+    if let Some(name) = call.get_flag::<Spanned<String>>("name")? {
+        return Err(ShellError::GenericError {
+            error: "Flag 'name' is unsuppored for lazy dataframes. Please use the `polars as` expression to name a column".into(),
+            msg: "".into(),
+            span: Some(name.span),
+            help: Some("Use a `polars as` expression to name a column".into()),
+            inner: vec![],
+        });
+    }
+
     let vals: Vec<Value> = call.rest(0)?;
     let value = Value::list(vals, call.head);
     let expressions = NuExpression::extract_exprs(plugin, value)?;
@@ -24,6 +24,7 @@ fn find_id(
 ) -> Option<(Id, usize, Span)> {
     let file_id = working_set.add_file(file_path.to_string(), file);
     let offset = working_set.get_span_for_file(file_id).start;
+    let _ = working_set.files.push(file_path.into(), Span::unknown());
     let block = parse(working_set, Some(file_path), file, false);
     let flattened = flatten_block(working_set, &block);

@@ -88,6 +89,7 @@ pub fn check(engine_state: &mut EngineState, file_path: &str, max_errors: &Value

     if let Ok(contents) = file {
         let offset = working_set.next_span_start();
+        let _ = working_set.files.push(file_path.into(), Span::unknown());
         let block = parse(&mut working_set, Some(file_path), &contents, false);

         for (idx, err) in working_set.parse_errors.iter().enumerate() {

@@ -631,6 +633,7 @@ pub fn ast(engine_state: &mut EngineState, file_path: &str) {

     if let Ok(contents) = file {
         let offset = working_set.next_span_start();
+        let _ = working_set.files.push(file_path.into(), Span::unknown());
         let parsed_block = parse(&mut working_set, Some(file_path), &contents, false);

         let flat = flatten_block(&working_set, &parsed_block);
@@ -807,10 +807,10 @@ fn overlay_can_add_renamed_overlay() {
 #[test]
 fn overlay_hide_renamed_overlay() {
     let inp = &[
-        r#"module spam { export def foo [] { "foo" } }"#,
+        r#"module spam { export def foo-command-which-does-not-conflict [] { "foo" } }"#,
         "overlay use spam as eggs",
         "overlay hide eggs",
-        "foo",
+        "foo-command-which-does-not-conflict",
     ];

     let actual = nu!(&inp.join("; "));

@@ -1243,9 +1243,9 @@ fn overlay_use_main_def_known_external() {
 #[test]
 fn overlay_use_main_not_exported() {
     let inp = &[
-        r#"module foo { def main [] { "foo" } }"#,
-        "overlay use foo",
-        "foo",
+        r#"module my-super-cool-and-unique-module-name { def main [] { "hi" } }"#,
+        "overlay use my-super-cool-and-unique-module-name",
+        "my-super-cool-and-unique-module-name",
     ];

     let actual = nu!(&inp.join("; "));

@@ -1257,11 +1257,11 @@ fn overlay_use_main_not_exported() {
 fn alias_overlay_hide() {
     let inp = &[
         "overlay new spam",
-        "def foo [] { 'foo' }",
+        "def my-epic-command-name [] { 'foo' }",
         "overlay new eggs",
         "alias oh = overlay hide",
         "oh spam",
-        "foo",
+        "my-epic-command-name",
     ];

     let actual = nu!(&inp.join("; "));
@@ -1,4 +1,5 @@
 use crate::repl::tests::{fail_test, run_test, TestResult};
+use rstest::rstest;

 #[test]
 fn module_def_imports_1() -> TestResult {

@@ -145,6 +146,28 @@ fn export_module_which_defined_const() -> TestResult {
     )
 }

+#[rstest]
+#[case("spam-mod")]
+#[case("spam/mod")]
+#[case("spam=mod")]
+fn export_module_with_normalized_var_name(#[case] name: &str) -> TestResult {
+    let def = format!(
+        "module {name} {{ export const b = 3; export module {name}2 {{ export const c = 4 }} }}"
+    );
+    run_test(&format!("{def}; use {name}; $spam_mod.b"), "3")?;
+    run_test(&format!("{def}; use {name} *; $spam_mod2.c"), "4")
+}
+
+#[rstest]
+#[case("spam-mod")]
+#[case("spam/mod")]
+fn use_module_with_invalid_var_name(#[case] name: &str) -> TestResult {
+    fail_test(
+        &format!("module {name} {{ export const b = 3 }}; use {name}; ${name}"),
+        "expected valid variable name. Did you mean '$spam_mod'",
+    )
+}
+
 #[test]
 fn cannot_export_private_const() -> TestResult {
     fail_test(