mirror of https://github.com/nushell/nushell.git
synced 2025-02-18 03:21:05 +01:00

PWD per drive

This commit is contained in:
parent 3893fbb0b1
commit a84a859755

Cargo.toml (24 changed lines)
@ -109,7 +109,7 @@ md5 = { version = "0.10", package = "md-5" }
miette = "7.2"
mime = "0.3.17"
mime_guess = "2.0"
mockito = { version = "1.5", default-features = false }
mockito = { version = "1.6", default-features = false }
multipart-rs = "0.1.11"
native-tls = "0.2"
nix = { version = "0.29", default-features = false }

@ -143,7 +143,7 @@ rmp = "0.8"
rmp-serde = "1.3"
ropey = "1.6.1"
roxmltree = "0.19"
rstest = { version = "0.18", default-features = false }
rstest = { version = "0.23", default-features = false }
rusqlite = "0.31"
rust-embed = "8.5.0"
serde = { version = "1.0" }

@ -155,7 +155,7 @@ strip-ansi-escapes = "0.2.0"
syn = "2.0"
sysinfo = "0.32"
tabled = { version = "0.16.0", default-features = false }
tempfile = "3.13"
tempfile = "3.14"
terminal_size = "0.3"
titlecase = "2.0"
toml = "0.8"

@ -165,13 +165,14 @@ unicode-segmentation = "1.12"
unicode-width = "0.1"
ureq = { version = "2.10", default-features = false }
url = "2.2"
uu_cp = "0.0.27"
uu_mkdir = "0.0.27"
uu_mktemp = "0.0.27"
uu_mv = "0.0.27"
uu_whoami = "0.0.27"
uu_uname = "0.0.27"
uucore = "0.0.27"
uu_cp = "0.0.28"
uu_mkdir = "0.0.28"
uu_mktemp = "0.0.28"
uu_mv = "0.0.28"
uu_touch = "0.0.28"
uu_whoami = "0.0.28"
uu_uname = "0.0.28"
uucore = "0.0.28"
uuid = "1.11.0"
v_htmlescape = "0.15.0"
wax = "0.6"

@ -321,3 +322,6 @@ bench = false
[[bench]]
name = "benchmarks"
harness = false

[profile.dev]
incremental = true
@ -46,9 +46,6 @@ fn setup_stack_and_engine_from_command(command: &str) -> (Stack, EngineState) {

let mut stack = Stack::new();

// Support running benchmarks without IR mode
stack.use_ir = std::env::var_os("NU_DISABLE_IR").is_none();

evaluate_commands(
&commands,
&mut engine,
@ -1,5 +1,5 @@
use crate::NushellPrompt;
use log::trace;
use log::{trace, warn};
use nu_engine::ClosureEvalOnce;
use nu_protocol::{
engine::{EngineState, Stack},

@ -30,30 +30,21 @@ pub(crate) const TRANSIENT_PROMPT_MULTILINE_INDICATOR: &str =
pub(crate) const PRE_PROMPT_MARKER: &str = "\x1b]133;A\x1b\\";
pub(crate) const POST_PROMPT_MARKER: &str = "\x1b]133;B\x1b\\";
pub(crate) const PRE_EXECUTION_MARKER: &str = "\x1b]133;C\x1b\\";
#[allow(dead_code)]
pub(crate) const POST_EXECUTION_MARKER_PREFIX: &str = "\x1b]133;D;";
#[allow(dead_code)]
pub(crate) const POST_EXECUTION_MARKER_SUFFIX: &str = "\x1b\\";

// OSC633 is the same as OSC133 but specifically for VSCode
pub(crate) const VSCODE_PRE_PROMPT_MARKER: &str = "\x1b]633;A\x1b\\";
pub(crate) const VSCODE_POST_PROMPT_MARKER: &str = "\x1b]633;B\x1b\\";
#[allow(dead_code)]
pub(crate) const VSCODE_PRE_EXECUTION_MARKER: &str = "\x1b]633;C\x1b\\";
#[allow(dead_code)]
//"\x1b]633;D;{}\x1b\\"
pub(crate) const VSCODE_POST_EXECUTION_MARKER_PREFIX: &str = "\x1b]633;D;";
#[allow(dead_code)]
pub(crate) const VSCODE_POST_EXECUTION_MARKER_SUFFIX: &str = "\x1b\\";
#[allow(dead_code)]
//"\x1b]633;E;{}\x1b\\"
pub(crate) const VSCODE_COMMANDLINE_MARKER_PREFIX: &str = "\x1b]633;E;";
#[allow(dead_code)]
pub(crate) const VSCODE_COMMANDLINE_MARKER_SUFFIX: &str = "\x1b\\";
#[allow(dead_code)]
// "\x1b]633;P;Cwd={}\x1b\\"
pub(crate) const VSCODE_CWD_PROPERTY_MARKER_PREFIX: &str = "\x1b]633;P;Cwd=";
#[allow(dead_code)]
pub(crate) const VSCODE_CWD_PROPERTY_MARKER_SUFFIX: &str = "\x1b\\";

pub(crate) const RESET_APPLICATION_MODE: &str = "\x1b[?1l";
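For readers unfamiliar with these escape sequences, a minimal editorial sketch (not part of this commit) of how a shell might wrap a prompt with the OSC 133 markers and report the working directory with the OSC 633 `P;Cwd=` property; the constant values simply mirror the ones defined above:

```rust
/// Illustrative only: wrap a prompt in OSC 133 markers and append the
/// VSCode OSC 633 "P;Cwd=" property. Not nushell source code.
fn decorate_prompt(prompt: &str, cwd: &str) -> String {
    const PRE_PROMPT_MARKER: &str = "\x1b]133;A\x1b\\";
    const POST_PROMPT_MARKER: &str = "\x1b]133;B\x1b\\";
    const VSCODE_CWD_PROPERTY_MARKER_PREFIX: &str = "\x1b]633;P;Cwd=";
    const VSCODE_CWD_PROPERTY_MARKER_SUFFIX: &str = "\x1b\\";

    // "A" marks the start of the prompt, "B" marks the start of user input;
    // the 633 "P;Cwd=" property tells the terminal which directory the shell is in.
    format!(
        "{PRE_PROMPT_MARKER}{prompt}{POST_PROMPT_MARKER}\
{VSCODE_CWD_PROPERTY_MARKER_PREFIX}{cwd}{VSCODE_CWD_PROPERTY_MARKER_SUFFIX}"
    )
}

fn main() {
    print!("{}", decorate_prompt("~/nushell> ", "/home/user/nushell"));
}
```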
@ -89,8 +80,13 @@ fn get_prompt_string(
})
.and_then(|pipeline_data| {
let output = pipeline_data.collect_string("", config).ok();
let ansi_output = output.map(|mut x| {
// Always reset the color at the start of the right prompt
// to ensure there is no ansi bleed over
if x.is_empty() && prompt == PROMPT_COMMAND_RIGHT {
x.insert_str(0, "\x1b[0m")
};

output.map(|mut x| {
// Just remove the very last newline.
if x.ends_with('\n') {
x.pop();

@ -100,7 +96,11 @@ fn get_prompt_string(
x.pop();
}
x
})
});
// Let's keep this for debugging purposes with nu --log-level warn
warn!("{}:{}:{} {:?}", file!(), line!(), column!(), ansi_output);

ansi_output
})
}
@ -711,7 +711,6 @@ pub(crate) fn create_keybindings(config: &Config) -> Result<KeybindingsMode, She
}
for keybinding in parsed_keybindings {
add_keybinding(
&keybinding.name,
&keybinding.mode,
keybinding,
config,

@ -730,9 +729,7 @@ pub(crate) fn create_keybindings(config: &Config) -> Result<KeybindingsMode, She
}
}

#[allow(clippy::only_used_in_recursion)]
fn add_keybinding(
name: &Option<Value>,
mode: &Value,
keybinding: &ParsedKeybinding,
config: &Config,

@ -755,7 +752,6 @@ fn add_keybinding(
Value::List { vals, .. } => {
for inner_mode in vals {
add_keybinding(
name,
inner_mode,
keybinding,
config,
@ -306,9 +306,6 @@ fn loop_iteration(ctx: LoopContext) -> (bool, Stack, Reedline) {
if let Err(err) = engine_state.merge_env(&mut stack) {
report_shell_error(engine_state, &err);
}
// Check whether $env.NU_DISABLE_IR is set, so that the user can change it in the REPL
// Temporary while IR eval is optional
stack.use_ir = !stack.has_env_var(engine_state, "NU_DISABLE_IR");
perf!("merge env", start_time, use_color);

start_time = std::time::Instant::now();

@ -861,6 +858,8 @@ fn do_auto_cd(
report_shell_error(engine_state, &err);
return;
};
use nu_path::pwd_per_drive::pwd_per_drive::set_pwd_per_drive;
let _as_is = set_pwd_per_drive(PathBuf::from(path.clone()).as_path());
let cwd = Value::string(cwd, span);

let shells = stack.get_env_var(engine_state, "NUSHELL_SHELLS");
@ -82,9 +82,6 @@ impl Command for Do {
bind_args_to(&mut callee_stack, &block.signature, rest, head)?;
let eval_block_with_early_return = get_eval_block_with_early_return(engine_state);

// Applies to all block evaluation once set true
callee_stack.use_ir = !caller_stack.has_env_var(engine_state, "NU_DISABLE_IR");

let result = eval_block_with_early_return(engine_state, &mut callee_stack, block, input);

if has_env {
@ -96,6 +96,7 @@ uu_cp = { workspace = true }
uu_mkdir = { workspace = true }
uu_mktemp = { workspace = true }
uu_mv = { workspace = true }
uu_touch = { workspace = true }
uu_uname = { workspace = true }
uu_whoami = { workspace = true }
uuid = { workspace = true, features = ["v4"] }
@ -30,8 +30,6 @@ impl Command for DebugProfile {
"Collect pipeline element output values",
Some('v'),
)
.switch("expr", "Collect expression types", Some('x'))
.switch("instructions", "Collect IR instructions", Some('i'))
.switch("lines", "Collect line numbers", Some('l'))
.named(
"max-depth",

@ -48,37 +46,52 @@ impl Command for DebugProfile {
}

fn extra_description(&self) -> &str {
r#"The profiler profiles every evaluated pipeline element inside a closure, stepping into all
r#"The profiler profiles every evaluated instruction inside a closure, stepping into all
commands calls and other blocks/closures.

The output can be heavily customized. By default, the following columns are included:
- depth : Depth of the pipeline element. Each entered block adds one level of depth. How many
- depth : Depth of the instruction. Each entered block adds one level of depth. How many
blocks deep to step into is controlled with the --max-depth option.
- id : ID of the pipeline element
- parent_id : ID of the parent element
- source : Source code of the pipeline element. If the element has multiple lines, only the
first line is used and `...` is appended to the end. Full source code can be shown
with the --expand-source flag.
- duration_ms : How long it took to run the pipeline element in milliseconds.
- (optional) span : Span of the element. Can be viewed via the `view span` command. Enabled with
the --spans flag.
- (optional) expr : The type of expression of the pipeline element. Enabled with the --expr flag.
- (optional) output : The output value of the pipeline element. Enabled with the --values flag.
- id : ID of the instruction
- parent_id : ID of the instruction that created the parent scope
- source : Source code that generated the instruction. If the source code has multiple lines,
only the first line is used and `...` is appended to the end. Full source code can
be shown with the --expand-source flag.
- pc : The index of the instruction within the block.
- instruction : The pretty printed instruction being evaluated.
- duration_ms : How long it took to run the instruction in milliseconds.
- (optional) span : Span associated with the instruction. Can be viewed via the `view span`
command. Enabled with the --spans flag.
- (optional) output : The output value of the instruction. Enabled with the --values flag.

To illustrate the depth and IDs, consider `debug profile { if true { echo 'spam' } }`. There are
three pipeline elements:
To illustrate the depth and IDs, consider `debug profile { do { if true { echo 'spam' } } }`. A unique ID is generated each time an instruction is executed, and there are two levels of depth:

depth id parent_id
0 0 0 debug profile { do { if true { 'spam' } } }
1 1 0 if true { 'spam' }
2 2 1 'spam'
```
depth id parent_id source pc instruction
0 0 0 debug profile { do { if true { 'spam' } } } 0 <start>
1 1 0 { if true { 'spam' } } 0 load-literal %1, closure(2164)
1 2 0 { if true { 'spam' } } 1 push-positional %1
1 3 0 { do { if true { 'spam' } } } 2 redirect-out caller
1 4 0 { do { if true { 'spam' } } } 3 redirect-err caller
1 5 0 do 4 call decl 7 "do", %0
2 6 5 true 0 load-literal %1, bool(true)
2 7 5 if 1 not %1
2 8 5 if 2 branch-if %1, 5
2 9 5 'spam' 3 load-literal %0, string("spam")
2 10 5 if 4 jump 6
2 11 5 { if true { 'spam' } } 6 return %0
1 12 0 { do { if true { 'spam' } } } 5 return %0
```

Each block entered increments depth by 1 and each block left decrements it by one. This way you can
control the profiling granularity. Passing --max-depth=1 to the above would stop at
`if true { 'spam' }`. The id is used to identify each element. The parent_id tells you that 'spam'
was spawned from `if true { 'spam' }` which was spawned from the root `debug profile { ... }`.
control the profiling granularity. Passing --max-depth=1 to the above would stop inside the `do`
at `if true { 'spam' }`. The id is used to identify each element. The parent_id tells you that the
instructions inside the block are being executed because of `do` (5), which in turn was spawned from
the root `debug profile { ... }`.

Note: In some cases, the ordering of piepeline elements might not be intuitive. For example,
For a better understanding of how instructions map to source code, see the `view ir` command.

Note: In some cases, the ordering of pipeline elements might not be intuitive. For example,
`[ a bb cc ] | each { $in | str length }` involves some implicit collects and lazy evaluation
confusing the id/parent_id hierarchy. The --expr flag is helpful for investigating these issues."#
}
@ -94,8 +107,6 @@ confusing the id/parent_id hierarchy. The --expr flag is helpful for investigati
let collect_spans = call.has_flag(engine_state, stack, "spans")?;
let collect_expanded_source = call.has_flag(engine_state, stack, "expanded-source")?;
let collect_values = call.has_flag(engine_state, stack, "values")?;
let collect_exprs = call.has_flag(engine_state, stack, "expr")?;
let collect_instructions = call.has_flag(engine_state, stack, "instructions")?;
let collect_lines = call.has_flag(engine_state, stack, "lines")?;
let max_depth = call
.get_flag(engine_state, stack, "max-depth")?

@ -108,8 +119,8 @@ confusing the id/parent_id hierarchy. The --expr flag is helpful for investigati
collect_source: true,
collect_expanded_source,
collect_values,
collect_exprs,
collect_instructions,
collect_exprs: false,
collect_instructions: true,
collect_lines,
},
call.span(),
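As a rough editorial sketch of what the hunk above is doing (field names are taken from the diff; the surrounding struct and constructor shape are assumptions, not the real nu-protocol API): after this change, `debug profile` always records IR instructions and never records expression types, regardless of flags.

```rust
// Editorial sketch only: mirrors the option field names visible above.
// The real profiler options type lives in nu-protocol and may differ.
#[derive(Debug, Default)]
struct ProfilerOptionsSketch {
    max_depth: i64,
    collect_spans: bool,
    collect_source: bool,
    collect_expanded_source: bool,
    collect_values: bool,
    collect_exprs: bool,
    collect_instructions: bool,
    collect_lines: bool,
}

fn main() {
    // Instructions are now always collected; expression types never are.
    let opts = ProfilerOptionsSketch {
        max_depth: 2,
        collect_source: true,
        collect_exprs: false,
        collect_instructions: true,
        ..Default::default()
    };
    println!("{opts:?}");
}
```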
@ -230,6 +230,7 @@ pub fn add_shell_command_context(mut engine_state: EngineState) -> EngineState {
Rm,
Save,
Touch,
UTouch,
Glob,
Watch,
};
@ -114,6 +114,8 @@ impl Command for Cd {
//FIXME: this only changes the current scope, but instead this environment variable
//should probably be a block that loads the information from the state in the overlay
PermissionResult::PermissionOk => {
use nu_path::pwd_per_drive::pwd_per_drive::set_pwd_per_drive;
let _as_is = set_pwd_per_drive(path.as_path());
stack.set_cwd(path)?;
Ok(PipelineData::empty())
}
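The `set_pwd_per_drive` calls are the heart of the "PWD per drive" change, but their implementation is not shown in this diff. As a rough mental model only (an illustrative sketch, not the nu_path code): a per-drive PWD table on Windows remembers the last working directory used on each drive letter, so that a later `C:` or `D:`-relative path can be resolved against it.

```rust
use std::collections::HashMap;
use std::path::{Path, PathBuf};

/// Editorial sketch of a per-drive PWD table; the real logic lives in
/// nu_path::pwd_per_drive and may be organized differently.
#[derive(Default)]
struct PwdPerDrive {
    map: HashMap<char, PathBuf>,
}

impl PwdPerDrive {
    /// Remember `path` as the current directory for its drive (e.g. 'C').
    fn set(&mut self, path: &Path) {
        if let Some(drive) = drive_letter(path) {
            self.map.insert(drive, path.to_path_buf());
        }
    }

    /// Look up the last PWD recorded for a drive letter.
    fn get(&self, drive: char) -> Option<&PathBuf> {
        self.map.get(&drive.to_ascii_uppercase())
    }
}

/// Extract "C" from a path like "C:\Users\me", if present.
fn drive_letter(path: &Path) -> Option<char> {
    let s = path.to_string_lossy();
    let mut chars = s.chars();
    match (chars.next(), chars.next()) {
        (Some(c), Some(':')) if c.is_ascii_alphabetic() => Some(c.to_ascii_uppercase()),
        _ => None,
    }
}

fn main() {
    let mut table = PwdPerDrive::default();
    table.set(Path::new(r"C:\Users\me\src"));
    table.set(Path::new(r"D:\data"));
    assert_eq!(table.get('c'), Some(&PathBuf::from(r"C:\Users\me\src")));
    println!("{:?}", table.get('D'));
}
```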
@ -287,28 +287,10 @@ fn ls_for_one_pattern(
nu_path::expand_path_with(pat.item.as_ref(), &cwd, pat.item.is_expand());
// Avoid checking and pushing "*" to the path when directory (do not show contents) flag is true
if !directory && tmp_expanded.is_dir() {
if permission_denied(&tmp_expanded) {
#[cfg(unix)]
let error_msg = format!(
"The permissions of {:o} do not allow access for this user",
tmp_expanded
.metadata()
.expect("this shouldn't be called since we already know there is a dir")
.permissions()
.mode()
& 0o0777
);
#[cfg(not(unix))]
let error_msg = String::from("Permission denied");
return Err(ShellError::GenericError {
error: "Permission denied".into(),
msg: error_msg,
span: Some(p_tag),
help: None,
inner: vec![],
});
}
if is_empty_dir(&tmp_expanded) {
if read_dir(&tmp_expanded, p_tag, use_threads)?
.next()
.is_none()
{
return Ok(Value::test_nothing().into_pipeline_data());
}
just_read_dir = !(pat.item.is_expand() && pat.item.as_ref().contains(GLOB_CHARS));

@ -327,7 +309,7 @@ fn ls_for_one_pattern(
// Avoid pushing "*" to the default path when directory (do not show contents) flag is true
if directory {
(NuGlob::Expand(".".to_string()), false)
} else if is_empty_dir(&cwd) {
} else if read_dir(&cwd, p_tag, use_threads)?.next().is_none() {
return Ok(Value::test_nothing().into_pipeline_data());
} else {
(NuGlob::Expand("*".to_string()), false)

@ -339,7 +321,7 @@ fn ls_for_one_pattern(
let path = pattern_arg.into_spanned(p_tag);
let (prefix, paths) = if just_read_dir {
let expanded = nu_path::expand_path_with(path.item.as_ref(), &cwd, path.item.is_expand());
let paths = read_dir(&expanded)?;
let paths = read_dir(&expanded, p_tag, use_threads)?;
// just need to read the directory, so prefix is path itself.
(Some(expanded), paths)
} else {

@ -492,20 +474,6 @@ fn ls_for_one_pattern(
.into_pipeline_data(call_span, signals.clone()))
}

fn permission_denied(dir: impl AsRef<Path>) -> bool {
match dir.as_ref().read_dir() {
Err(e) => matches!(e.kind(), std::io::ErrorKind::PermissionDenied),
Ok(_) => false,
}
}

fn is_empty_dir(dir: impl AsRef<Path>) -> bool {
match dir.as_ref().read_dir() {
Err(_) => true,
Ok(mut s) => s.next().is_none(),
}
}

fn is_hidden_dir(dir: impl AsRef<Path>) -> bool {
#[cfg(windows)]
{

@ -979,10 +947,36 @@ mod windows_helper {
#[allow(clippy::type_complexity)]
fn read_dir(
f: &Path,
span: Span,
use_threads: bool,
) -> Result<Box<dyn Iterator<Item = Result<PathBuf, ShellError>> + Send>, ShellError> {
let iter = f.read_dir()?.map(|d| {
d.map(|r| r.path())
.map_err(|e| ShellError::IOError { msg: e.to_string() })
});
Ok(Box::new(iter))
let items = f
.read_dir()
.map_err(|error| {
if error.kind() == std::io::ErrorKind::PermissionDenied {
return ShellError::GenericError {
error: "Permission denied".into(),
msg: "The permissions may not allow access for this user".into(),
span: Some(span),
help: None,
inner: vec![],
};
}

error.into()
})?
.map(|d| {
d.map(|r| r.path())
.map_err(|e| ShellError::IOError { msg: e.to_string() })
});
if !use_threads {
let mut collected = items.collect::<Vec<_>>();
collected.sort_by(|a, b| {
let a = a.as_ref().expect("path should be valid");
let b = b.as_ref().expect("path should be valid");
a.cmp(b)
});
return Ok(Box::new(collected.into_iter()));
}
Ok(Box::new(items))
}
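In the single-threaded branch above, the new `read_dir` collects and sorts the entries so that `ls` output is deterministic (see the `consistent_list_order` test further down). A stripped-down, std-only sketch of that behaviour (illustrative, not the nushell code):

```rust
use std::io;
use std::path::{Path, PathBuf};

/// Editorial sketch: read a directory and return its entries in sorted order,
/// mirroring the threads-off branch of the new `read_dir` above.
fn sorted_entries(dir: &Path) -> io::Result<Vec<PathBuf>> {
    let mut entries = dir
        .read_dir()? // propagates PermissionDenied and other IO errors
        .map(|entry| entry.map(|e| e.path()))
        .collect::<io::Result<Vec<_>>>()?;
    entries.sort();
    Ok(entries)
}

fn main() -> io::Result<()> {
    for path in sorted_entries(Path::new("."))? {
        println!("{}", path.display());
    }
    Ok(())
}
```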
@ -12,6 +12,7 @@ mod ucp;
mod umkdir;
mod umv;
mod util;
mod utouch;
mod watch;

pub use self::open::Open;

@ -27,4 +28,5 @@ pub use touch::Touch;
pub use ucp::UCp;
pub use umkdir::UMkdir;
pub use umv::UMv;
pub use utouch::UTouch;
pub use watch::Watch;
@ -188,6 +188,7 @@ impl Command for UMv {
target_dir: None,
no_target_dir: false,
strip_slashes: false,
debug: false,
};
if let Err(error) = uu_mv::mv(&files, &options) {
return Err(ShellError::GenericError {
crates/nu-command/src/filesystem/utouch.rs (new file, 268 lines)

@ -0,0 +1,268 @@
|
||||
use std::io::ErrorKind;
|
||||
use std::path::PathBuf;
|
||||
|
||||
use chrono::{DateTime, FixedOffset};
|
||||
use filetime::FileTime;
|
||||
|
||||
use nu_engine::CallExt;
|
||||
use nu_path::expand_path_with;
|
||||
use nu_protocol::engine::{Call, Command, EngineState, Stack};
|
||||
use nu_protocol::{
|
||||
Category, Example, NuGlob, PipelineData, ShellError, Signature, Spanned, SyntaxShape, Type,
|
||||
};
|
||||
use uu_touch::error::TouchError;
|
||||
use uu_touch::{ChangeTimes, InputFile, Options, Source};
|
||||
|
||||
use super::util::get_rest_for_glob_pattern;
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct UTouch;
|
||||
|
||||
impl Command for UTouch {
|
||||
fn name(&self) -> &str {
|
||||
"utouch"
|
||||
}
|
||||
|
||||
fn search_terms(&self) -> Vec<&str> {
|
||||
vec!["create", "file"]
|
||||
}
|
||||
|
||||
fn signature(&self) -> Signature {
|
||||
Signature::build("utouch")
|
||||
.input_output_types(vec![ (Type::Nothing, Type::Nothing) ])
|
||||
.rest(
|
||||
"files",
|
||||
SyntaxShape::OneOf(vec![SyntaxShape::GlobPattern, SyntaxShape::Filepath]),
|
||||
"The file(s) to create. '-' is used to represent stdout."
|
||||
)
|
||||
.named(
|
||||
"reference",
|
||||
SyntaxShape::Filepath,
|
||||
"Use the access and modification times of the reference file/directory instead of the current time",
|
||||
Some('r'),
|
||||
)
|
||||
.named(
|
||||
"timestamp",
|
||||
SyntaxShape::DateTime,
|
||||
"Use the given timestamp instead of the current time",
|
||||
Some('t')
|
||||
)
|
||||
.named(
|
||||
"date",
|
||||
SyntaxShape::String,
|
||||
"Use the given time instead of the current time. This can be a full timestamp or it can be relative to either the current time or reference file time (if given). For more information, see https://www.gnu.org/software/coreutils/manual/html_node/touch-invocation.html",
|
||||
Some('d')
|
||||
)
|
||||
.switch(
|
||||
"modified",
|
||||
"Change only the modification time (if used with -a, access time is changed too)",
|
||||
Some('m'),
|
||||
)
|
||||
.switch(
|
||||
"access",
|
||||
"Change only the access time (if used with -m, modification time is changed too)",
|
||||
Some('a'),
|
||||
)
|
||||
.switch(
|
||||
"no-create",
|
||||
"Don't create the file if it doesn't exist",
|
||||
Some('c'),
|
||||
)
|
||||
.switch(
|
||||
"no-deref",
|
||||
"Affect each symbolic link instead of any referenced file (only for systems that can change the timestamps of a symlink). Ignored if touching stdout",
|
||||
Some('s'),
|
||||
)
|
||||
.category(Category::FileSystem)
|
||||
}
|
||||
|
||||
fn description(&self) -> &str {
|
||||
"Creates one or more files."
|
||||
}
|
||||
|
||||
fn run(
|
||||
&self,
|
||||
engine_state: &EngineState,
|
||||
stack: &mut Stack,
|
||||
call: &Call,
|
||||
_input: PipelineData,
|
||||
) -> Result<PipelineData, ShellError> {
|
||||
let change_mtime: bool = call.has_flag(engine_state, stack, "modified")?;
|
||||
let change_atime: bool = call.has_flag(engine_state, stack, "access")?;
|
||||
let no_create: bool = call.has_flag(engine_state, stack, "no-create")?;
|
||||
let no_deref: bool = call.has_flag(engine_state, stack, "no-dereference")?;
|
||||
let file_globs: Vec<Spanned<NuGlob>> =
|
||||
get_rest_for_glob_pattern(engine_state, stack, call, 0)?;
|
||||
let cwd = engine_state.cwd(Some(stack))?;
|
||||
|
||||
if file_globs.is_empty() {
|
||||
return Err(ShellError::MissingParameter {
|
||||
param_name: "requires file paths".to_string(),
|
||||
span: call.head,
|
||||
});
|
||||
}
|
||||
|
||||
let (reference_file, reference_span) = if let Some(reference) =
|
||||
call.get_flag::<Spanned<PathBuf>>(engine_state, stack, "reference")?
|
||||
{
|
||||
(Some(reference.item), Some(reference.span))
|
||||
} else {
|
||||
(None, None)
|
||||
};
|
||||
let (date_str, date_span) =
|
||||
if let Some(date) = call.get_flag::<Spanned<String>>(engine_state, stack, "date")? {
|
||||
(Some(date.item), Some(date.span))
|
||||
} else {
|
||||
(None, None)
|
||||
};
|
||||
let timestamp: Option<Spanned<DateTime<FixedOffset>>> =
|
||||
call.get_flag(engine_state, stack, "timestamp")?;
|
||||
|
||||
let source = if let Some(timestamp) = timestamp {
|
||||
if let Some(reference_span) = reference_span {
|
||||
return Err(ShellError::IncompatibleParameters {
|
||||
left_message: "timestamp given".to_string(),
|
||||
left_span: timestamp.span,
|
||||
right_message: "reference given".to_string(),
|
||||
right_span: reference_span,
|
||||
});
|
||||
}
|
||||
if let Some(date_span) = date_span {
|
||||
return Err(ShellError::IncompatibleParameters {
|
||||
left_message: "timestamp given".to_string(),
|
||||
left_span: timestamp.span,
|
||||
right_message: "date given".to_string(),
|
||||
right_span: date_span,
|
||||
});
|
||||
}
|
||||
Source::Timestamp(FileTime::from_unix_time(
|
||||
timestamp.item.timestamp(),
|
||||
timestamp.item.timestamp_subsec_nanos(),
|
||||
))
|
||||
} else if let Some(reference_file) = reference_file {
|
||||
let reference_file = expand_path_with(reference_file, &cwd, true);
|
||||
Source::Reference(reference_file)
|
||||
} else {
|
||||
Source::Now
|
||||
};
|
||||
|
||||
let change_times = if change_atime && !change_mtime {
|
||||
ChangeTimes::AtimeOnly
|
||||
} else if change_mtime && !change_atime {
|
||||
ChangeTimes::MtimeOnly
|
||||
} else {
|
||||
ChangeTimes::Both
|
||||
};
|
||||
|
||||
let mut input_files = Vec::new();
|
||||
for file_glob in &file_globs {
|
||||
if file_glob.item.as_ref() == "-" {
|
||||
input_files.push(InputFile::Stdout);
|
||||
} else {
|
||||
let path =
|
||||
expand_path_with(file_glob.item.as_ref(), &cwd, file_glob.item.is_expand());
|
||||
input_files.push(InputFile::Path(path));
|
||||
}
|
||||
}
|
||||
|
||||
if let Err(err) = uu_touch::touch(
|
||||
&input_files,
|
||||
&Options {
|
||||
no_create,
|
||||
no_deref,
|
||||
source,
|
||||
date: date_str,
|
||||
change_times,
|
||||
strict: true,
|
||||
},
|
||||
) {
|
||||
let nu_err = match err {
|
||||
TouchError::TouchFileError { path, index, error } => ShellError::GenericError {
|
||||
error: format!("Could not touch {}", path.display()),
|
||||
msg: error.to_string(),
|
||||
span: Some(file_globs[index].span),
|
||||
help: None,
|
||||
inner: Vec::new(),
|
||||
},
|
||||
TouchError::InvalidDateFormat(date) => ShellError::IncorrectValue {
|
||||
msg: format!("Invalid date: {}", date),
|
||||
val_span: date_span.expect("utouch should've been given a date"),
|
||||
call_span: call.head,
|
||||
},
|
||||
TouchError::ReferenceFileInaccessible(reference_path, io_err) => {
|
||||
let span =
|
||||
reference_span.expect("utouch should've been given a reference file");
|
||||
if io_err.kind() == ErrorKind::NotFound {
|
||||
ShellError::FileNotFound {
|
||||
span,
|
||||
file: reference_path.display().to_string(),
|
||||
}
|
||||
} else {
|
||||
ShellError::GenericError {
|
||||
error: io_err.to_string(),
|
||||
msg: format!("Failed to read metadata of {}", reference_path.display()),
|
||||
span: Some(span),
|
||||
help: None,
|
||||
inner: Vec::new(),
|
||||
}
|
||||
}
|
||||
}
|
||||
_ => ShellError::GenericError {
|
||||
error: err.to_string(),
|
||||
msg: err.to_string(),
|
||||
span: Some(call.head),
|
||||
help: None,
|
||||
inner: Vec::new(),
|
||||
},
|
||||
};
|
||||
return Err(nu_err);
|
||||
}
|
||||
|
||||
Ok(PipelineData::empty())
|
||||
}
|
||||
|
||||
fn examples(&self) -> Vec<Example> {
|
||||
vec![
|
||||
Example {
|
||||
description: "Creates \"fixture.json\"",
|
||||
example: "utouch fixture.json",
|
||||
result: None,
|
||||
},
|
||||
Example {
|
||||
description: "Creates files a, b and c",
|
||||
example: "utouch a b c",
|
||||
result: None,
|
||||
},
|
||||
Example {
|
||||
description: r#"Changes the last modified time of "fixture.json" to today's date"#,
|
||||
example: "utouch -m fixture.json",
|
||||
result: None,
|
||||
},
|
||||
Example {
|
||||
description: "Changes the last accessed and modified times of files a, b and c to the current time but yesterday",
|
||||
example: r#"utouch -d "yesterday" a b c"#,
|
||||
result: None,
|
||||
},
|
||||
Example {
|
||||
description: r#"Changes the last modified time of files d and e to "fixture.json"'s last modified time"#,
|
||||
example: r#"utouch -m -r fixture.json d e"#,
|
||||
result: None,
|
||||
},
|
||||
Example {
|
||||
description: r#"Changes the last accessed time of "fixture.json" to a datetime"#,
|
||||
example: r#"utouch -a -t 2019-08-24T12:30:30 fixture.json"#,
|
||||
result: None,
|
||||
},
|
||||
Example {
|
||||
description: r#"Change the last accessed and modified times of stdout"#,
|
||||
example: r#"utouch -"#,
|
||||
result: None,
|
||||
},
|
||||
Example {
|
||||
description: r#"Changes the last accessed and modified times of file a to 1 month before "fixture.json"'s last modified time"#,
|
||||
example: r#"utouch -r fixture.json -d "-1 month" a"#,
|
||||
result: None,
|
||||
},
|
||||
]
|
||||
}
|
||||
}
|
@ -1,6 +1,6 @@
|
||||
use indexmap::IndexMap;
|
||||
use nu_engine::{command_prelude::*, ClosureEval};
|
||||
use nu_protocol::engine::Closure;
|
||||
use nu_protocol::{engine::Closure, FromValue, IntoValue};
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct GroupBy;
|
||||
@ -12,17 +12,13 @@ impl Command for GroupBy {
|
||||
|
||||
fn signature(&self) -> Signature {
|
||||
Signature::build("group-by")
|
||||
// TODO: It accepts Table also, but currently there is no Table
|
||||
// example. Perhaps Table should be a subtype of List, in which case
|
||||
// the current signature would suffice even when a Table example
|
||||
// exists.
|
||||
.input_output_types(vec![(Type::List(Box::new(Type::Any)), Type::Any)])
|
||||
.switch(
|
||||
"to-table",
|
||||
"Return a table with \"groups\" and \"items\" columns",
|
||||
None,
|
||||
)
|
||||
.optional(
|
||||
.rest(
|
||||
"grouper",
|
||||
SyntaxShape::OneOf(vec![
|
||||
SyntaxShape::CellPath,
|
||||
@ -135,7 +131,89 @@ impl Command for GroupBy {
|
||||
Value::test_string("false"),
|
||||
]),
|
||||
})),
|
||||
}
|
||||
},
|
||||
Example {
|
||||
description: "Group items by multiple columns' values",
|
||||
example: r#"[
|
||||
[name, lang, year];
|
||||
[andres, rb, "2019"],
|
||||
[jt, rs, "2019"],
|
||||
[storm, rs, "2021"]
|
||||
]
|
||||
| group-by lang year"#,
|
||||
result: Some(Value::test_record(record! {
|
||||
"rb" => Value::test_record(record! {
|
||||
"2019" => Value::test_list(
|
||||
vec![Value::test_record(record! {
|
||||
"name" => Value::test_string("andres"),
|
||||
"lang" => Value::test_string("rb"),
|
||||
"year" => Value::test_string("2019"),
|
||||
})],
|
||||
),
|
||||
}),
|
||||
"rs" => Value::test_record(record! {
|
||||
"2019" => Value::test_list(
|
||||
vec![Value::test_record(record! {
|
||||
"name" => Value::test_string("jt"),
|
||||
"lang" => Value::test_string("rs"),
|
||||
"year" => Value::test_string("2019"),
|
||||
})],
|
||||
),
|
||||
"2021" => Value::test_list(
|
||||
vec![Value::test_record(record! {
|
||||
"name" => Value::test_string("storm"),
|
||||
"lang" => Value::test_string("rs"),
|
||||
"year" => Value::test_string("2021"),
|
||||
})],
|
||||
),
|
||||
}),
|
||||
}))
|
||||
},
|
||||
Example {
|
||||
description: "Group items by multiple columns' values",
|
||||
example: r#"[
|
||||
[name, lang, year];
|
||||
[andres, rb, "2019"],
|
||||
[jt, rs, "2019"],
|
||||
[storm, rs, "2021"]
|
||||
]
|
||||
| group-by lang year --to-table"#,
|
||||
result: Some(Value::test_list(vec![
|
||||
Value::test_record(record! {
|
||||
"lang" => Value::test_string("rb"),
|
||||
"year" => Value::test_string("2019"),
|
||||
"items" => Value::test_list(vec![
|
||||
Value::test_record(record! {
|
||||
"name" => Value::test_string("andres"),
|
||||
"lang" => Value::test_string("rb"),
|
||||
"year" => Value::test_string("2019"),
|
||||
})
|
||||
]),
|
||||
}),
|
||||
Value::test_record(record! {
|
||||
"lang" => Value::test_string("rs"),
|
||||
"year" => Value::test_string("2019"),
|
||||
"items" => Value::test_list(vec![
|
||||
Value::test_record(record! {
|
||||
"name" => Value::test_string("jt"),
|
||||
"lang" => Value::test_string("rs"),
|
||||
"year" => Value::test_string("2019"),
|
||||
})
|
||||
]),
|
||||
}),
|
||||
Value::test_record(record! {
|
||||
"lang" => Value::test_string("rs"),
|
||||
"year" => Value::test_string("2021"),
|
||||
"items" => Value::test_list(vec![
|
||||
Value::test_record(record! {
|
||||
"name" => Value::test_string("storm"),
|
||||
"lang" => Value::test_string("rs"),
|
||||
"year" => Value::test_string("2021"),
|
||||
})
|
||||
]),
|
||||
}),
|
||||
]))
|
||||
},
|
||||
]
|
||||
}
|
||||
}
|
||||
@ -147,7 +225,7 @@ pub fn group_by(
|
||||
input: PipelineData,
|
||||
) -> Result<PipelineData, ShellError> {
|
||||
let head = call.head;
|
||||
let grouper: Option<Value> = call.opt(engine_state, stack, 0)?;
|
||||
let groupers: Vec<Spanned<Grouper>> = call.rest(engine_state, stack, 0)?;
|
||||
let to_table = call.has_flag(engine_state, stack, "to-table")?;
|
||||
let config = engine_state.get_config();
|
||||
|
||||
@ -156,36 +234,88 @@ pub fn group_by(
|
||||
return Ok(Value::record(Record::new(), head).into_pipeline_data());
|
||||
}
|
||||
|
||||
let groups = match grouper {
|
||||
Some(grouper) => {
|
||||
let span = grouper.span();
|
||||
match grouper {
|
||||
Value::CellPath { val, .. } => group_cell_path(val, values, config)?,
|
||||
Value::Closure { val, .. } => {
|
||||
group_closure(values, span, *val, engine_state, stack)?
|
||||
}
|
||||
_ => {
|
||||
return Err(ShellError::TypeMismatch {
|
||||
err_message: "unsupported grouper type".to_string(),
|
||||
span,
|
||||
})
|
||||
}
|
||||
let grouped = match &groupers[..] {
|
||||
[first, rest @ ..] => {
|
||||
let mut grouped = Grouped::new(first.as_ref(), values, config, engine_state, stack)?;
|
||||
for grouper in rest {
|
||||
grouped.subgroup(grouper.as_ref(), config, engine_state, stack)?;
|
||||
}
|
||||
grouped
|
||||
}
|
||||
None => group_no_grouper(values, config)?,
|
||||
[] => Grouped::empty(values, config),
|
||||
};
|
||||
|
||||
let value = if to_table {
|
||||
groups_to_table(groups, head)
|
||||
let column_names = groupers_to_column_names(&groupers)?;
|
||||
grouped.into_table(&column_names, head)
|
||||
} else {
|
||||
groups_to_record(groups, head)
|
||||
grouped.into_record(head)
|
||||
};
|
||||
|
||||
Ok(value.into_pipeline_data())
|
||||
}
|
||||
|
||||
fn groupers_to_column_names(groupers: &[Spanned<Grouper>]) -> Result<Vec<String>, ShellError> {
|
||||
if groupers.is_empty() {
|
||||
return Ok(vec!["group".into(), "items".into()]);
|
||||
}
|
||||
|
||||
let mut closure_idx: usize = 0;
|
||||
let grouper_names = groupers.iter().map(|grouper| {
|
||||
grouper.as_ref().map(|item| match item {
|
||||
Grouper::CellPath { val } => val.to_column_name(),
|
||||
Grouper::Closure { .. } => {
|
||||
closure_idx += 1;
|
||||
format!("closure_{}", closure_idx - 1)
|
||||
}
|
||||
})
|
||||
});
|
||||
|
||||
let mut name_set: Vec<Spanned<String>> = Vec::with_capacity(grouper_names.len());
|
||||
|
||||
for name in grouper_names {
|
||||
if name.item == "items" {
|
||||
return Err(ShellError::GenericError {
|
||||
error: "grouper arguments can't be named `items`".into(),
|
||||
msg: "here".into(),
|
||||
span: Some(name.span),
|
||||
help: Some("instead of a cell-path, try using a closure: { get items }".into()),
|
||||
inner: vec![],
|
||||
});
|
||||
}
|
||||
|
||||
if let Some(conflicting_name) = name_set
|
||||
.iter()
|
||||
.find(|elem| elem.as_ref().item == name.item.as_str())
|
||||
{
|
||||
return Err(ShellError::GenericError {
|
||||
error: "grouper arguments result in colliding column names".into(),
|
||||
msg: "duplicate column names".into(),
|
||||
span: Some(conflicting_name.span.append(name.span)),
|
||||
help: Some(
|
||||
"instead of a cell-path, try using a closure or renaming columns".into(),
|
||||
),
|
||||
inner: vec![ShellError::ColumnDefinedTwice {
|
||||
col_name: conflicting_name.item.clone(),
|
||||
first_use: conflicting_name.span,
|
||||
second_use: name.span,
|
||||
}],
|
||||
});
|
||||
}
|
||||
|
||||
name_set.push(name);
|
||||
}
|
||||
|
||||
let column_names: Vec<String> = name_set
|
||||
.into_iter()
|
||||
.map(|elem| elem.item)
|
||||
.chain(["items".into()])
|
||||
.collect();
|
||||
Ok(column_names)
|
||||
}
|
||||
|
||||
fn group_cell_path(
|
||||
column_name: CellPath,
|
||||
column_name: &CellPath,
|
||||
values: Vec<Value>,
|
||||
config: &nu_protocol::Config,
|
||||
) -> Result<IndexMap<String, Vec<Value>>, ShellError> {
|
||||
@ -207,20 +337,6 @@ fn group_cell_path(
|
||||
Ok(groups)
|
||||
}
|
||||
|
||||
fn group_no_grouper(
|
||||
values: Vec<Value>,
|
||||
config: &nu_protocol::Config,
|
||||
) -> Result<IndexMap<String, Vec<Value>>, ShellError> {
|
||||
let mut groups = IndexMap::<_, Vec<_>>::new();
|
||||
|
||||
for value in values.into_iter() {
|
||||
let key = value.to_abbreviated_string(config);
|
||||
groups.entry(key).or_default().push(value);
|
||||
}
|
||||
|
||||
Ok(groups)
|
||||
}
|
||||
|
||||
fn group_closure(
|
||||
values: Vec<Value>,
|
||||
span: Span,
|
||||
@ -244,32 +360,147 @@ fn group_closure(
|
||||
Ok(groups)
|
||||
}
|
||||
|
||||
fn groups_to_record(groups: IndexMap<String, Vec<Value>>, span: Span) -> Value {
|
||||
Value::record(
|
||||
groups
|
||||
.into_iter()
|
||||
.map(|(k, v)| (k, Value::list(v, span)))
|
||||
.collect(),
|
||||
span,
|
||||
)
|
||||
enum Grouper {
|
||||
CellPath { val: CellPath },
|
||||
Closure { val: Box<Closure> },
|
||||
}
|
||||
|
||||
fn groups_to_table(groups: IndexMap<String, Vec<Value>>, span: Span) -> Value {
|
||||
Value::list(
|
||||
groups
|
||||
impl FromValue for Grouper {
|
||||
fn from_value(v: Value) -> Result<Self, ShellError> {
|
||||
match v {
|
||||
Value::CellPath { val, .. } => Ok(Grouper::CellPath { val }),
|
||||
Value::Closure { val, .. } => Ok(Grouper::Closure { val }),
|
||||
_ => Err(ShellError::TypeMismatch {
|
||||
err_message: "unsupported grouper type".to_string(),
|
||||
span: v.span(),
|
||||
}),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
struct Grouped {
|
||||
groups: Tree,
|
||||
}
|
||||
|
||||
enum Tree {
|
||||
Leaf(IndexMap<String, Vec<Value>>),
|
||||
Branch(IndexMap<String, Grouped>),
|
||||
}
|
||||
|
||||
impl Grouped {
|
||||
fn empty(values: Vec<Value>, config: &nu_protocol::Config) -> Self {
|
||||
let mut groups = IndexMap::<_, Vec<_>>::new();
|
||||
|
||||
for value in values.into_iter() {
|
||||
let key = value.to_abbreviated_string(config);
|
||||
groups.entry(key).or_default().push(value);
|
||||
}
|
||||
|
||||
Self {
|
||||
groups: Tree::Leaf(groups),
|
||||
}
|
||||
}
|
||||
|
||||
fn new(
|
||||
grouper: Spanned<&Grouper>,
|
||||
values: Vec<Value>,
|
||||
config: &nu_protocol::Config,
|
||||
engine_state: &EngineState,
|
||||
stack: &mut Stack,
|
||||
) -> Result<Self, ShellError> {
|
||||
let groups = match grouper.item {
|
||||
Grouper::CellPath { val } => group_cell_path(val, values, config)?,
|
||||
Grouper::Closure { val } => group_closure(
|
||||
values,
|
||||
grouper.span,
|
||||
Closure::clone(val),
|
||||
engine_state,
|
||||
stack,
|
||||
)?,
|
||||
};
|
||||
Ok(Self {
|
||||
groups: Tree::Leaf(groups),
|
||||
})
|
||||
}
|
||||
|
||||
fn subgroup(
|
||||
&mut self,
|
||||
grouper: Spanned<&Grouper>,
|
||||
config: &nu_protocol::Config,
|
||||
engine_state: &EngineState,
|
||||
stack: &mut Stack,
|
||||
) -> Result<(), ShellError> {
|
||||
let groups = match &mut self.groups {
|
||||
Tree::Leaf(groups) => std::mem::take(groups)
|
||||
.into_iter()
|
||||
.map(|(key, values)| -> Result<_, ShellError> {
|
||||
let leaf = Self::new(grouper, values, config, engine_state, stack)?;
|
||||
Ok((key, leaf))
|
||||
})
|
||||
.collect::<Result<IndexMap<_, _>, ShellError>>()?,
|
||||
Tree::Branch(nested_groups) => {
|
||||
let mut nested_groups = std::mem::take(nested_groups);
|
||||
for v in nested_groups.values_mut() {
|
||||
v.subgroup(grouper, config, engine_state, stack)?;
|
||||
}
|
||||
nested_groups
|
||||
}
|
||||
};
|
||||
self.groups = Tree::Branch(groups);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn into_table(self, column_names: &[String], head: Span) -> Value {
|
||||
self._into_table(head)
|
||||
.into_iter()
|
||||
.map(|(group, items)| {
|
||||
Value::record(
|
||||
record! {
|
||||
"group" => Value::string(group, span),
|
||||
"items" => Value::list(items, span),
|
||||
},
|
||||
span,
|
||||
)
|
||||
.map(|row| {
|
||||
row.into_iter()
|
||||
.rev()
|
||||
.zip(column_names)
|
||||
.map(|(val, key)| (key.clone(), val))
|
||||
.collect::<Record>()
|
||||
.into_value(head)
|
||||
})
|
||||
.collect(),
|
||||
span,
|
||||
)
|
||||
.collect::<Vec<_>>()
|
||||
.into_value(head)
|
||||
}
|
||||
|
||||
fn _into_table(self, head: Span) -> Vec<Vec<Value>> {
|
||||
match self.groups {
|
||||
Tree::Leaf(leaf) => leaf
|
||||
.into_iter()
|
||||
.map(|(group, values)| vec![(values.into_value(head)), (group.into_value(head))])
|
||||
.collect::<Vec<Vec<Value>>>(),
|
||||
Tree::Branch(branch) => branch
|
||||
.into_iter()
|
||||
.flat_map(|(group, items)| {
|
||||
let mut inner = items._into_table(head);
|
||||
for row in &mut inner {
|
||||
row.push(group.clone().into_value(head));
|
||||
}
|
||||
inner
|
||||
})
|
||||
.collect(),
|
||||
}
|
||||
}
|
||||
|
||||
fn into_record(self, head: Span) -> Value {
|
||||
match self.groups {
|
||||
Tree::Leaf(leaf) => Value::record(
|
||||
leaf.into_iter()
|
||||
.map(|(k, v)| (k, v.into_value(head)))
|
||||
.collect(),
|
||||
head,
|
||||
),
|
||||
Tree::Branch(branch) => {
|
||||
let values = branch
|
||||
.into_iter()
|
||||
.map(|(k, v)| (k, v.into_record(head)))
|
||||
.collect();
|
||||
Value::record(values, head)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
|
@ -45,9 +45,10 @@ impl Command for SeqChar {
)),
},
Example {
description: "sequence a to e, and put the characters in a pipe-separated string",
description: "Sequence a to e, and join the characters with a pipe",
example: "seq char a e | str join '|'",
// TODO: it would be nice to test this example, but it currently breaks the input/output type tests
// result: Some(Value::test_string("a|b|c|d|e")),
result: None,
},
]

@ -65,7 +66,7 @@ impl Command for SeqChar {
}

fn is_single_character(ch: &str) -> bool {
ch.is_ascii() && ch.len() == 1 && ch.chars().all(char::is_alphabetic)
ch.is_ascii() && (ch.len() == 1)
}

fn seq_char(

@ -79,7 +80,7 @@ fn seq_char(
if !is_single_character(&start.item) {
return Err(ShellError::GenericError {
error: "seq char only accepts individual ASCII characters as parameters".into(),
msg: "should be 1 character long".into(),
msg: "input should be a single ASCII character".into(),
span: Some(start.span),
help: None,
inner: vec![],

@ -89,7 +90,7 @@ fn seq_char(
if !is_single_character(&end.item) {
return Err(ShellError::GenericError {
error: "seq char only accepts individual ASCII characters as parameters".into(),
msg: "should be 1 character long".into(),
msg: "input should be a single ASCII character".into(),
span: Some(end.span),
help: None,
inner: vec![],

@ -115,18 +116,27 @@ fn seq_char(
}

fn run_seq_char(start_ch: char, end_ch: char, span: Span) -> Result<PipelineData, ShellError> {
let mut result_vec = vec![];
for current_ch in start_ch as u8..end_ch as u8 + 1 {
result_vec.push((current_ch as char).to_string())
}

let start = start_ch as u8;
let end = end_ch as u8;
let range = if start <= end {
start..=end
} else {
end..=start
};
let result_vec = if start <= end {
range.map(|c| (c as char).to_string()).collect::<Vec<_>>()
} else {
range
.rev()
.map(|c| (c as char).to_string())
.collect::<Vec<_>>()
};
let result = result_vec
.into_iter()
.map(|x| Value::string(x, span))
.collect::<Vec<Value>>();
Ok(Value::list(result, span).into_pipeline_data())
}

#[cfg(test)]
mod tests {
use super::*;
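The updated `run_seq_char` above also accepts a descending range (`seq char e a`), walking the byte range in reverse when the start character is greater than the end character. A compact standalone sketch of the same idea (illustrative, not the nushell code):

```rust
/// Editorial sketch: generate an ASCII character sequence in either direction,
/// as the updated run_seq_char does.
fn char_sequence(start: char, end: char) -> Vec<char> {
    let (lo, hi) = (start.min(end) as u8, start.max(end) as u8);
    let ascending: Vec<char> = (lo..=hi).map(char::from).collect();
    if start <= end {
        ascending
    } else {
        ascending.into_iter().rev().collect()
    }
}

fn main() {
    assert_eq!(char_sequence('a', 'e'), vec!['a', 'b', 'c', 'd', 'e']);
    assert_eq!(char_sequence('e', 'a'), vec!['e', 'd', 'c', 'b', 'a']);
}
```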
@ -45,11 +45,6 @@ impl NuProgressBar {
self.pb.set_position(bytes_processed);
}

#[allow(dead_code)]
pub fn finished_msg(&self, msg: String) {
self.pb.finish_with_message(msg);
}

pub fn abandoned_msg(&self, msg: String) {
self.pb.abandon_with_message(msg);
}
@ -291,42 +291,39 @@ fn positions_helper(blanks: &[usize], min_lines: usize) -> Vec<usize> {
pos
}

// to_rows returns rows separated by columns.
#[allow(dead_code)]
fn to_rows(lines: Vec<String>, pos: Vec<usize>, trim_space: bool) -> Vec<Vec<String>> {
let mut rows: Vec<Vec<String>> = Vec::with_capacity(lines.len());
for line in lines {
let columns = split(&line, &pos, trim_space);
rows.push(columns);
}
rows
}

// to_table parses a slice of lines and returns a table.
#[allow(dead_code)]
pub fn to_table(lines: Vec<String>, header: usize, trim_space: bool) -> Vec<Vec<String>> {
let pos = positions(&lines, header, 2);
to_rows(lines, pos, trim_space)
}

// to_table_n parses a slice of lines and returns a table, but limits the number of splits.
#[allow(dead_code)]
pub fn to_table_n(
lines: Vec<String>,
header: usize,
num_split: usize,
trim_space: bool,
) -> Vec<Vec<String>> {
let mut pos = positions(&lines, header, 2);
if pos.len() > num_split {
pos.truncate(num_split);
}
to_rows(lines, pos, trim_space)
}

#[cfg(test)]
mod tests {
use super::{to_table, to_table_n, GuessWidth};
use super::*;

/// to_rows returns rows separated by columns.
fn to_rows(lines: Vec<String>, pos: Vec<usize>, trim_space: bool) -> Vec<Vec<String>> {
let mut rows: Vec<Vec<String>> = Vec::with_capacity(lines.len());
for line in lines {
let columns = split(&line, &pos, trim_space);
rows.push(columns);
}
rows
}

/// to_table parses a slice of lines and returns a table.
pub fn to_table(lines: Vec<String>, header: usize, trim_space: bool) -> Vec<Vec<String>> {
let pos = positions(&lines, header, 2);
to_rows(lines, pos, trim_space)
}

/// to_table_n parses a slice of lines and returns a table, but limits the number of splits.
pub fn to_table_n(
lines: Vec<String>,
header: usize,
num_split: usize,
trim_space: bool,
) -> Vec<Vec<String>> {
let mut pos = positions(&lines, header, 2);
if pos.len() > num_split {
pos.truncate(num_split);
}
to_rows(lines, pos, trim_space)
}

#[test]
fn test_guess_width_ps_trim() {
@ -51,7 +51,6 @@ impl Command for External {
input: PipelineData,
) -> Result<PipelineData, ShellError> {
let cwd = engine_state.cwd(Some(stack))?;

let name: Value = call.req(engine_state, stack, 0)?;

let name_str: Cow<str> = match &name {

@ -68,10 +67,36 @@ impl Command for External {
_ => Path::new(&*name_str).to_owned(),
};

// On Windows, the user could have run the cmd.exe built-in "assoc" command
// Example: "assoc .nu=nuscript" and then run the cmd.exe built-in "ftype" command
// Example: "ftype nuscript=C:\path\to\nu.exe '%1' %*" and then added the nushell
// script extension ".NU" to the PATHEXT environment variable. In this case, we use
// the which command, which will find the executable with or without the extension.
// If it "which" returns true, that means that we've found the nushell script and we
// believe the user wants to use the windows association to run the script. The only
// easy way to do this is to run cmd.exe with the script as an argument.
let potential_nuscript_in_windows = if cfg!(windows) {
// let's make sure it's a .nu script
if let Some(executable) = which(&expanded_name, "", cwd.as_ref()) {
let ext = executable
.extension()
.unwrap_or_default()
.to_string_lossy()
.to_uppercase();
ext == "NU"
} else {
false
}
} else {
false
};

// Find the absolute path to the executable. On Windows, set the
// executable to "cmd.exe" if it's a CMD internal command. If the
// command is not found, display a helpful error message.
let executable = if cfg!(windows) && is_cmd_internal_command(&name_str) {
let executable = if cfg!(windows)
&& (is_cmd_internal_command(&name_str) || potential_nuscript_in_windows)
{
PathBuf::from("cmd.exe")
} else {
// Determine the PATH to be used and then use `which` to find it - though this has no
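The check described in the comment above boils down to: resolve the name on PATH, then see whether the resolved file ends in `.nu`, case-insensitively. A minimal standalone sketch of just that extension test (illustrative, assuming the path has already been resolved by a `which`-style lookup):

```rust
use std::path::Path;

/// Editorial sketch of the extension check described above: does a resolved
/// executable path point at a `.nu` script (case-insensitive)?
fn is_nu_script(resolved: &Path) -> bool {
    resolved
        .extension()
        .map(|ext| ext.to_string_lossy().eq_ignore_ascii_case("nu"))
        .unwrap_or(false)
}

fn main() {
    assert!(is_nu_script(Path::new(r"C:\scripts\hello.NU")));
    assert!(!is_nu_script(Path::new(r"C:\Windows\System32\cmd.exe")));
}
```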
@ -97,7 +122,7 @@ impl Command for External {
// Configure args.
let args = eval_arguments_from_call(engine_state, stack, call)?;
#[cfg(windows)]
if is_cmd_internal_command(&name_str) {
if is_cmd_internal_command(&name_str) || potential_nuscript_in_windows {
use std::os::windows::process::CommandExt;

// The /D flag disables execution of AutoRun commands from registry.

@ -484,7 +509,7 @@ pub fn command_not_found(
if Path::new(name).is_file() {
return ShellError::ExternalCommand {
label: format!("Command `{name}` not found"),
help: format!("`{name}` refers to a file that is not executable. Did you forget to to set execute permissions?"),
help: format!("`{name}` refers to a file that is not executable. Did you forget to set execute permissions?"),
span,
};
}
@ -2,6 +2,13 @@ use nu_test_support::nu;
use nu_test_support::playground::Playground;
use std::fs;

#[test]
fn def_with_trailing_comma() {
let actual = nu!("def test-command [ foo: int, ] { $foo }; test-command 1");

assert!(actual.out == "1");
}

#[test]
fn def_with_comment() {
Playground::setup("def_with_comment", |dirs, _| {

@ -72,6 +79,13 @@ fn def_errors_with_comma_before_equals() {
assert!(actual.err.contains("expected parameter"));
}

#[test]
fn def_errors_with_colon_before_equals() {
let actual = nu!("def test-command [ foo: = 1 ] {}");

assert!(actual.err.contains("expected type"));
}

#[test]
fn def_errors_with_comma_before_colon() {
let actual = nu!("def test-command [ foo, : int ] {}");

@ -85,7 +99,6 @@ fn def_errors_with_multiple_colons() {
assert!(actual.err.contains("expected type"));
}

#[ignore = "This error condition is not implemented yet"]
#[test]
fn def_errors_with_multiple_types() {
let actual = nu!("def test-command [ foo:int:string ] {}");

@ -93,6 +106,20 @@ fn def_errors_with_multiple_types() {
assert!(actual.err.contains("expected parameter"));
}

#[test]
fn def_errors_with_trailing_colon() {
let actual = nu!("def test-command [ foo: int: ] {}");

assert!(actual.err.contains("expected parameter"));
}

#[test]
fn def_errors_with_trailing_default_value() {
let actual = nu!("def test-command [ foo: int = ] {}");

assert!(actual.err.contains("expected default value"));
}

#[test]
fn def_errors_with_multiple_commas() {
let actual = nu!("def test-command [ foo,,bar ] {}");
@ -378,32 +378,37 @@ fn glob_with_hidden_directory() {

#[test]
#[cfg(unix)]
fn fails_with_ls_to_dir_without_permission() {
fn fails_with_permission_denied() {
Playground::setup("ls_test_1", |dirs, sandbox| {
sandbox
.within("dir_a")
.with_files(&[EmptyFile("yehuda.11.txt"), EmptyFile("jt10.txt")]);

let actual = nu!(
let actual_with_path_arg = nu!(
cwd: dirs.test(), pipeline(
"
chmod 000 dir_a; ls dir_a
"
));

let check_not_root = nu!(
let actual_in_cwd = nu!(
cwd: dirs.test(), pipeline(
"
chmod 100 dir_a; cd dir_a; ls
"
));

let get_uid = nu!(
cwd: dirs.test(), pipeline(
"
id -u
"
));
let is_root = get_uid.out == "0";

assert!(
actual
.err
.contains("The permissions of 0 do not allow access for this user")
|| check_not_root.out == "0"
);
assert!(actual_with_path_arg.err.contains("Permission denied") || is_root);

assert!(actual_in_cwd.err.contains("Permission denied") || is_root);
})
}

@ -833,3 +838,27 @@ fn list_symlink_with_full_path() {
);
})
}

#[test]
fn consistent_list_order() {
Playground::setup("ls_test_order", |dirs, sandbox| {
sandbox.with_files(&[
EmptyFile("los.txt"),
EmptyFile("tres.txt"),
EmptyFile("amigos.txt"),
EmptyFile("arepas.clu"),
]);

let no_arg = nu!(
cwd: dirs.test(), pipeline(
"ls"
));

let with_arg = nu!(
cwd: dirs.test(), pipeline(
"ls ."
));

assert_eq!(no_arg.out, with_arg.out);
})
}
@ -127,6 +127,7 @@ mod update;
mod upsert;
mod url;
mod use_;
mod utouch;
mod where_;
mod which;
mod while_;
@ -513,13 +513,18 @@ fn test_mv_no_clobber() {
sandbox.with_files(&[EmptyFile(file_a)]);
sandbox.with_files(&[EmptyFile(file_b)]);

let actual = nu!(
let _ = nu!(
cwd: dirs.test(),
"mv -n {} {}",
file_a,
file_b,
);
assert!(actual.err.contains("not replacing"));

let file_count = nu!(
cwd: dirs.test(),
"ls test_mv* | length | to nuon"
);
assert_eq!(file_count.out, "2");
})
}
@ -4,12 +4,55 @@ use nu_test_support::nu;
fn fails_when_first_arg_is_multiple_chars() {
let actual = nu!("seq char aa z");

assert!(actual.err.contains("should be 1 character long"));
assert!(actual
.err
.contains("input should be a single ASCII character"));
}

#[test]
fn fails_when_second_arg_is_multiple_chars() {
let actual = nu!("seq char a zz");

assert!(actual.err.contains("should be 1 character long"));
assert!(actual
.err
.contains("input should be a single ASCII character"));
}

#[test]
fn generates_sequence_from_a_to_e() {
let actual = nu!("seq char a e | str join ''");

assert_eq!(actual.out, "abcde");
}

#[test]
fn generates_sequence_from_e_to_a() {
let actual = nu!("seq char e a | str join ''");

assert_eq!(actual.out, "edcba");
}

#[test]
fn fails_when_non_ascii_character_is_used_in_first_arg() {
let actual = nu!("seq char ñ z");

assert!(actual
.err
.contains("input should be a single ASCII character"));
}

#[test]
fn fails_when_non_ascii_character_is_used_in_second_arg() {
let actual = nu!("seq char a ñ");

assert!(actual
.err
.contains("input should be a single ASCII character"));
}

#[test]
fn joins_sequence_with_pipe() {
let actual = nu!("seq char a e | str join '|'");

assert_eq!(actual.out, "a|b|c|d|e");
}
@ -841,14 +841,13 @@ fn test_cp_arg_no_clobber() {
let target = dirs.fixtures.join("cp").join(TEST_HOW_ARE_YOU_SOURCE);
let target_hash = get_file_hash(target.display());

let actual = nu!(
cwd: dirs.root(),
"cp {} {} --no-clobber",
src.display(),
target.display()
let _ = nu!(
cwd: dirs.root(),
"cp {} {} --no-clobber",
src.display(),
target.display()
);
let after_cp_hash = get_file_hash(target.display());
assert!(actual.err.contains("not replacing"));
// Check content was not clobbered
assert_eq!(after_cp_hash, target_hash);
});
crates/nu-command/tests/commands/utouch.rs (new file, 740 lines)

@ -0,0 +1,740 @@
|
||||
use chrono::{DateTime, Days, Local, TimeDelta, Utc};
|
||||
use filetime::FileTime;
|
||||
use nu_test_support::fs::{files_exist_at, Stub};
|
||||
use nu_test_support::nu;
|
||||
use nu_test_support::playground::{Dirs, Playground};
|
||||
use std::path::Path;
|
||||
|
||||
// Use 1 instead of 0 because 0 has a special meaning in Windows
|
||||
const TIME_ONE: FileTime = FileTime::from_unix_time(1, 0);
|
||||
|
||||
fn file_times(file: impl AsRef<Path>) -> (FileTime, FileTime) {
|
||||
(
|
||||
file.as_ref().metadata().unwrap().accessed().unwrap().into(),
|
||||
file.as_ref().metadata().unwrap().modified().unwrap().into(),
|
||||
)
|
||||
}
|
||||
|
||||
fn symlink_times(path: &nu_path::AbsolutePath) -> (filetime::FileTime, filetime::FileTime) {
|
||||
let metadata = path.symlink_metadata().unwrap();
|
||||
|
||||
(
|
||||
filetime::FileTime::from_system_time(metadata.accessed().unwrap()),
|
||||
filetime::FileTime::from_system_time(metadata.modified().unwrap()),
|
||||
)
|
||||
}
|
||||
|
||||
// From https://github.com/nushell/nushell/pull/14214
|
||||
fn setup_symlink_fs(dirs: &Dirs, sandbox: &mut Playground<'_>) {
|
||||
sandbox.mkdir("d");
|
||||
sandbox.with_files(&[Stub::EmptyFile("f"), Stub::EmptyFile("d/f")]);
|
||||
sandbox.symlink("f", "fs");
|
||||
sandbox.symlink("d", "ds");
|
||||
sandbox.symlink("d/f", "fds");
|
||||
|
||||
// sandbox.symlink does not handle symlinks to missing files well. It panics
|
||||
// But they are useful, and they should be tested.
|
||||
#[cfg(unix)]
|
||||
{
|
||||
std::os::unix::fs::symlink(dirs.test().join("m"), dirs.test().join("fms")).unwrap();
|
||||
}
|
||||
|
||||
#[cfg(windows)]
|
||||
{
|
||||
std::os::windows::fs::symlink_file(dirs.test().join("m"), dirs.test().join("fms")).unwrap();
|
||||
}
|
||||
|
||||
// Change the file times to a known "old" value for comparison
|
||||
filetime::set_symlink_file_times(dirs.test().join("f"), TIME_ONE, TIME_ONE).unwrap();
|
||||
filetime::set_symlink_file_times(dirs.test().join("d"), TIME_ONE, TIME_ONE).unwrap();
|
||||
filetime::set_symlink_file_times(dirs.test().join("d/f"), TIME_ONE, TIME_ONE).unwrap();
|
||||
filetime::set_symlink_file_times(dirs.test().join("ds"), TIME_ONE, TIME_ONE).unwrap();
|
||||
filetime::set_symlink_file_times(dirs.test().join("fs"), TIME_ONE, TIME_ONE).unwrap();
|
||||
filetime::set_symlink_file_times(dirs.test().join("fds"), TIME_ONE, TIME_ONE).unwrap();
|
||||
filetime::set_symlink_file_times(dirs.test().join("fms"), TIME_ONE, TIME_ONE).unwrap();
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn creates_a_file_when_it_doesnt_exist() {
|
||||
Playground::setup("create_test_1", |dirs, _sandbox| {
|
||||
nu!(
|
||||
cwd: dirs.test(),
|
||||
"utouch i_will_be_created.txt"
|
||||
);
|
||||
|
||||
let path = dirs.test().join("i_will_be_created.txt");
|
||||
assert!(path.exists());
|
||||
})
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn creates_two_files() {
|
||||
Playground::setup("create_test_2", |dirs, _sandbox| {
|
||||
nu!(
|
||||
cwd: dirs.test(),
|
||||
"utouch a b"
|
||||
);
|
||||
|
||||
let path = dirs.test().join("a");
|
||||
assert!(path.exists());
|
||||
|
||||
let path2 = dirs.test().join("b");
|
||||
assert!(path2.exists());
|
||||
})
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn change_modified_time_of_file_to_today() {
|
||||
Playground::setup("change_time_test_9", |dirs, sandbox| {
|
||||
sandbox.with_files(&[Stub::EmptyFile("file.txt")]);
|
||||
let path = dirs.test().join("file.txt");
|
||||
|
||||
// Set file.txt's times to the past before the test to make sure `utouch` actually changes the mtime to today
|
||||
filetime::set_file_times(&path, TIME_ONE, TIME_ONE).unwrap();
|
||||
|
||||
nu!(
|
||||
cwd: dirs.test(),
|
||||
"utouch -m file.txt"
|
||||
);
|
||||
|
||||
let metadata = path.metadata().unwrap();
|
||||
|
||||
// Check only the date since the time may not match exactly
|
||||
let today = Local::now().date_naive();
|
||||
let mtime_day = DateTime::<Local>::from(metadata.modified().unwrap()).date_naive();
|
||||
|
||||
assert_eq!(today, mtime_day);
|
||||
|
||||
// Check that atime remains unchanged
|
||||
assert_eq!(
|
||||
TIME_ONE,
|
||||
FileTime::from_system_time(metadata.accessed().unwrap())
|
||||
);
|
||||
})
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn change_access_time_of_file_to_today() {
|
||||
Playground::setup("change_time_test_18", |dirs, sandbox| {
|
||||
sandbox.with_files(&[Stub::EmptyFile("file.txt")]);
|
||||
let path = dirs.test().join("file.txt");
|
||||
|
||||
// Set file.txt's times to the past before the test to make sure `utouch` actually changes the atime to today
|
||||
filetime::set_file_times(&path, TIME_ONE, TIME_ONE).unwrap();
|
||||
|
||||
nu!(
|
||||
cwd: dirs.test(),
|
||||
"utouch -a file.txt"
|
||||
);
|
||||
|
||||
let metadata = path.metadata().unwrap();
|
||||
|
||||
// Check only the date since the time may not match exactly
|
||||
let today = Local::now().date_naive();
|
||||
let atime_day = DateTime::<Local>::from(metadata.accessed().unwrap()).date_naive();
|
||||
|
||||
assert_eq!(today, atime_day);
|
||||
|
||||
// Check that mtime remains unchanged
|
||||
assert_eq!(
|
||||
TIME_ONE,
|
||||
FileTime::from_system_time(metadata.modified().unwrap())
|
||||
);
|
||||
})
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn change_modified_and_access_time_of_file_to_today() {
|
||||
Playground::setup("change_time_test_27", |dirs, sandbox| {
|
||||
sandbox.with_files(&[Stub::EmptyFile("file.txt")]);
|
||||
let path = dirs.test().join("file.txt");
|
||||
|
||||
filetime::set_file_times(&path, TIME_ONE, TIME_ONE).unwrap();
|
||||
|
||||
nu!(
|
||||
cwd: dirs.test(),
|
||||
"utouch -a -m file.txt"
|
||||
);
|
||||
|
||||
let metadata = path.metadata().unwrap();
|
||||
|
||||
// Check only the date since the time may not match exactly
|
||||
let today = Local::now().date_naive();
|
||||
let mtime_day = DateTime::<Local>::from(metadata.modified().unwrap()).date_naive();
|
||||
let atime_day = DateTime::<Local>::from(metadata.accessed().unwrap()).date_naive();
|
||||
|
||||
assert_eq!(today, mtime_day);
|
||||
assert_eq!(today, atime_day);
|
||||
})
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn not_create_file_if_it_not_exists() {
|
||||
Playground::setup("change_time_test_28", |dirs, _sandbox| {
|
||||
let outcome = nu!(
|
||||
cwd: dirs.test(),
|
||||
"utouch -c file.txt"
|
||||
);
|
||||
|
||||
let path = dirs.test().join("file.txt");
|
||||
|
||||
assert!(!path.exists());
|
||||
|
||||
// If --no-create is improperly handled `utouch` may error when trying to change the times of a nonexistent file
|
||||
assert!(outcome.status.success())
|
||||
})
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn change_file_times_if_exists_with_no_create() {
|
||||
Playground::setup(
|
||||
"change_file_times_if_exists_with_no_create",
|
||||
|dirs, sandbox| {
|
||||
sandbox.with_files(&[Stub::EmptyFile("file.txt")]);
|
||||
let path = dirs.test().join("file.txt");
|
||||
|
||||
filetime::set_file_times(&path, TIME_ONE, TIME_ONE).unwrap();
|
||||
|
||||
nu!(
|
||||
cwd: dirs.test(),
|
||||
"utouch -c file.txt"
|
||||
);
|
||||
|
||||
let metadata = path.metadata().unwrap();
|
||||
|
||||
// Check only the date since the time may not match exactly
|
||||
let today = Local::now().date_naive();
|
||||
let mtime_day = DateTime::<Local>::from(metadata.modified().unwrap()).date_naive();
|
||||
let atime_day = DateTime::<Local>::from(metadata.accessed().unwrap()).date_naive();
|
||||
|
||||
assert_eq!(today, mtime_day);
|
||||
assert_eq!(today, atime_day);
|
||||
},
|
||||
)
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn creates_file_three_dots() {
|
||||
Playground::setup("create_test_1", |dirs, _sandbox| {
|
||||
nu!(
|
||||
cwd: dirs.test(),
|
||||
"utouch file..."
|
||||
);
|
||||
|
||||
let path = dirs.test().join("file...");
|
||||
assert!(path.exists());
|
||||
})
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn creates_file_four_dots() {
|
||||
Playground::setup("create_test_1", |dirs, _sandbox| {
|
||||
nu!(
|
||||
cwd: dirs.test(),
|
||||
"utouch file...."
|
||||
);
|
||||
|
||||
let path = dirs.test().join("file....");
|
||||
assert!(path.exists());
|
||||
})
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn creates_file_four_dots_quotation_marks() {
|
||||
Playground::setup("create_test_1", |dirs, _sandbox| {
|
||||
nu!(
|
||||
cwd: dirs.test(),
|
||||
"utouch 'file....'"
|
||||
);
|
||||
|
||||
let path = dirs.test().join("file....");
|
||||
assert!(path.exists());
|
||||
})
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn change_file_times_to_reference_file() {
|
||||
Playground::setup("change_dir_times_to_reference_dir", |dirs, sandbox| {
|
||||
sandbox.with_files(&[
|
||||
Stub::EmptyFile("reference_file"),
|
||||
Stub::EmptyFile("target_file"),
|
||||
]);
|
||||
|
||||
let reference = dirs.test().join("reference_file");
|
||||
let target = dirs.test().join("target_file");
|
||||
|
||||
// Change the times for reference
|
||||
filetime::set_file_times(&reference, FileTime::from_unix_time(1337, 0), TIME_ONE).unwrap();
|
||||
|
||||
// target should have today's date since it was just created, but reference should be different
|
||||
assert_ne!(
|
||||
reference.metadata().unwrap().accessed().unwrap(),
|
||||
target.metadata().unwrap().accessed().unwrap()
|
||||
);
|
||||
assert_ne!(
|
||||
reference.metadata().unwrap().modified().unwrap(),
|
||||
target.metadata().unwrap().modified().unwrap()
|
||||
);
|
||||
|
||||
nu!(
|
||||
cwd: dirs.test(),
|
||||
"utouch -r reference_file target_file"
|
||||
);
|
||||
|
||||
assert_eq!(
|
||||
reference.metadata().unwrap().accessed().unwrap(),
|
||||
target.metadata().unwrap().accessed().unwrap()
|
||||
);
|
||||
assert_eq!(
|
||||
reference.metadata().unwrap().modified().unwrap(),
|
||||
target.metadata().unwrap().modified().unwrap()
|
||||
);
|
||||
})
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn change_file_mtime_to_reference() {
|
||||
Playground::setup("change_file_mtime_to_reference", |dirs, sandbox| {
|
||||
sandbox.with_files(&[
|
||||
Stub::EmptyFile("reference_file"),
|
||||
Stub::EmptyFile("target_file"),
|
||||
]);
|
||||
|
||||
let reference = dirs.test().join("reference_file");
|
||||
let target = dirs.test().join("target_file");
|
||||
|
||||
// Change the times for reference
|
||||
filetime::set_file_times(&reference, TIME_ONE, FileTime::from_unix_time(1337, 0)).unwrap();
|
||||
|
||||
// target should have today's date since it was just created, but reference should be different
|
||||
assert_ne!(file_times(&reference), file_times(&target));
|
||||
|
||||
// Save target's current atime to make sure it is preserved
|
||||
let target_original_atime = target.metadata().unwrap().accessed().unwrap();
|
||||
|
||||
nu!(
|
||||
cwd: dirs.test(),
|
||||
"utouch -mr reference_file target_file"
|
||||
);
|
||||
|
||||
assert_eq!(
|
||||
reference.metadata().unwrap().modified().unwrap(),
|
||||
target.metadata().unwrap().modified().unwrap()
|
||||
);
|
||||
assert_ne!(
|
||||
reference.metadata().unwrap().accessed().unwrap(),
|
||||
target.metadata().unwrap().accessed().unwrap()
|
||||
);
|
||||
assert_eq!(
|
||||
target_original_atime,
|
||||
target.metadata().unwrap().accessed().unwrap()
|
||||
);
|
||||
})
|
||||
}
|
||||
|
||||
// TODO when https://github.com/uutils/coreutils/issues/6629 is fixed,
|
||||
// unignore this test
|
||||
#[test]
|
||||
#[ignore]
|
||||
fn change_file_times_to_reference_file_with_date() {
|
||||
Playground::setup(
|
||||
"change_file_times_to_reference_file_with_date",
|
||||
|dirs, sandbox| {
|
||||
sandbox.with_files(&[
|
||||
Stub::EmptyFile("reference_file"),
|
||||
Stub::EmptyFile("target_file"),
|
||||
]);
|
||||
|
||||
let reference = dirs.test().join("reference_file");
|
||||
let target = dirs.test().join("target_file");
|
||||
|
||||
let now = Utc::now();
|
||||
|
||||
let ref_atime = now;
|
||||
let ref_mtime = now.checked_sub_days(Days::new(5)).unwrap();
|
||||
|
||||
// Change the times for reference
|
||||
filetime::set_file_times(
|
||||
reference,
|
||||
FileTime::from_unix_time(ref_atime.timestamp(), ref_atime.timestamp_subsec_nanos()),
|
||||
FileTime::from_unix_time(ref_mtime.timestamp(), ref_mtime.timestamp_subsec_nanos()),
|
||||
)
|
||||
.unwrap();
|
||||
|
||||
nu!(
|
||||
cwd: dirs.test(),
|
||||
r#"utouch -r reference_file -d "yesterday" target_file"#
|
||||
);
|
||||
|
||||
let (got_atime, got_mtime) = file_times(target);
|
||||
let got = (
|
||||
DateTime::from_timestamp(got_atime.seconds(), got_atime.nanoseconds()).unwrap(),
|
||||
DateTime::from_timestamp(got_mtime.seconds(), got_mtime.nanoseconds()).unwrap(),
|
||||
);
|
||||
assert_eq!(
|
||||
(
|
||||
now.checked_sub_days(Days::new(1)).unwrap(),
|
||||
now.checked_sub_days(Days::new(6)).unwrap()
|
||||
),
|
||||
got
|
||||
);
|
||||
},
|
||||
)
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn change_file_times_to_timestamp() {
|
||||
Playground::setup("change_file_times_to_timestamp", |dirs, sandbox| {
|
||||
sandbox.with_files(&[Stub::EmptyFile("target_file")]);
|
||||
|
||||
let target = dirs.test().join("target_file");
|
||||
let timestamp = DateTime::from_timestamp(TIME_ONE.unix_seconds(), TIME_ONE.nanoseconds())
|
||||
.unwrap()
|
||||
.to_rfc3339();
|
||||
|
||||
nu!(cwd: dirs.test(), format!("utouch --timestamp {} target_file", timestamp));
|
||||
|
||||
assert_eq!((TIME_ONE, TIME_ONE), file_times(target));
|
||||
})
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn change_modified_time_of_dir_to_today() {
|
||||
Playground::setup("change_dir_mtime", |dirs, sandbox| {
|
||||
sandbox.mkdir("test_dir");
|
||||
let path = dirs.test().join("test_dir");
|
||||
|
||||
filetime::set_file_mtime(&path, TIME_ONE).unwrap();
|
||||
|
||||
nu!(
|
||||
cwd: dirs.test(),
|
||||
"utouch -m test_dir"
|
||||
);
|
||||
|
||||
// Check only the date since the time may not match exactly
|
||||
let today = Local::now().date_naive();
|
||||
let mtime_day =
|
||||
DateTime::<Local>::from(path.metadata().unwrap().modified().unwrap()).date_naive();
|
||||
|
||||
assert_eq!(today, mtime_day);
|
||||
})
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn change_access_time_of_dir_to_today() {
|
||||
Playground::setup("change_dir_atime", |dirs, sandbox| {
|
||||
sandbox.mkdir("test_dir");
|
||||
let path = dirs.test().join("test_dir");
|
||||
|
||||
filetime::set_file_atime(&path, TIME_ONE).unwrap();
|
||||
|
||||
nu!(
|
||||
cwd: dirs.test(),
|
||||
"utouch -a test_dir"
|
||||
);
|
||||
|
||||
// Check only the date since the time may not match exactly
|
||||
let today = Local::now().date_naive();
|
||||
let atime_day =
|
||||
DateTime::<Local>::from(path.metadata().unwrap().accessed().unwrap()).date_naive();
|
||||
|
||||
assert_eq!(today, atime_day);
|
||||
})
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn change_modified_and_access_time_of_dir_to_today() {
|
||||
Playground::setup("change_dir_times", |dirs, sandbox| {
|
||||
sandbox.mkdir("test_dir");
|
||||
let path = dirs.test().join("test_dir");
|
||||
|
||||
filetime::set_file_times(&path, TIME_ONE, TIME_ONE).unwrap();
|
||||
|
||||
nu!(
|
||||
cwd: dirs.test(),
|
||||
"utouch -a -m test_dir"
|
||||
);
|
||||
|
||||
let metadata = path.metadata().unwrap();
|
||||
|
||||
// Check only the date since the time may not match exactly
|
||||
let today = Local::now().date_naive();
|
||||
let mtime_day = DateTime::<Local>::from(metadata.modified().unwrap()).date_naive();
|
||||
let atime_day = DateTime::<Local>::from(metadata.accessed().unwrap()).date_naive();
|
||||
|
||||
assert_eq!(today, mtime_day);
|
||||
assert_eq!(today, atime_day);
|
||||
})
|
||||
}
|
||||
|
||||
// TODO when https://github.com/uutils/coreutils/issues/6629 is fixed,
|
||||
// unignore this test
|
||||
#[test]
|
||||
#[ignore]
|
||||
fn change_file_times_to_date() {
|
||||
Playground::setup("change_file_times_to_date", |dirs, sandbox| {
|
||||
sandbox.with_files(&[Stub::EmptyFile("target_file")]);
|
||||
|
||||
let expected = Utc::now().checked_sub_signed(TimeDelta::hours(2)).unwrap();
|
||||
nu!(cwd: dirs.test(), "utouch -d '-2 hours' target_file");
|
||||
|
||||
let (got_atime, got_mtime) = file_times(dirs.test().join("target_file"));
|
||||
let got_atime =
|
||||
DateTime::from_timestamp(got_atime.seconds(), got_atime.nanoseconds()).unwrap();
|
||||
let got_mtime =
|
||||
DateTime::from_timestamp(got_mtime.seconds(), got_mtime.nanoseconds()).unwrap();
|
||||
let threshold = TimeDelta::minutes(1);
|
||||
assert!(
|
||||
got_atime.signed_duration_since(expected).lt(&threshold)
|
||||
&& got_mtime.signed_duration_since(expected).lt(&threshold),
|
||||
"Expected: {}. Got: atime={}, mtime={}",
|
||||
expected,
|
||||
got_atime,
|
||||
got_mtime
|
||||
);
|
||||
assert!(got_mtime.signed_duration_since(expected).lt(&threshold));
|
||||
})
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn change_dir_three_dots_times() {
|
||||
Playground::setup("change_dir_three_dots_times", |dirs, sandbox| {
|
||||
sandbox.mkdir("test_dir...");
|
||||
let path = dirs.test().join("test_dir...");
|
||||
|
||||
filetime::set_file_times(&path, TIME_ONE, TIME_ONE).unwrap();
|
||||
|
||||
nu!(
|
||||
cwd: dirs.test(),
|
||||
"utouch test_dir..."
|
||||
);
|
||||
|
||||
let metadata = path.metadata().unwrap();
|
||||
|
||||
// Check only the date since the time may not match exactly
|
||||
let today = Local::now().date_naive();
|
||||
let mtime_day = DateTime::<Local>::from(metadata.modified().unwrap()).date_naive();
|
||||
let atime_day = DateTime::<Local>::from(metadata.accessed().unwrap()).date_naive();
|
||||
|
||||
assert_eq!(today, mtime_day);
|
||||
assert_eq!(today, atime_day);
|
||||
})
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn change_dir_times_to_reference_dir() {
|
||||
Playground::setup("change_dir_times_to_reference_dir", |dirs, sandbox| {
|
||||
sandbox.mkdir("reference_dir");
|
||||
sandbox.mkdir("target_dir");
|
||||
|
||||
let reference = dirs.test().join("reference_dir");
|
||||
let target = dirs.test().join("target_dir");
|
||||
|
||||
// Change the times for reference
|
||||
filetime::set_file_times(&reference, FileTime::from_unix_time(1337, 0), TIME_ONE).unwrap();
|
||||
|
||||
// target should have today's date since it was just created, but reference should be different
|
||||
assert_ne!(
|
||||
reference.metadata().unwrap().accessed().unwrap(),
|
||||
target.metadata().unwrap().accessed().unwrap()
|
||||
);
|
||||
assert_ne!(
|
||||
reference.metadata().unwrap().modified().unwrap(),
|
||||
target.metadata().unwrap().modified().unwrap()
|
||||
);
|
||||
|
||||
nu!(
|
||||
cwd: dirs.test(),
|
||||
"utouch -r reference_dir target_dir"
|
||||
);
|
||||
|
||||
assert_eq!(
|
||||
reference.metadata().unwrap().accessed().unwrap(),
|
||||
target.metadata().unwrap().accessed().unwrap()
|
||||
);
|
||||
assert_eq!(
|
||||
reference.metadata().unwrap().modified().unwrap(),
|
||||
target.metadata().unwrap().modified().unwrap()
|
||||
);
|
||||
})
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn change_dir_atime_to_reference() {
|
||||
Playground::setup("change_dir_atime_to_reference", |dirs, sandbox| {
|
||||
sandbox.mkdir("reference_dir");
|
||||
sandbox.mkdir("target_dir");
|
||||
|
||||
let reference = dirs.test().join("reference_dir");
|
||||
let target = dirs.test().join("target_dir");
|
||||
|
||||
// Change the times for reference
|
||||
filetime::set_file_times(&reference, FileTime::from_unix_time(1337, 0), TIME_ONE).unwrap();
|
||||
|
||||
// target should have today's date since it was just created, but reference should be different
|
||||
assert_ne!(
|
||||
reference.metadata().unwrap().accessed().unwrap(),
|
||||
target.metadata().unwrap().accessed().unwrap()
|
||||
);
|
||||
assert_ne!(
|
||||
reference.metadata().unwrap().modified().unwrap(),
|
||||
target.metadata().unwrap().modified().unwrap()
|
||||
);
|
||||
|
||||
// Save target's current mtime to make sure it is preserved
|
||||
let target_original_mtime = target.metadata().unwrap().modified().unwrap();
|
||||
|
||||
nu!(
|
||||
cwd: dirs.test(),
|
||||
"utouch -ar reference_dir target_dir"
|
||||
);
|
||||
|
||||
assert_eq!(
|
||||
reference.metadata().unwrap().accessed().unwrap(),
|
||||
target.metadata().unwrap().accessed().unwrap()
|
||||
);
|
||||
assert_ne!(
|
||||
reference.metadata().unwrap().modified().unwrap(),
|
||||
target.metadata().unwrap().modified().unwrap()
|
||||
);
|
||||
assert_eq!(
|
||||
target_original_mtime,
|
||||
target.metadata().unwrap().modified().unwrap()
|
||||
);
|
||||
})
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn create_a_file_with_tilde() {
|
||||
Playground::setup("utouch with tilde", |dirs, _| {
|
||||
let actual = nu!(cwd: dirs.test(), "utouch '~tilde'");
|
||||
assert!(actual.err.is_empty());
|
||||
assert!(files_exist_at(&[Path::new("~tilde")], dirs.test()));
|
||||
|
||||
// pass variable
|
||||
let actual = nu!(cwd: dirs.test(), "let f = '~tilde2'; utouch $f");
|
||||
assert!(actual.err.is_empty());
|
||||
assert!(files_exist_at(&[Path::new("~tilde2")], dirs.test()));
|
||||
})
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn respects_cwd() {
|
||||
Playground::setup("utouch_respects_cwd", |dirs, _sandbox| {
|
||||
nu!(
|
||||
cwd: dirs.test(),
|
||||
"mkdir 'dir'; cd 'dir'; utouch 'i_will_be_created.txt'"
|
||||
);
|
||||
|
||||
let path = dirs.test().join("dir/i_will_be_created.txt");
|
||||
assert!(path.exists());
|
||||
})
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn reference_respects_cwd() {
|
||||
Playground::setup("utouch_reference_respects_cwd", |dirs, _sandbox| {
|
||||
nu!(
|
||||
cwd: dirs.test(),
|
||||
"mkdir 'dir'; cd 'dir'; utouch 'ref.txt'; utouch --reference 'ref.txt' 'foo.txt'"
|
||||
);
|
||||
|
||||
let path = dirs.test().join("dir/foo.txt");
|
||||
assert!(path.exists());
|
||||
})
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn recognizes_stdout() {
|
||||
Playground::setup("utouch_recognizes_stdout", |dirs, _sandbox| {
|
||||
nu!(cwd: dirs.test(), "utouch -");
|
||||
assert!(!dirs.test().join("-").exists());
|
||||
})
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn follow_symlinks() {
|
||||
Playground::setup("touch_follows_symlinks", |dirs, sandbox| {
|
||||
setup_symlink_fs(&dirs, sandbox);
|
||||
|
||||
let missing = dirs.test().join("m");
|
||||
assert!(!missing.exists());
|
||||
|
||||
nu!(
|
||||
cwd: dirs.test(),
|
||||
"
|
||||
touch fds
|
||||
touch ds
|
||||
touch fs
|
||||
touch fms
|
||||
"
|
||||
);
|
||||
|
||||
// We created the missing symlink target
|
||||
assert!(missing.exists());
|
||||
|
||||
// The timestamps for files and directories were changed from TIME_ONE
|
||||
let file_times = symlink_times(&dirs.test().join("f"));
|
||||
let dir_times = symlink_times(&dirs.test().join("d"));
|
||||
let dir_file_times = symlink_times(&dirs.test().join("d/f"));
|
||||
|
||||
assert_ne!(file_times, (TIME_ONE, TIME_ONE));
|
||||
assert_ne!(dir_times, (TIME_ONE, TIME_ONE));
|
||||
assert_ne!(dir_file_times, (TIME_ONE, TIME_ONE));
|
||||
|
||||
// For symlinks, they remain (mostly) the same
|
||||
// We can't test accessed times, since to reach the target file, the symlink must be accessed!
|
||||
let file_symlink_times = symlink_times(&dirs.test().join("fs"));
|
||||
let dir_symlink_times = symlink_times(&dirs.test().join("ds"));
|
||||
let dir_file_symlink_times = symlink_times(&dirs.test().join("fds"));
|
||||
let file_missing_symlink_times = symlink_times(&dirs.test().join("fms"));
|
||||
|
||||
assert_eq!(file_symlink_times.1, TIME_ONE);
|
||||
assert_eq!(dir_symlink_times.1, TIME_ONE);
|
||||
assert_eq!(dir_file_symlink_times.1, TIME_ONE);
|
||||
assert_eq!(file_missing_symlink_times.1, TIME_ONE);
|
||||
})
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn no_follow_symlinks() {
|
||||
Playground::setup("touch_touches_symlinks", |dirs, sandbox| {
|
||||
setup_symlink_fs(&dirs, sandbox);
|
||||
|
||||
let missing = dirs.test().join("m");
|
||||
assert!(!missing.exists());
|
||||
|
||||
nu!(
|
||||
cwd: dirs.test(),
|
||||
"
|
||||
touch fds -s
|
||||
touch ds -s
|
||||
touch fs -s
|
||||
touch fms -s
|
||||
"
|
||||
);
|
||||
|
||||
// We did not create the missing symlink target
|
||||
assert!(!missing.exists());
|
||||
|
||||
// The timestamps for files and directories remain the same
|
||||
let file_times = symlink_times(&dirs.test().join("f"));
|
||||
let dir_times = symlink_times(&dirs.test().join("d"));
|
||||
let dir_file_times = symlink_times(&dirs.test().join("d/f"));
|
||||
|
||||
assert_eq!(file_times, (TIME_ONE, TIME_ONE));
|
||||
assert_eq!(dir_times, (TIME_ONE, TIME_ONE));
|
||||
assert_eq!(dir_file_times, (TIME_ONE, TIME_ONE));
|
||||
|
||||
// For symlinks, everything changed. (except their targets, and paths, and personality)
|
||||
let file_symlink_times = symlink_times(&dirs.test().join("fs"));
|
||||
let dir_symlink_times = symlink_times(&dirs.test().join("ds"));
|
||||
let dir_file_symlink_times = symlink_times(&dirs.test().join("fds"));
|
||||
let file_missing_symlink_times = symlink_times(&dirs.test().join("fms"));
|
||||
|
||||
assert_ne!(file_symlink_times, (TIME_ONE, TIME_ONE));
|
||||
assert_ne!(dir_symlink_times, (TIME_ONE, TIME_ONE));
|
||||
assert_ne!(dir_file_symlink_times, (TIME_ONE, TIME_ONE));
|
||||
assert_ne!(file_missing_symlink_times, (TIME_ONE, TIME_ONE));
|
||||
})
|
||||
}
|
@ -1,20 +1,17 @@
|
||||
use crate::eval_ir_block;
|
||||
#[allow(deprecated)]
|
||||
use crate::{current_dir, get_full_help};
|
||||
use crate::get_full_help;
|
||||
use nu_path::{expand_path_with, AbsolutePathBuf};
|
||||
use nu_protocol::{
|
||||
ast::{
|
||||
Assignment, Block, Call, Expr, Expression, ExternalArgument, PathMember, PipelineElement,
|
||||
PipelineRedirection, RedirectionSource, RedirectionTarget,
|
||||
},
|
||||
ast::{Assignment, Block, Call, Expr, Expression, ExternalArgument, PathMember},
|
||||
debugger::DebugContext,
|
||||
engine::{Closure, EngineState, Redirection, Stack, StateWorkingSet},
|
||||
engine::{Closure, EngineState, Stack},
|
||||
eval_base::Eval,
|
||||
BlockId, ByteStreamSource, Config, DataSource, FromValue, IntoPipelineData, OutDest,
|
||||
PipelineData, PipelineMetadata, ShellError, Span, Spanned, Type, Value, VarId, ENV_VARIABLE_ID,
|
||||
BlockId, Config, DataSource, IntoPipelineData, PipelineData, PipelineMetadata, ShellError,
|
||||
Span, Type, Value, VarId, ENV_VARIABLE_ID,
|
||||
};
|
||||
use nu_utils::IgnoreCaseExt;
|
||||
use std::{fs::OpenOptions, path::PathBuf, sync::Arc};
|
||||
use std::sync::Arc;
|
||||
|
||||
pub fn eval_call<D: DebugContext>(
|
||||
engine_state: &EngineState,
|
||||
@ -301,177 +298,6 @@ pub fn eval_expression_with_input<D: DebugContext>(
|
||||
Ok(input)
|
||||
}
|
||||
|
||||
fn eval_redirection<D: DebugContext>(
|
||||
engine_state: &EngineState,
|
||||
stack: &mut Stack,
|
||||
target: &RedirectionTarget,
|
||||
next_out: Option<OutDest>,
|
||||
) -> Result<Redirection, ShellError> {
|
||||
match target {
|
||||
RedirectionTarget::File { expr, append, .. } => {
|
||||
#[allow(deprecated)]
|
||||
let cwd = current_dir(engine_state, stack)?;
|
||||
let value = eval_expression::<D>(engine_state, stack, expr)?;
|
||||
let path = Spanned::<PathBuf>::from_value(value)?.item;
|
||||
let path = expand_path_with(path, cwd, true);
|
||||
|
||||
let mut options = OpenOptions::new();
|
||||
if *append {
|
||||
options.append(true);
|
||||
} else {
|
||||
options.write(true).truncate(true);
|
||||
}
|
||||
Ok(Redirection::file(options.create(true).open(path)?))
|
||||
}
|
||||
RedirectionTarget::Pipe { .. } => {
|
||||
let dest = match next_out {
|
||||
None | Some(OutDest::PipeSeparate) => OutDest::Pipe,
|
||||
Some(next) => next,
|
||||
};
|
||||
Ok(Redirection::Pipe(dest))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn eval_element_redirection<D: DebugContext>(
|
||||
engine_state: &EngineState,
|
||||
stack: &mut Stack,
|
||||
element_redirection: Option<&PipelineRedirection>,
|
||||
pipe_redirection: (Option<OutDest>, Option<OutDest>),
|
||||
) -> Result<(Option<Redirection>, Option<Redirection>), ShellError> {
|
||||
let (next_out, next_err) = pipe_redirection;
|
||||
|
||||
if let Some(redirection) = element_redirection {
|
||||
match redirection {
|
||||
PipelineRedirection::Single {
|
||||
source: RedirectionSource::Stdout,
|
||||
target,
|
||||
} => {
|
||||
let stdout = eval_redirection::<D>(engine_state, stack, target, next_out)?;
|
||||
Ok((Some(stdout), next_err.map(Redirection::Pipe)))
|
||||
}
|
||||
PipelineRedirection::Single {
|
||||
source: RedirectionSource::Stderr,
|
||||
target,
|
||||
} => {
|
||||
let stderr = eval_redirection::<D>(engine_state, stack, target, None)?;
|
||||
if matches!(stderr, Redirection::Pipe(OutDest::Pipe)) {
|
||||
let dest = match next_out {
|
||||
None | Some(OutDest::PipeSeparate) => OutDest::Pipe,
|
||||
Some(next) => next,
|
||||
};
|
||||
// e>| redirection, don't override current stack `stdout`
|
||||
Ok((None, Some(Redirection::Pipe(dest))))
|
||||
} else {
|
||||
Ok((next_out.map(Redirection::Pipe), Some(stderr)))
|
||||
}
|
||||
}
|
||||
PipelineRedirection::Single {
|
||||
source: RedirectionSource::StdoutAndStderr,
|
||||
target,
|
||||
} => {
|
||||
let stream = eval_redirection::<D>(engine_state, stack, target, next_out)?;
|
||||
Ok((Some(stream.clone()), Some(stream)))
|
||||
}
|
||||
PipelineRedirection::Separate { out, err } => {
|
||||
let stdout = eval_redirection::<D>(engine_state, stack, out, None)?; // `out` cannot be `RedirectionTarget::Pipe`
|
||||
let stderr = eval_redirection::<D>(engine_state, stack, err, next_out)?;
|
||||
Ok((Some(stdout), Some(stderr)))
|
||||
}
|
||||
}
|
||||
} else {
|
||||
Ok((
|
||||
next_out.map(Redirection::Pipe),
|
||||
next_err.map(Redirection::Pipe),
|
||||
))
|
||||
}
|
||||
}
|
||||
|
||||
fn eval_element_with_input_inner<D: DebugContext>(
|
||||
engine_state: &EngineState,
|
||||
stack: &mut Stack,
|
||||
element: &PipelineElement,
|
||||
input: PipelineData,
|
||||
) -> Result<PipelineData, ShellError> {
|
||||
let data = eval_expression_with_input::<D>(engine_state, stack, &element.expr, input)?;
|
||||
|
||||
let is_external = if let PipelineData::ByteStream(stream, ..) = &data {
|
||||
matches!(stream.source(), ByteStreamSource::Child(..))
|
||||
} else {
|
||||
false
|
||||
};
|
||||
|
||||
if let Some(redirection) = element.redirection.as_ref() {
|
||||
if !is_external {
|
||||
match redirection {
|
||||
&PipelineRedirection::Single {
|
||||
source: RedirectionSource::Stderr,
|
||||
target: RedirectionTarget::Pipe { span },
|
||||
}
|
||||
| &PipelineRedirection::Separate {
|
||||
err: RedirectionTarget::Pipe { span },
|
||||
..
|
||||
} => {
|
||||
return Err(ShellError::GenericError {
|
||||
error: "`e>|` only works on external commands".into(),
|
||||
msg: "`e>|` only works on external commands".into(),
|
||||
span: Some(span),
|
||||
help: None,
|
||||
inner: vec![],
|
||||
});
|
||||
}
|
||||
&PipelineRedirection::Single {
|
||||
source: RedirectionSource::StdoutAndStderr,
|
||||
target: RedirectionTarget::Pipe { span },
|
||||
} => {
|
||||
return Err(ShellError::GenericError {
|
||||
error: "`o+e>|` only works on external commands".into(),
|
||||
msg: "`o+e>|` only works on external commands".into(),
|
||||
span: Some(span),
|
||||
help: None,
|
||||
inner: vec![],
|
||||
});
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let data = if let Some(OutDest::File(file)) = stack.pipe_stdout() {
|
||||
match &data {
|
||||
PipelineData::Value(..) | PipelineData::ListStream(..) => {
|
||||
data.write_to(file.as_ref())?;
|
||||
PipelineData::Empty
|
||||
}
|
||||
PipelineData::ByteStream(..) => {
|
||||
if !is_external {
|
||||
data.write_to(file.as_ref())?;
|
||||
PipelineData::Empty
|
||||
} else {
|
||||
data
|
||||
}
|
||||
}
|
||||
PipelineData::Empty => PipelineData::Empty,
|
||||
}
|
||||
} else {
|
||||
data
|
||||
};
|
||||
|
||||
Ok(data)
|
||||
}
|
||||
|
||||
fn eval_element_with_input<D: DebugContext>(
|
||||
engine_state: &EngineState,
|
||||
stack: &mut Stack,
|
||||
element: &PipelineElement,
|
||||
input: PipelineData,
|
||||
) -> Result<PipelineData, ShellError> {
|
||||
D::enter_element(engine_state, element);
|
||||
let result = eval_element_with_input_inner::<D>(engine_state, stack, element, input);
|
||||
D::leave_element(engine_state, element, &result);
|
||||
result
|
||||
}
|
||||
|
||||
pub fn eval_block_with_early_return<D: DebugContext>(
|
||||
engine_state: &EngineState,
|
||||
stack: &mut Stack,
|
||||
@ -484,86 +310,13 @@ pub fn eval_block_with_early_return<D: DebugContext>(
|
||||
}
|
||||
}
|
||||
|
||||
fn eval_block_inner<D: DebugContext>(
|
||||
engine_state: &EngineState,
|
||||
stack: &mut Stack,
|
||||
block: &Block,
|
||||
mut input: PipelineData,
|
||||
) -> Result<PipelineData, ShellError> {
|
||||
// Remove once IR is the default.
|
||||
if stack.use_ir {
|
||||
return eval_ir_block::<D>(engine_state, stack, block, input);
|
||||
}
|
||||
|
||||
let num_pipelines = block.len();
|
||||
|
||||
for (pipeline_idx, pipeline) in block.pipelines.iter().enumerate() {
|
||||
let last_pipeline = pipeline_idx >= num_pipelines - 1;
|
||||
|
||||
let Some((last, elements)) = pipeline.elements.split_last() else {
|
||||
debug_assert!(false, "pipelines should have at least one element");
|
||||
continue;
|
||||
};
|
||||
|
||||
for (i, element) in elements.iter().enumerate() {
|
||||
let next = elements.get(i + 1).unwrap_or(last);
|
||||
let (next_out, next_err) = next.pipe_redirection(&StateWorkingSet::new(engine_state));
|
||||
let (stdout, stderr) = eval_element_redirection::<D>(
|
||||
engine_state,
|
||||
stack,
|
||||
element.redirection.as_ref(),
|
||||
(next_out.or(Some(OutDest::Pipe)), next_err),
|
||||
)?;
|
||||
let stack = &mut stack.push_redirection(stdout, stderr);
|
||||
input = eval_element_with_input::<D>(engine_state, stack, element, input)?;
|
||||
}
|
||||
|
||||
if last_pipeline {
|
||||
let (stdout, stderr) = eval_element_redirection::<D>(
|
||||
engine_state,
|
||||
stack,
|
||||
last.redirection.as_ref(),
|
||||
(stack.pipe_stdout().cloned(), stack.pipe_stderr().cloned()),
|
||||
)?;
|
||||
let stack = &mut stack.push_redirection(stdout, stderr);
|
||||
input = eval_element_with_input::<D>(engine_state, stack, last, input)?;
|
||||
} else {
|
||||
let (stdout, stderr) = eval_element_redirection::<D>(
|
||||
engine_state,
|
||||
stack,
|
||||
last.redirection.as_ref(),
|
||||
(None, None),
|
||||
)?;
|
||||
let stack = &mut stack.push_redirection(stdout, stderr);
|
||||
match eval_element_with_input::<D>(engine_state, stack, last, input)? {
|
||||
PipelineData::ByteStream(stream, ..) => {
|
||||
let span = stream.span();
|
||||
if let Err(err) = stream.drain() {
|
||||
stack.set_last_error(&err);
|
||||
return Err(err);
|
||||
} else {
|
||||
stack.set_last_exit_code(0, span);
|
||||
}
|
||||
}
|
||||
PipelineData::ListStream(stream, ..) => stream.drain()?,
|
||||
PipelineData::Value(..) | PipelineData::Empty => {}
|
||||
}
|
||||
input = PipelineData::Empty;
|
||||
}
|
||||
}
|
||||
|
||||
Ok(input)
|
||||
}
|
||||
|
||||
pub fn eval_block<D: DebugContext>(
|
||||
engine_state: &EngineState,
|
||||
stack: &mut Stack,
|
||||
block: &Block,
|
||||
input: PipelineData,
|
||||
) -> Result<PipelineData, ShellError> {
|
||||
D::enter_block(engine_state, block);
|
||||
let result = eval_block_inner::<D>(engine_state, stack, block, input);
|
||||
D::leave_block(engine_state, block);
|
||||
let result = eval_ir_block::<D>(engine_state, stack, block, input);
|
||||
if let Err(err) = &result {
|
||||
stack.set_last_error(err);
|
||||
}
|
||||
|
@ -3392,6 +3392,7 @@ pub fn parse_signature_helper(working_set: &mut StateWorkingSet, span: Span) ->
|
||||
Arg,
|
||||
AfterCommaArg,
|
||||
Type,
|
||||
AfterType,
|
||||
DefaultValue,
|
||||
}
|
||||
|
||||
@ -3425,7 +3426,9 @@ pub fn parse_signature_helper(working_set: &mut StateWorkingSet, span: Span) ->
|
||||
let mut args: Vec<Arg> = vec![];
|
||||
let mut parse_mode = ParseMode::Arg;
|
||||
|
||||
for token in &output {
|
||||
for (index, token) in output.iter().enumerate() {
|
||||
let last_token = index == output.len() - 1;
|
||||
|
||||
match token {
|
||||
Token {
|
||||
contents: crate::TokenContents::Item | crate::TokenContents::AssignmentOperator,
|
||||
@ -3437,10 +3440,12 @@ pub fn parse_signature_helper(working_set: &mut StateWorkingSet, span: Span) ->
|
||||
// The : symbol separates types
|
||||
if contents == b":" {
|
||||
match parse_mode {
|
||||
ParseMode::Arg if last_token => working_set
|
||||
.error(ParseError::Expected("type", Span::new(span.end, span.end))),
|
||||
ParseMode::Arg => {
|
||||
parse_mode = ParseMode::Type;
|
||||
}
|
||||
ParseMode::AfterCommaArg => {
|
||||
ParseMode::AfterCommaArg | ParseMode::AfterType => {
|
||||
working_set.error(ParseError::Expected("parameter or flag", span));
|
||||
}
|
||||
ParseMode::Type | ParseMode::DefaultValue => {
|
||||
@ -3452,9 +3457,15 @@ pub fn parse_signature_helper(working_set: &mut StateWorkingSet, span: Span) ->
|
||||
// The = symbol separates a variable from its default value
|
||||
else if contents == b"=" {
|
||||
match parse_mode {
|
||||
ParseMode::Type | ParseMode::Arg => {
|
||||
ParseMode::Arg | ParseMode::AfterType if last_token => working_set.error(
|
||||
ParseError::Expected("default value", Span::new(span.end, span.end)),
|
||||
),
|
||||
ParseMode::Arg | ParseMode::AfterType => {
|
||||
parse_mode = ParseMode::DefaultValue;
|
||||
}
|
||||
ParseMode::Type => {
|
||||
working_set.error(ParseError::Expected("type", span));
|
||||
}
|
||||
ParseMode::AfterCommaArg => {
|
||||
working_set.error(ParseError::Expected("parameter or flag", span));
|
||||
}
|
||||
@ -3467,7 +3478,9 @@ pub fn parse_signature_helper(working_set: &mut StateWorkingSet, span: Span) ->
|
||||
// The , symbol separates params only
|
||||
else if contents == b"," {
|
||||
match parse_mode {
|
||||
ParseMode::Arg => parse_mode = ParseMode::AfterCommaArg,
|
||||
ParseMode::Arg | ParseMode::AfterType => {
|
||||
parse_mode = ParseMode::AfterCommaArg
|
||||
}
|
||||
ParseMode::AfterCommaArg => {
|
||||
working_set.error(ParseError::Expected("parameter or flag", span));
|
||||
}
|
||||
@ -3480,7 +3493,7 @@ pub fn parse_signature_helper(working_set: &mut StateWorkingSet, span: Span) ->
|
||||
}
|
||||
} else {
|
||||
match parse_mode {
|
||||
ParseMode::Arg | ParseMode::AfterCommaArg => {
|
||||
ParseMode::Arg | ParseMode::AfterCommaArg | ParseMode::AfterType => {
|
||||
// Long flag with optional short form following with no whitespace, e.g. --output, --age(-a)
|
||||
if contents.starts_with(b"--") && contents.len() > 2 {
|
||||
// Split the long flag from the short flag with the ( character as delimiter.
|
||||
@ -3790,7 +3803,7 @@ pub fn parse_signature_helper(working_set: &mut StateWorkingSet, span: Span) ->
|
||||
}
|
||||
}
|
||||
}
|
||||
parse_mode = ParseMode::Arg;
|
||||
parse_mode = ParseMode::AfterType;
|
||||
}
|
||||
ParseMode::DefaultValue => {
|
||||
if let Some(last) = args.last_mut() {
|
||||
|
@ -13,9 +13,12 @@ bench = false
|
||||
|
||||
[dependencies]
|
||||
dirs = { workspace = true }
|
||||
cfg-if = "1.0.0"
|
||||
|
||||
[target.'cfg(windows)'.dependencies]
|
||||
omnipath = { workspace = true }
|
||||
once_cell = "1.20.1"
|
||||
winapi = { version = "0.3.9", features = ["fileapi"] }
|
||||
|
||||
[target.'cfg(all(unix, not(target_os = "macos"), not(target_os = "android")))'.dependencies]
|
||||
pwd = { workspace = true }
|
||||
|
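The Windows-only dependencies added above (once_cell and winapi with the "fileapi" feature) exist to back a process-wide, lazily initialized map of per-drive working directories. A minimal sketch of that pattern follows; it is not code from this commit, and the concrete stored type is illustrative only.

use once_cell::sync::Lazy;
use std::sync::Mutex;

// Process-wide, lazily created state guarded by a Mutex. The real code further
// down stores a fixed-size per-drive PWD table instead of a Vec.
static STATE: Lazy<Mutex<Vec<String>>> = Lazy::new(|| Mutex::new(Vec::new()));

fn record(entry: &str) {
    STATE.lock().unwrap().push(entry.to_string());
}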
@ -4,6 +4,8 @@ mod components;
|
||||
pub mod dots;
|
||||
pub mod expansions;
|
||||
pub mod form;
|
||||
#[cfg(target_os="windows")]
|
||||
pub mod pwd_per_drive;
|
||||
mod helpers;
|
||||
mod path;
|
||||
mod tilde;
|
||||
|
348
crates/nu-path/src/pwd_per_drive.rs
Normal file
@ -0,0 +1,348 @@
|
||||
cfg_if::cfg_if! {
|
||||
if #[cfg(target_os="windows")] {
|
||||
|
||||
use once_cell::sync::Lazy;
|
||||
use std::path::{ Path, PathBuf };
|
||||
use std::sync::Mutex;
|
||||
|
||||
struct DrivePWDmap {
|
||||
map: [Option<String>; 26], // Fixed-size array for A-Z
|
||||
}
|
||||
|
||||
impl DrivePWDmap {
|
||||
pub fn new() -> Self {
|
||||
DrivePWDmap {
|
||||
map: Default::default(), // Initialize all to `None`
|
||||
}
|
||||
}
|
||||
|
||||
/// Set the PWD for the drive letter found in `path`; the path must be absolute
pub fn set_pwd(&mut self, path: &Path) -> Result<(), String> {
|
||||
if let Some(drive_letter) = Self::extract_drive_letter(path) {
|
||||
if let Some(index) = Self::drive_to_index(drive_letter) {
|
||||
if let Some(path_str) = path.to_str() {
|
||||
self.map[index] = Some(path_str.to_string());
|
||||
Ok(())
|
||||
} else {
|
||||
Err(format!("Invalid path: {}", path.display()))
|
||||
}
|
||||
} else {
|
||||
Err(format!("Invalid drive letter: {}", drive_letter))
|
||||
}
|
||||
} else {
|
||||
Err(format!("Invalid path: {}", path.display()))
|
||||
}
|
||||
}
|
||||
|
||||
/// Get the PWD for a drive letter; if it has not been set yet, try
/// winapi GetFullPathNameW to query "T:", falling back to "T:/" as the default
pub fn get_pwd(&mut self, drive: char) -> Option<String> {
|
||||
Self::drive_to_index(drive).map(|index| {
|
||||
self.map[index]
|
||||
.clone()
|
||||
.unwrap_or_else(||
|
||||
if let Some(system_pwd) = get_full_path_name_w(&format!("{}:", drive.to_ascii_uppercase())) {
|
||||
self.map[index] = Some(system_pwd.clone());
|
||||
system_pwd
|
||||
} else {
|
||||
format!("{}:/", drive.to_ascii_uppercase())
|
||||
}
|
||||
)
|
||||
})
|
||||
}
|
||||
|
||||
/// Expand a relative path using the PWD of the drive
|
||||
pub fn expand_path(&mut self, path: &Path) -> Option<PathBuf> {
|
||||
let path_str = path.to_str()?;
|
||||
if let Some(drive_letter) = Self::extract_drive_letter(path) {
|
||||
if let Some(pwd) = self.get_pwd(drive_letter) {
|
||||
// Combine current PWD with the relative path
|
||||
let mut base = PathBuf::from(Self::ensure_trailing_separator(&pwd));
|
||||
base.push(path_str.split_at(2).1); // Skip the "C:" part of the relative path
|
||||
Some(base)
|
||||
} else {
|
||||
None // PWD on Drive letter not found
|
||||
}
|
||||
} else {
|
||||
None // Invalid or no drive letter
|
||||
}
|
||||
}
|
||||
|
||||
/// Helper to convert a drive letter to an array index
|
||||
fn drive_to_index(drive: char) -> Option<usize> {
|
||||
let drive = drive.to_ascii_uppercase();
|
||||
if ('A'..='Z').contains(&drive) {
|
||||
Some((drive as usize) - ('A' as usize))
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
/// Extract the drive letter from a path (e.g., `C:test` -> `C`)
|
||||
fn extract_drive_letter(path: &Path) -> Option<char> {
|
||||
path.to_str()
|
||||
.and_then(|s| s.chars().next())
|
||||
.filter(|c| c.is_ascii_alphabetic())
|
||||
}
|
||||
|
||||
/// Ensure a path has a trailing `\`
|
||||
fn ensure_trailing_separator(path: &str) -> String {
|
||||
if !path.ends_with('\\') && !path.ends_with('/') {
|
||||
format!("{}/", path)
|
||||
} else {
|
||||
path.to_string()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Wrapper around winapi GetFullPathNameW
|
||||
fn get_full_path_name_w(path_str: &str) -> Option<String> {
|
||||
use std::ffi::OsString;
|
||||
use std::os::windows::ffi::OsStringExt;
|
||||
use std::os::windows::ffi::OsStrExt;
|
||||
use winapi::um::fileapi::GetFullPathNameW;
|
||||
use winapi::um::winnt::WCHAR;
|
||||
|
||||
const MAX_PATH : usize = 260;
|
||||
let mut buffer: [WCHAR; MAX_PATH] = [0; MAX_PATH];
|
||||
|
||||
unsafe {
|
||||
// Convert input to wide string.
|
||||
let wide_path: Vec<u16> = OsString::from(path_str).encode_wide().chain(Some(0)).collect();
|
||||
let length = GetFullPathNameW(
|
||||
wide_path.as_ptr(),
|
||||
buffer.len() as u32,
|
||||
buffer.as_mut_ptr(),
|
||||
std::ptr::null_mut(),
|
||||
);
|
||||
|
||||
if length > 0 && (length as usize) < MAX_PATH {
|
||||
let path = OsString::from_wide(&buffer[..length as usize]);
|
||||
if let Some(path_str) = path.to_str() {
|
||||
let path_string = path_str.to_string();
|
||||
{
|
||||
return Some(path_string);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
None
|
||||
}
|
||||
|
||||
/// Global singleton instance of DrivePwdMap
|
||||
static DRIVE_PWD_MAP: Lazy<Mutex<DrivePWDmap>> = Lazy::new(|| Mutex::new(DrivePWDmap::new()));
|
||||
|
||||
/// Public API to access the singleton instance
|
||||
fn get_drive_pwd_map() -> &'static Mutex<DrivePWDmap> {
|
||||
&DRIVE_PWD_MAP
|
||||
}
|
||||
|
||||
/// Test for DrivePWD map
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use std::path::Path;
|
||||
|
||||
#[test]
|
||||
fn test_singleton_set_and_get_pwd() {
|
||||
let drive_pwd_map = get_drive_pwd_map();
|
||||
{
|
||||
let mut map = drive_pwd_map.lock().unwrap();
|
||||
|
||||
// Set PWD for drive C
|
||||
assert!(map.set_pwd(Path::new("C:\\Users\\Example")).is_ok());
|
||||
}
|
||||
|
||||
{
|
||||
let map = drive_pwd_map.lock().unwrap();
|
||||
|
||||
// Get PWD for drive C
|
||||
assert_eq!(map.get_pwd('C'), Some("C:\\Users\\Example".to_string()));
|
||||
|
||||
// Get PWD for drive E (not set, should return E:\)
|
||||
assert_eq!(map.get_pwd('E'), Some("E:\\".to_string()));
|
||||
}
|
||||
}
|
||||
#[test]
|
||||
fn test_expand_path() {
|
||||
let mut drive_map = DrivePWDmap::new();
|
||||
|
||||
// Set PWD for drive C
|
||||
drive_map.set_pwd(Path::new("C:\\Users\\Home")).unwrap();
|
||||
|
||||
// Expand a relative path
|
||||
let expanded = drive_map.expand_path(Path::new("C:test"));
|
||||
assert_eq!(expanded, Some(PathBuf::from("C:\\Users\\Home\\test")));
|
||||
|
||||
// Expand an absolute path
|
||||
let expanded = drive_map.expand_path(Path::new("C:\\absolute\\path"));
|
||||
assert_eq!(expanded, Some(PathBuf::from("C:\\absolute\\path")));
|
||||
|
||||
// Expand with no drive letter
|
||||
let expanded = drive_map.expand_path(Path::new("\\no_drive"));
|
||||
assert_eq!(expanded, None);
|
||||
|
||||
// Expand with no PWD set for the drive
|
||||
let expanded = drive_map.expand_path(Path::new("D:test"));
|
||||
assert_eq!(expanded, Some(PathBuf::from("D:\\test")));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_set_and_get_pwd() {
|
||||
let mut drive_map = DrivePWDmap::new();
|
||||
|
||||
// Set PWD for drive C
|
||||
assert!(drive_map.set_pwd(Path::new("C:\\Users\\Example")).is_ok());
|
||||
assert_eq!(drive_map.get_pwd('C'), Some("C:\\Users\\Example".to_string()));
|
||||
|
||||
// Set PWD for drive D
|
||||
assert!(drive_map.set_pwd(Path::new("D:\\Projects")).is_ok());
|
||||
assert_eq!(drive_map.get_pwd('D'), Some("D:\\Projects".to_string()));
|
||||
|
||||
// Get PWD for drive E (not set, should return E:\)
|
||||
assert_eq!(drive_map.get_pwd('E'), Some("E:\\".to_string()));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_set_pwd_invalid_path() {
|
||||
let mut drive_map = DrivePWDmap::new();
|
||||
|
||||
// Invalid path (no drive letter)
|
||||
let result = drive_map.set_pwd(Path::new("\\InvalidPath"));
|
||||
assert!(result.is_err());
|
||||
assert_eq!(result.unwrap_err(), "Invalid path: \\InvalidPath");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_get_pwd_invalid_drive() {
|
||||
let drive_map = DrivePWDmap::new();
|
||||
|
||||
// Get PWD for a drive not set (e.g., Z)
|
||||
assert_eq!(drive_map.get_pwd('Z'), Some("Z:\\".to_string()));
|
||||
|
||||
// Invalid drive letter (non-alphabetic)
|
||||
assert_eq!(drive_map.get_pwd('1'), None);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_drive_to_index() {
|
||||
// Valid drive letters
|
||||
assert_eq!(DrivePWDmap::drive_to_index('A'), Some(0));
|
||||
assert_eq!(DrivePWDmap::drive_to_index('Z'), Some(25));
|
||||
// Valid drive letters
|
||||
assert_eq!(DrivePWDmap::drive_to_index('a'), Some(0));
|
||||
assert_eq!(DrivePWDmap::drive_to_index('z'), Some(25));
|
||||
for i in 1..25 {
|
||||
assert_eq!(DrivePWDmap::drive_to_index(std::char::from_u32(('A' as usize + i) as u32).unwrap()), Some(i));
|
||||
assert_eq!(DrivePWDmap::drive_to_index(std::char::from_u32(('a' as usize + i) as u32).unwrap()), Some(i));
|
||||
}
|
||||
|
||||
// Invalid drive letters
|
||||
assert_eq!(DrivePWDmap::drive_to_index('1'), None);
|
||||
assert_eq!(DrivePWDmap::drive_to_index('$'), None);
|
||||
}
|
||||
}}}
|
||||
|
||||
/// Usage for pwd_per_drive
///
/// Upon changing PWD, call set_pwd_per_drive() with the new absolute path
///
/// Call expand_pwd_per_drive() with a relative path to get an absolute path
///
/// Doctest
/// ```rust
/// // Set PWD for drive C
/// set_pwd_per_drive(Path::new("C:\\Users\\Home")).unwrap();
///
/// // Expand a relative path
/// let expanded = expand_pwd_per_drive(Path::new("C:test"));
/// assert_eq!(expanded, Some(PathBuf::from("C:\\Users\\Home\\test")));
///
/// // Will NOT expand an absolute path
/// let expanded = expand_pwd_per_drive(Path::new("C:\\absolute\\path"));
/// assert_eq!(expanded, None);
///
/// // Expand with no drive letter
/// let expanded = expand_pwd_per_drive(Path::new("\\no_drive"));
/// assert_eq!(expanded, None);
///
/// // Expand with no PWD set for the drive
/// let expanded = expand_pwd_per_drive(Path::new("D:test"));
/// assert_eq!(expanded, Some(PathBuf::from("D:\\test")));
/// ```
pub mod pwd_per_drive {
use std::path::{ Path, PathBuf };
use super::get_drive_pwd_map;

/// set_pwd_per_drive
/// Record the PWD for a drive; the path must be an absolute path.
/// Returns Ok(()) on success, otherwise an error message.
#[cfg(target_os = "windows")]
pub fn set_pwd_per_drive(path: &Path) -> Result<(), String> {
get_drive_pwd_map().lock().unwrap().set_pwd(path)
}

#[cfg(not(target_os = "windows"))]
pub fn set_pwd_per_drive(_path: &Path) -> Result<(), String> {
Ok(())
}

/// expand_pwd_per_drive
/// Given a relative path, expand it against the recorded PWD to construct an absolute path.
/// Returns a PathBuf for the absolute path, or None if the input path cannot be expanded.
#[cfg(target_os = "windows")]
pub fn expand_pwd_per_drive(path: &Path) -> Option<PathBuf> {
if need_expand_pwd_per_drive(path) {
get_drive_pwd_map().lock().unwrap().expand_path(path)
} else {
None
}
}

/// expand_pwd_per_drive will return None on non-Windows platforms
#[cfg(not(target_os = "windows"))]
pub fn expand_pwd_per_drive(_path: &Path) -> Option<PathBuf> {
None
}

/// If the input path is a relative path with a drive letter,
/// it can be expanded with the PWD recorded for that drive
#[cfg(target_os = "windows")]
fn need_expand_pwd_per_drive(path: &Path) -> bool {
if let Some(path_str) = path.to_str() {
let chars: Vec<char> = path_str.chars().collect();
if chars.len() >= 2 {
return chars[1] == ':' && (chars.len() == 2 || (chars[2] != '/' && chars[2] != '\\'));
}
}
false
}

/// On non-Windows platforms, never expand
#[cfg(not(target_os = "windows"))]
fn need_expand_pwd_per_drive(_path: &Path) -> bool {
false
}

#[test]
fn test_usage_for_pwd_per_drive() {
// Set PWD for drive C
set_pwd_per_drive(Path::new("C:\\Users\\Home")).unwrap();

// Expand a relative path
let expanded = expand_pwd_per_drive(Path::new("C:test"));
assert_eq!(expanded, Some(PathBuf::from("C:\\Users\\Home\\test")));

// Will NOT expand an absolute path
let expanded = expand_pwd_per_drive(Path::new("C:\\absolute\\path"));
assert_eq!(expanded, None);

// Expand with no drive letter
let expanded = expand_pwd_per_drive(Path::new("\\no_drive"));
assert_eq!(expanded, None);

// Expand with no PWD set for the drive
let expanded = expand_pwd_per_drive(Path::new("D:test"));
assert_eq!(expanded, Some(PathBuf::from("D:\\test")));
}
}
|
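The distinction need_expand_pwd_per_drive draws between drive-relative and fully qualified paths is easy to get wrong, so a minimal sketch (not part of the commit) of the shapes the Windows branch accepts and rejects may help. The re-export path nu_path::pwd_per_drive::pwd_per_drive and the concrete directories are assumptions for illustration only.

#[cfg(all(test, target_os = "windows"))]
mod drive_relative_shapes {
    // Assumed import path; the functions themselves are defined in the file above.
    use nu_path::pwd_per_drive::pwd_per_drive::{expand_pwd_per_drive, set_pwd_per_drive};
    use std::path::{Path, PathBuf};

    #[test]
    fn only_drive_relative_paths_are_expanded() {
        // Hypothetical per-drive PWD; any absolute path with a drive letter works.
        set_pwd_per_drive(Path::new("C:\\Users\\Home")).unwrap();

        // Drive letter plus ':' and no separator after it: drive-relative, gets expanded.
        assert_eq!(
            expand_pwd_per_drive(Path::new("C:test")),
            Some(PathBuf::from("C:\\Users\\Home\\test"))
        );

        // Already absolute: the helper returns None and leaves it alone.
        assert_eq!(expand_pwd_per_drive(Path::new("C:\\test")), None);

        // No drive letter at all: nothing to expand against.
        assert_eq!(expand_pwd_per_drive(Path::new("\\test")), None);
    }
}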
@ -21,6 +21,12 @@ fn expand_tilde_with_home(path: impl AsRef<Path>, home: Option<PathBuf>) -> Path
|
||||
let path = path.as_ref();
|
||||
|
||||
if !path.starts_with("~") {
|
||||
use crate::pwd_per_drive::pwd_per_drive::expand_pwd_per_drive;
|
||||
if let Some(expanded_dir) = expand_pwd_per_drive(path) {
|
||||
return expanded_dir;
|
||||
}
|
||||
let string = path.to_string_lossy();
|
||||
let mut path_as_string = string.as_ref().bytes();
|
||||
return match path_as_string.next() {
|
||||
|
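The hunk above wires the expansion half into tilde handling; the recording half ("upon changing PWD, call set_pwd_per_drive") is not visible in this excerpt. Below is a minimal sketch of what such a call site could look like; the helper name, its caller, and the nu_path::pwd_per_drive::pwd_per_drive path are assumptions, not code from this commit.

use std::path::Path;

// Hypothetical helper a `cd` implementation could call once the new PWD is known;
// `new_cwd` is assumed to already be absolute.
fn remember_pwd_for_drive(new_cwd: &Path) {
    // Only Windows builds compile the per-drive map; elsewhere there is nothing to record.
    #[cfg(target_os = "windows")]
    {
        // Paths without a drive letter (e.g. UNC paths) are rejected by the map; ignoring that is fine here.
        let _ = nu_path::pwd_per_drive::pwd_per_drive::set_pwd_per_drive(new_cwd);
    }
    #[cfg(not(target_os = "windows"))]
    let _ = new_cwd;
}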
@ -1,5 +1,6 @@
|
||||
use super::prelude::*;
|
||||
use crate as nu_protocol;
|
||||
use crate::Record;
|
||||
|
||||
/// Definition of a parsed hook from the config object
|
||||
#[derive(Clone, Debug, IntoValue, PartialEq, Serialize, Deserialize)]
|
||||
@ -14,14 +15,14 @@ pub struct Hooks {
|
||||
impl Hooks {
|
||||
pub fn new() -> Self {
|
||||
Self {
|
||||
pre_prompt: None,
|
||||
pre_execution: None,
|
||||
env_change: None,
|
||||
pre_prompt: Some(Value::list(vec![], Span::unknown())),
|
||||
pre_execution: Some(Value::list(vec![], Span::unknown())),
|
||||
env_change: Some(Value::record(Record::default(), Span::unknown())),
|
||||
display_output: Some(Value::string(
|
||||
"if (term size).columns >= 100 { table -e } else { table }",
|
||||
Span::unknown(),
|
||||
)),
|
||||
command_not_found: None,
|
||||
command_not_found: Some(Value::list(vec![], Span::unknown())),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
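One practical effect of the populated defaults above: code consuming these hook fields no longer needs a separate "hook not configured" branch, because each field now starts as Some(empty value) instead of None. A small sketch of that consumer-side view (the field shapes are taken from the hunk; the helper itself is not code from the commit):

use nu_protocol::{Span, Value};

// With the new defaults, `pre_prompt` arrives as Some(empty list) rather than None,
// so the fallback below only matters for configs constructed some other way.
fn pre_prompt_hooks_or_empty(pre_prompt: Option<Value>) -> Value {
    pre_prompt.unwrap_or_else(|| Value::list(vec![], Span::unknown()))
}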
@ -45,8 +45,6 @@ pub struct Stack {
|
||||
pub arguments: ArgumentStack,
|
||||
/// Error handler stack for IR evaluation
|
||||
pub error_handlers: ErrorHandlerStack,
|
||||
/// Set true to always use IR mode
|
||||
pub use_ir: bool,
|
||||
pub recursion_count: u64,
|
||||
pub parent_stack: Option<Arc<Stack>>,
|
||||
/// Variables that have been deleted (this is used to hide values from parent stack lookups)
|
||||
@ -78,7 +76,6 @@ impl Stack {
|
||||
active_overlays: vec![DEFAULT_OVERLAY_NAME.to_string()],
|
||||
arguments: ArgumentStack::new(),
|
||||
error_handlers: ErrorHandlerStack::new(),
|
||||
use_ir: true,
|
||||
recursion_count: 0,
|
||||
parent_stack: None,
|
||||
parent_deletions: vec![],
|
||||
@ -99,7 +96,6 @@ impl Stack {
|
||||
active_overlays: parent.active_overlays.clone(),
|
||||
arguments: ArgumentStack::new(),
|
||||
error_handlers: ErrorHandlerStack::new(),
|
||||
use_ir: parent.use_ir,
|
||||
recursion_count: parent.recursion_count,
|
||||
vars: vec![],
|
||||
parent_deletions: vec![],
|
||||
@ -317,7 +313,6 @@ impl Stack {
|
||||
active_overlays: self.active_overlays.clone(),
|
||||
arguments: ArgumentStack::new(),
|
||||
error_handlers: ErrorHandlerStack::new(),
|
||||
use_ir: self.use_ir,
|
||||
recursion_count: self.recursion_count,
|
||||
parent_stack: None,
|
||||
parent_deletions: vec![],
|
||||
@ -351,7 +346,6 @@ impl Stack {
|
||||
active_overlays: self.active_overlays.clone(),
|
||||
arguments: ArgumentStack::new(),
|
||||
error_handlers: ErrorHandlerStack::new(),
|
||||
use_ir: self.use_ir,
|
||||
recursion_count: self.recursion_count,
|
||||
parent_stack: None,
|
||||
parent_deletions: vec![],
|
||||
|
@ -79,7 +79,7 @@ def create-test-record [] nothing -> record<before-each: string, after-each: str
|
||||
| group-by --to-table annotation
|
||||
| update items {|x|
|
||||
$x.items.function_name
|
||||
| if $x.group in ["test", "test-skip"] {
|
||||
| if $x.annotation in ["test", "test-skip"] {
|
||||
$in
|
||||
} else {
|
||||
get 0
|
||||
|
@ -471,7 +471,6 @@ unsafe fn null_terminated_wchar_to_string(slice: &[u16]) -> String {
|
||||
}
|
||||
}
|
||||
|
||||
#[allow(clippy::uninit_vec)]
|
||||
unsafe fn get_process_data(
|
||||
handle: HANDLE,
|
||||
ptr: *const c_void,
|
||||
@ -518,7 +517,6 @@ unsafe fn get_region_size(handle: HANDLE, ptr: *const c_void) -> Result<usize, &
|
||||
Ok((meminfo.RegionSize as isize - ptr.offset_from(meminfo.BaseAddress)) as usize)
|
||||
}
|
||||
|
||||
#[allow(clippy::uninit_vec)]
|
||||
unsafe fn ph_query_process_variable_size(
|
||||
process_handle: HANDLE,
|
||||
process_information_class: PROCESSINFOCLASS,
|
||||
|
@ -248,7 +248,6 @@ pub struct NuOpts {
|
||||
pub locale: Option<String>,
|
||||
pub envs: Option<Vec<(String, String)>>,
|
||||
pub collapse_output: Option<bool>,
|
||||
pub use_ir: Option<bool>,
|
||||
// Note: At the time this was added, passing in a file path was more convenient. However,
|
||||
// passing in file contents seems like a better API - consider this when adding new uses of
|
||||
// this field.
|
||||
@ -301,15 +300,6 @@ pub fn nu_run_test(opts: NuOpts, commands: impl AsRef<str>, with_std: bool) -> O
|
||||
.stdout(Stdio::piped())
|
||||
.stderr(Stdio::piped());
|
||||
|
||||
// Explicitly set NU_DISABLE_IR
|
||||
if let Some(use_ir) = opts.use_ir {
|
||||
if !use_ir {
|
||||
command.env("NU_DISABLE_IR", "1");
|
||||
} else {
|
||||
command.env_remove("NU_DISABLE_IR");
|
||||
}
|
||||
}
|
||||
|
||||
// Uncomment to debug the command being run:
|
||||
// println!("=== command\n{command:?}\n");
|
||||
|
||||
|
@ -34,7 +34,7 @@ impl PluginCommand for ExprAggGroups {

fn examples(&self) -> Vec<Example> {
vec![Example {
description: "Get the groiup index of the group by operations.",
description: "Get the group index of the group by operations.",
example: r#"[[group value]; [one 94] [one 95] [one 96] [two 97] [two 98] [two 99]]
| polars into-df
| polars group-by group

@ -28,7 +28,7 @@ impl PluginCommand for ValueCount {
.named(
"column",
SyntaxShape::String,
"Provide a custom name for the coutn column",
"Provide a custom name for the count column",
Some('c'),
)
.switch("sort", "Whether or not values should be sorted", Some('s'))

@ -1201,6 +1201,18 @@ fn series_to_values(

Ok(values)
}
DataType::Decimal(_precision, _scale) => {
let casted = series
.cast(&DataType::Float64)
.map_err(|e| ShellError::GenericError {
error: "Errors casting decimal column to float".into(),
msg: "".into(),
span: None,
help: Some(e.to_string()),
inner: vec![],
})?;
series_to_values(&casted, maybe_from_row, maybe_size, span)
}
e => Err(ShellError::GenericError {
error: "Error creating Dataframe".into(),
msg: "".to_string(),

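The DataType::Decimal arm added above has no direct Value conversion, so it casts the series to Float64 and recurses back into series_to_values. A self-contained sketch of the same fall-back-and-recurse idea; Column, its decimal layout, and to_values are hypothetical stand-ins, not the polars or nushell types:

// Hypothetical column type: either floats we can convert directly,
// or decimals stored as (unscaled value, scale).
#[derive(Debug, Clone)]
enum Column {
    Float(Vec<f64>),
    Decimal(Vec<(i128, u32)>),
}

fn to_values(col: &Column) -> Vec<f64> {
    match col {
        Column::Float(xs) => xs.clone(),
        // No direct path for Decimal: cast to Float and recurse,
        // mirroring the cast-to-Float64 pattern in the diff above.
        Column::Decimal(xs) => {
            let casted = Column::Float(
                xs.iter()
                    .map(|(v, scale)| *v as f64 / 10f64.powi(*scale as i32))
                    .collect(),
            );
            to_values(&casted)
        }
    }
}

fn main() {
    let col = Column::Decimal(vec![(125, 2), (250, 2)]); // 1.25, 2.50
    println!("{:?}", to_values(&col)); // [1.25, 2.5]
}
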
11
src/run.rs
@ -26,9 +26,6 @@ pub(crate) fn run_commands(
let ask_to_create_config = nu_path::nu_config_dir().map_or(false, |p| !p.exists());

let mut stack = Stack::new();
if stack.has_env_var(engine_state, "NU_DISABLE_IR") {
stack.use_ir = false;
}

// if the --no-config-file(-n) option is NOT passed, load the plugin file,
// load the default env file or custom (depending on parsed_nu_cli_args.env_file),
@ -119,10 +116,6 @@ pub(crate) fn run_file(
trace!("run_file");
let mut stack = Stack::new();

if stack.has_env_var(engine_state, "NU_DISABLE_IR") {
stack.use_ir = false;
}

// if the --no-config-file(-n) option is NOT passed, load the plugin file,
// load the default env file or custom (depending on parsed_nu_cli_args.env_file),
// and maybe a custom config file (depending on parsed_nu_cli_args.config_file)
@ -191,10 +184,6 @@ pub(crate) fn run_repl(
let mut stack = Stack::new();
let start_time = std::time::Instant::now();

if stack.has_env_var(engine_state, "NU_DISABLE_IR") {
stack.use_ir = false;
}

if parsed_nu_cli_args.no_config_file.is_none() {
setup_config(
engine_state,

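Each of the three entry points above removed the same gate: look up an environment variable on the freshly created stack and, if present, flip a boolean flag. A minimal sketch of that gate, reading the process environment directly; SimpleStack and its flag are illustrative stand-ins, not the nu-protocol Stack API:

use std::env;

// Illustrative stand-in for a stack/config object carrying a feature flag.
struct SimpleStack {
    use_ir: bool,
}

impl SimpleStack {
    fn new() -> Self {
        SimpleStack { use_ir: true }
    }
}

fn main() {
    let mut stack = SimpleStack::new();
    // Presence of the variable (any value) disables the feature.
    if env::var_os("NU_DISABLE_IR").is_some() {
        stack.use_ir = false;
    }
    println!("use_ir = {}", stack.use_ir);
}
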
@ -236,11 +236,6 @@ pub fn nu_repl() {
engine_state.add_env_var("PWD".into(), Value::test_string(cwd.to_string_lossy()));
engine_state.add_env_var("PATH".into(), Value::test_string(""));

// Disable IR in tests if set
if std::env::var_os("NU_DISABLE_IR").is_some() {
Arc::make_mut(&mut top_stack).use_ir = false;
}

let mut last_output = String::new();

load_standard_library(&mut engine_state).expect("Could not load the standard library.");

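The removed branch above mutated a value behind an Arc via Arc::make_mut, which clones the inner value only when the Arc is still shared. A minimal sketch of that copy-on-write behaviour using only the standard library; the Flags struct is illustrative, the real code flips use_ir on a shared Stack:

use std::sync::Arc;

#[derive(Clone, Debug)]
struct Flags {
    use_ir: bool,
}

fn main() {
    let mut shared = Arc::new(Flags { use_ir: true });
    let other = Arc::clone(&shared); // a second owner keeps its own view

    // Because `shared` is not unique, make_mut clones the Flags before mutating.
    Arc::make_mut(&mut shared).use_ir = false;

    assert!(!shared.use_ir);
    assert!(other.use_ir); // the other handle still sees the original value
}
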
@ -31,71 +31,43 @@ enum ExpectedOut<'a> {
use self::ExpectedOut::*;

fn test_eval(source: &str, expected_out: ExpectedOut) {
Playground::setup("test_eval_ast", |ast_dirs, _playground| {
Playground::setup("test_eval_ir", |ir_dirs, _playground| {
let actual_ast = nu!(
cwd: ast_dirs.test(),
use_ir: false,
source,
);
let actual_ir = nu!(
cwd: ir_dirs.test(),
use_ir: true,
source,
);
Playground::setup("test_eval", |dirs, _playground| {
let actual = nu!(
cwd: dirs.test(),
source,
);

match expected_out {
Eq(eq) => {
assert_eq!(actual_ast.out, eq);
assert_eq!(actual_ir.out, eq);
assert!(actual_ast.status.success());
assert!(actual_ir.status.success());
}
Matches(regex) => {
let compiled_regex = Regex::new(regex).expect("regex failed to compile");
assert!(
compiled_regex.is_match(&actual_ast.out),
"AST eval out does not match: {}\n{}",
regex,
actual_ast.out
);
assert!(
compiled_regex.is_match(&actual_ir.out),
"IR eval out does not match: {}\n{}",
regex,
actual_ir.out,
);
assert!(actual_ast.status.success());
assert!(actual_ir.status.success());
}
Error(regex) => {
let compiled_regex = Regex::new(regex).expect("regex failed to compile");
assert!(
compiled_regex.is_match(&actual_ast.err),
"AST eval err does not match: {}",
regex
);
assert!(
compiled_regex.is_match(&actual_ir.err),
"IR eval err does not match: {}",
regex
);
assert!(!actual_ast.status.success());
assert!(!actual_ir.status.success());
}
FileEq(path, contents) => {
let ast_contents = std::fs::read_to_string(ast_dirs.test().join(path))
.expect("failed to read AST file");
let ir_contents = std::fs::read_to_string(ir_dirs.test().join(path))
.expect("failed to read IR file");
assert_eq!(ast_contents.trim(), contents);
assert_eq!(ir_contents.trim(), contents);
assert!(actual_ast.status.success());
assert!(actual_ir.status.success());
}
match expected_out {
Eq(eq) => {
assert_eq!(actual.out, eq);
assert!(actual.status.success());
}
assert_eq!(actual_ast.out, actual_ir.out);
})
Matches(regex) => {
let compiled_regex = Regex::new(regex).expect("regex failed to compile");
assert!(
compiled_regex.is_match(&actual.out),
"eval out does not match: {}\n{}",
regex,
actual.out,
);
assert!(actual.status.success());
}
Error(regex) => {
let compiled_regex = Regex::new(regex).expect("regex failed to compile");
assert!(
compiled_regex.is_match(&actual.err),
"eval err does not match: {}",
regex
);
assert!(!actual.status.success());
}
FileEq(path, contents) => {
let read_contents =
std::fs::read_to_string(dirs.test().join(path)).expect("failed to read file");
assert_eq!(read_contents.trim(), contents);
assert!(actual.status.success());
}
}
});
}

@ -131,9 +131,9 @@ fn command_not_found_error_suggests_typo_fix() {
#[test]
fn command_not_found_error_recognizes_non_executable_file() {
let actual = nu!("./Cargo.toml");
assert!(actual
.err
.contains("is neither a Nushell built-in or a known external command"));
assert!(actual.err.contains(
"refers to a file that is not executable. Did you forget to set execute permissions?"
));
}

#[test]