Mirror of https://github.com/nushell/nushell.git (synced 2025-04-29 07:34:28 +02:00)

Merge branch 'main' into ecow-record
This change is contained in commit b15d893645.

.github/workflows/ci.yml (vendored): 2 changed lines
@@ -168,7 +168,7 @@ jobs:
      # Using macOS 13 runner because 14 is based on the M1 and has half as much RAM (7 GB,
      # instead of 14 GB) which is too little for us right now.
      #
-     # Failure occuring with clippy for rust 1.77.2
+     # Failure occurring with clippy for rust 1.77.2
      platform: [windows-latest, macos-13, ubuntu-20.04]

    runs-on: ${{ matrix.platform }}

Cargo.lock (generated): 779 changed lines
File diff suppressed because it is too large.
@@ -65,14 +65,14 @@ members = [
  alphanumeric-sort = "1.5"
  ansi-str = "0.8"
  anyhow = "1.0.82"
- base64 = "0.22"
+ base64 = "0.22.1"
  bracoxide = "0.1.2"
  brotli = "5.0"
  byteorder = "1.5"
  bytesize = "1.3"
  calamine = "0.24.0"
  chardetng = "0.1.17"
- chrono = { default-features = false, version = "0.4" }
+ chrono = { default-features = false, version = "0.4.34" }
  chrono-humanize = "0.2.3"
  chrono-tz = "0.8"
  crossbeam-channel = "0.5.8"
@@ -94,7 +94,7 @@ heck = "0.5.0"
  human-date-parser = "0.1.1"
  indexmap = "2.2"
  indicatif = "0.17"
- interprocess = "2.0.0"
+ interprocess = "2.0.1"
  is_executable = "1.0"
  itertools = "0.12"
  libc = "0.2"
@@ -227,7 +227,7 @@ nu-plugin-protocol = { path = "./crates/nu-plugin-protocol", version = "0.93.1"
  nu-plugin-core = { path = "./crates/nu-plugin-core", version = "0.93.1" }
  assert_cmd = "2.0"
  dirs-next = { workspace = true }
- divan = "0.1.14"
+ tango-bench = "0.5"
  pretty_assertions = { workspace = true }
  rstest = { workspace = true, default-features = false }
  serial_test = "3.1"
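
The dev-dependency swap above (divan out, tango-bench in) is what drives the benches/benchmarks.rs rewrite below. As a rough, hypothetical sketch of the registration pattern that harness expects (the function and benchmark names here are made up, not taken from the repo):

use std::hint::black_box;

use tango_bench::{benchmark_fn, tango_benchmarks, tango_main, IntoBenchmarks};

// Hypothetical example; the real benchmarks are defined in benches/benchmarks.rs.
fn bench_example() -> impl IntoBenchmarks {
    [benchmark_fn("example_sum", move |b| {
        // The closure handed to b.iter() is the measured unit of work;
        // black_box keeps the compiler from optimizing it away.
        b.iter(move || black_box((0..1_000u64).sum::<u64>()))
    })]
}

tango_benchmarks!(bench_example());
tango_main!();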
@ -1,7 +1,7 @@
|
||||
use nu_cli::{eval_source, evaluate_commands};
|
||||
use nu_parser::parse;
|
||||
use nu_plugin_core::{Encoder, EncodingType};
|
||||
use nu_plugin_protocol::{PluginCallResponse, PluginOutput};
|
||||
|
||||
use nu_protocol::{
|
||||
engine::{EngineState, Stack},
|
||||
eval_const::create_nu_constant,
|
||||
@ -9,12 +9,14 @@ use nu_protocol::{
|
||||
};
|
||||
use nu_std::load_standard_library;
|
||||
use nu_utils::{get_default_config, get_default_env};
|
||||
use std::path::{Path, PathBuf};
|
||||
use std::{
|
||||
path::{Path, PathBuf},
|
||||
rc::Rc,
|
||||
};
|
||||
|
||||
fn main() {
|
||||
// Run registered benchmarks.
|
||||
divan::main();
|
||||
}
|
||||
use std::hint::black_box;
|
||||
|
||||
use tango_bench::{benchmark_fn, tango_benchmarks, tango_main, IntoBenchmarks};
|
||||
|
||||
fn load_bench_commands() -> EngineState {
|
||||
nu_command::add_shell_command_context(nu_cmd_lang::create_default_context())
|
||||
@ -57,42 +59,6 @@ fn setup_engine() -> EngineState {
|
||||
engine_state
|
||||
}
|
||||
|
||||
fn bench_command(bencher: divan::Bencher, scaled_command: String) {
|
||||
bench_command_with_custom_stack_and_engine(
|
||||
bencher,
|
||||
scaled_command,
|
||||
Stack::new(),
|
||||
setup_engine(),
|
||||
)
|
||||
}
|
||||
|
||||
fn bench_command_with_custom_stack_and_engine(
|
||||
bencher: divan::Bencher,
|
||||
scaled_command: String,
|
||||
stack: nu_protocol::engine::Stack,
|
||||
mut engine: EngineState,
|
||||
) {
|
||||
load_standard_library(&mut engine).unwrap();
|
||||
let commands = Spanned {
|
||||
span: Span::unknown(),
|
||||
item: scaled_command,
|
||||
};
|
||||
|
||||
bencher
|
||||
.with_inputs(|| engine.clone())
|
||||
.bench_values(|mut engine| {
|
||||
evaluate_commands(
|
||||
&commands,
|
||||
&mut engine,
|
||||
&mut stack.clone(),
|
||||
PipelineData::empty(),
|
||||
None,
|
||||
false,
|
||||
)
|
||||
.unwrap();
|
||||
})
|
||||
}
|
||||
|
||||
fn setup_stack_and_engine_from_command(command: &str) -> (Stack, EngineState) {
|
||||
let mut engine = setup_engine();
|
||||
let commands = Spanned {
|
||||
@ -114,261 +80,6 @@ fn setup_stack_and_engine_from_command(command: &str) -> (Stack, EngineState) {
|
||||
(stack, engine)
|
||||
}
|
||||
|
||||
// FIXME: All benchmarks live in this 1 file to speed up build times when benchmarking.
|
||||
// When the *_benchmarks functions were in different files, `cargo bench` would build
|
||||
// an executable for every single one - incredibly slowly. Would be nice to figure out
|
||||
// a way to split things up again.
|
||||
|
||||
#[divan::bench]
|
||||
fn load_standard_lib(bencher: divan::Bencher) {
|
||||
let engine = setup_engine();
|
||||
bencher
|
||||
.with_inputs(|| engine.clone())
|
||||
.bench_values(|mut engine| {
|
||||
load_standard_library(&mut engine).unwrap();
|
||||
})
|
||||
}
|
||||
|
||||
#[divan::bench_group]
|
||||
mod record {
|
||||
|
||||
use super::*;
|
||||
|
||||
fn create_flat_record_string(n: i32) -> String {
|
||||
let mut s = String::from("let record = {");
|
||||
for i in 0..n {
|
||||
s.push_str(&format!("col_{}: {}", i, i));
|
||||
if i < n - 1 {
|
||||
s.push_str(", ");
|
||||
}
|
||||
}
|
||||
s.push('}');
|
||||
s
|
||||
}
|
||||
|
||||
fn create_nested_record_string(depth: i32) -> String {
|
||||
let mut s = String::from("let record = {");
|
||||
for _ in 0..depth {
|
||||
s.push_str("col: {");
|
||||
}
|
||||
s.push_str("col_final: 0");
|
||||
for _ in 0..depth {
|
||||
s.push('}');
|
||||
}
|
||||
s.push('}');
|
||||
s
|
||||
}
|
||||
|
||||
#[divan::bench(args = [1, 10, 100, 1000])]
|
||||
fn create(bencher: divan::Bencher, n: i32) {
|
||||
bench_command(bencher, create_flat_record_string(n));
|
||||
}
|
||||
|
||||
#[divan::bench(args = [1, 10, 100, 1000])]
|
||||
fn flat_access(bencher: divan::Bencher, n: i32) {
|
||||
let (stack, engine) = setup_stack_and_engine_from_command(&create_flat_record_string(n));
|
||||
bench_command_with_custom_stack_and_engine(
|
||||
bencher,
|
||||
"$record.col_0 | ignore".to_string(),
|
||||
stack,
|
||||
engine,
|
||||
);
|
||||
}
|
||||
|
||||
#[divan::bench(args = [1, 2, 4, 8, 16, 32, 64, 128])]
|
||||
fn nest_access(bencher: divan::Bencher, depth: i32) {
|
||||
let (stack, engine) =
|
||||
setup_stack_and_engine_from_command(&create_nested_record_string(depth));
|
||||
let nested_access = ".col".repeat(depth as usize);
|
||||
bench_command_with_custom_stack_and_engine(
|
||||
bencher,
|
||||
format!("$record{} | ignore", nested_access),
|
||||
stack,
|
||||
engine,
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
#[divan::bench_group]
|
||||
mod table {
|
||||
|
||||
use super::*;
|
||||
|
||||
fn create_example_table_nrows(n: i32) -> String {
|
||||
let mut s = String::from("let table = [[foo bar baz]; ");
|
||||
for i in 0..n {
|
||||
s.push_str(&format!("[0, 1, {i}]"));
|
||||
if i < n - 1 {
|
||||
s.push_str(", ");
|
||||
}
|
||||
}
|
||||
s.push(']');
|
||||
s
|
||||
}
|
||||
|
||||
#[divan::bench(args = [1, 10, 100, 1000])]
|
||||
fn create(bencher: divan::Bencher, n: i32) {
|
||||
bench_command(bencher, create_example_table_nrows(n));
|
||||
}
|
||||
|
||||
#[divan::bench(args = [1, 10, 100, 1000])]
|
||||
fn get(bencher: divan::Bencher, n: i32) {
|
||||
let (stack, engine) = setup_stack_and_engine_from_command(&create_example_table_nrows(n));
|
||||
bench_command_with_custom_stack_and_engine(
|
||||
bencher,
|
||||
"$table | get bar | math sum | ignore".to_string(),
|
||||
stack,
|
||||
engine,
|
||||
);
|
||||
}
|
||||
|
||||
#[divan::bench(args = [1, 10, 100, 1000])]
|
||||
fn select(bencher: divan::Bencher, n: i32) {
|
||||
let (stack, engine) = setup_stack_and_engine_from_command(&create_example_table_nrows(n));
|
||||
bench_command_with_custom_stack_and_engine(
|
||||
bencher,
|
||||
"$table | select foo baz | ignore".to_string(),
|
||||
stack,
|
||||
engine,
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
#[divan::bench_group]
|
||||
mod eval_commands {
|
||||
use super::*;
|
||||
|
||||
#[divan::bench(args = [100, 1_000, 10_000])]
|
||||
fn interleave(bencher: divan::Bencher, n: i32) {
|
||||
bench_command(
|
||||
bencher,
|
||||
format!("seq 1 {n} | wrap a | interleave {{ seq 1 {n} | wrap b }} | ignore"),
|
||||
)
|
||||
}
|
||||
|
||||
#[divan::bench(args = [100, 1_000, 10_000])]
|
||||
fn interleave_with_ctrlc(bencher: divan::Bencher, n: i32) {
|
||||
let mut engine = setup_engine();
|
||||
engine.ctrlc = Some(std::sync::Arc::new(std::sync::atomic::AtomicBool::new(
|
||||
false,
|
||||
)));
|
||||
load_standard_library(&mut engine).unwrap();
|
||||
let commands = Spanned {
|
||||
span: Span::unknown(),
|
||||
item: format!("seq 1 {n} | wrap a | interleave {{ seq 1 {n} | wrap b }} | ignore"),
|
||||
};
|
||||
|
||||
bencher
|
||||
.with_inputs(|| engine.clone())
|
||||
.bench_values(|mut engine| {
|
||||
evaluate_commands(
|
||||
&commands,
|
||||
&mut engine,
|
||||
&mut nu_protocol::engine::Stack::new(),
|
||||
PipelineData::empty(),
|
||||
None,
|
||||
false,
|
||||
)
|
||||
.unwrap();
|
||||
})
|
||||
}
|
||||
|
||||
#[divan::bench(args = [1, 5, 10, 100, 1_000])]
|
||||
fn for_range(bencher: divan::Bencher, n: i32) {
|
||||
bench_command(bencher, format!("(for $x in (1..{}) {{ sleep 50ns }})", n))
|
||||
}
|
||||
|
||||
#[divan::bench(args = [1, 5, 10, 100, 1_000])]
|
||||
fn each(bencher: divan::Bencher, n: i32) {
|
||||
bench_command(
|
||||
bencher,
|
||||
format!("(1..{}) | each {{|_| sleep 50ns }} | ignore", n),
|
||||
)
|
||||
}
|
||||
|
||||
#[divan::bench(args = [1, 5, 10, 100, 1_000])]
|
||||
fn par_each_1t(bencher: divan::Bencher, n: i32) {
|
||||
bench_command(
|
||||
bencher,
|
||||
format!("(1..{}) | par-each -t 1 {{|_| sleep 50ns }} | ignore", n),
|
||||
)
|
||||
}
|
||||
|
||||
#[divan::bench(args = [1, 5, 10, 100, 1_000])]
|
||||
fn par_each_2t(bencher: divan::Bencher, n: i32) {
|
||||
bench_command(
|
||||
bencher,
|
||||
format!("(1..{}) | par-each -t 2 {{|_| sleep 50ns }} | ignore", n),
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
#[divan::bench_group()]
|
||||
mod parser_benchmarks {
|
||||
use super::*;
|
||||
|
||||
#[divan::bench()]
|
||||
fn parse_default_config_file(bencher: divan::Bencher) {
|
||||
let engine_state = setup_engine();
|
||||
let default_env = get_default_config().as_bytes();
|
||||
|
||||
bencher
|
||||
.with_inputs(|| nu_protocol::engine::StateWorkingSet::new(&engine_state))
|
||||
.bench_refs(|working_set| parse(working_set, None, default_env, false))
|
||||
}
|
||||
|
||||
#[divan::bench()]
|
||||
fn parse_default_env_file(bencher: divan::Bencher) {
|
||||
let engine_state = setup_engine();
|
||||
let default_env = get_default_env().as_bytes();
|
||||
|
||||
bencher
|
||||
.with_inputs(|| nu_protocol::engine::StateWorkingSet::new(&engine_state))
|
||||
.bench_refs(|working_set| parse(working_set, None, default_env, false))
|
||||
}
|
||||
}
|
||||
|
||||
#[divan::bench_group()]
|
||||
mod eval_benchmarks {
|
||||
use super::*;
|
||||
|
||||
#[divan::bench()]
|
||||
fn eval_default_env(bencher: divan::Bencher) {
|
||||
let default_env = get_default_env().as_bytes();
|
||||
let fname = "default_env.nu";
|
||||
bencher
|
||||
.with_inputs(|| (setup_engine(), nu_protocol::engine::Stack::new()))
|
||||
.bench_values(|(mut engine_state, mut stack)| {
|
||||
eval_source(
|
||||
&mut engine_state,
|
||||
&mut stack,
|
||||
default_env,
|
||||
fname,
|
||||
PipelineData::empty(),
|
||||
false,
|
||||
)
|
||||
})
|
||||
}
|
||||
|
||||
#[divan::bench()]
|
||||
fn eval_default_config(bencher: divan::Bencher) {
|
||||
let default_env = get_default_config().as_bytes();
|
||||
let fname = "default_config.nu";
|
||||
bencher
|
||||
.with_inputs(|| (setup_engine(), nu_protocol::engine::Stack::new()))
|
||||
.bench_values(|(mut engine_state, mut stack)| {
|
||||
eval_source(
|
||||
&mut engine_state,
|
||||
&mut stack,
|
||||
default_env,
|
||||
fname,
|
||||
PipelineData::empty(),
|
||||
false,
|
||||
)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
// generate a new table data with `row_cnt` rows, `col_cnt` columns.
|
||||
fn encoding_test_data(row_cnt: usize, col_cnt: usize) -> Value {
|
||||
let record = Value::test_record(
|
||||
@ -380,76 +91,423 @@ fn encoding_test_data(row_cnt: usize, col_cnt: usize) -> Value {
|
||||
Value::list(vec![record; row_cnt], Span::test_data())
|
||||
}
|
||||
|
||||
#[divan::bench_group()]
|
||||
mod encoding_benchmarks {
|
||||
use super::*;
|
||||
|
||||
#[divan::bench(args = [(100, 5), (10000, 15)])]
|
||||
fn json_encode(bencher: divan::Bencher, (row_cnt, col_cnt): (usize, usize)) {
|
||||
let test_data = PluginOutput::CallResponse(
|
||||
0,
|
||||
PluginCallResponse::value(encoding_test_data(row_cnt, col_cnt)),
|
||||
);
|
||||
let encoder = EncodingType::try_from_bytes(b"json").unwrap();
|
||||
bencher
|
||||
.with_inputs(Vec::new)
|
||||
.bench_values(|mut res| encoder.encode(&test_data, &mut res))
|
||||
}
|
||||
|
||||
#[divan::bench(args = [(100, 5), (10000, 15)])]
|
||||
fn msgpack_encode(bencher: divan::Bencher, (row_cnt, col_cnt): (usize, usize)) {
|
||||
let test_data = PluginOutput::CallResponse(
|
||||
0,
|
||||
PluginCallResponse::value(encoding_test_data(row_cnt, col_cnt)),
|
||||
);
|
||||
let encoder = EncodingType::try_from_bytes(b"msgpack").unwrap();
|
||||
bencher
|
||||
.with_inputs(Vec::new)
|
||||
.bench_values(|mut res| encoder.encode(&test_data, &mut res))
|
||||
}
|
||||
fn bench_command(
|
||||
name: &str,
|
||||
command: &str,
|
||||
stack: Stack,
|
||||
engine: EngineState,
|
||||
) -> impl IntoBenchmarks {
|
||||
let commands = Spanned {
|
||||
span: Span::unknown(),
|
||||
item: command.to_string(),
|
||||
};
|
||||
[benchmark_fn(name, move |b| {
|
||||
let commands = commands.clone();
|
||||
let stack = stack.clone();
|
||||
let engine = engine.clone();
|
||||
b.iter(move || {
|
||||
let mut stack = stack.clone();
|
||||
let mut engine = engine.clone();
|
||||
black_box(
|
||||
evaluate_commands(
|
||||
&commands,
|
||||
&mut engine,
|
||||
&mut stack,
|
||||
PipelineData::empty(),
|
||||
None,
|
||||
false,
|
||||
)
|
||||
.unwrap(),
|
||||
);
|
||||
})
|
||||
})]
|
||||
}
|
||||
|
||||
#[divan::bench_group()]
|
||||
mod decoding_benchmarks {
|
||||
use super::*;
|
||||
|
||||
#[divan::bench(args = [(100, 5), (10000, 15)])]
|
||||
fn json_decode(bencher: divan::Bencher, (row_cnt, col_cnt): (usize, usize)) {
|
||||
let test_data = PluginOutput::CallResponse(
|
||||
0,
|
||||
PluginCallResponse::value(encoding_test_data(row_cnt, col_cnt)),
|
||||
);
|
||||
let encoder = EncodingType::try_from_bytes(b"json").unwrap();
|
||||
let mut res = vec![];
|
||||
encoder.encode(&test_data, &mut res).unwrap();
|
||||
bencher
|
||||
.with_inputs(|| {
|
||||
let mut binary_data = std::io::Cursor::new(res.clone());
|
||||
binary_data.set_position(0);
|
||||
binary_data
|
||||
})
|
||||
.bench_values(|mut binary_data| -> Result<Option<PluginOutput>, _> {
|
||||
encoder.decode(&mut binary_data)
|
||||
})
|
||||
}
|
||||
|
||||
#[divan::bench(args = [(100, 5), (10000, 15)])]
|
||||
fn msgpack_decode(bencher: divan::Bencher, (row_cnt, col_cnt): (usize, usize)) {
|
||||
let test_data = PluginOutput::CallResponse(
|
||||
0,
|
||||
PluginCallResponse::value(encoding_test_data(row_cnt, col_cnt)),
|
||||
);
|
||||
let encoder = EncodingType::try_from_bytes(b"msgpack").unwrap();
|
||||
let mut res = vec![];
|
||||
encoder.encode(&test_data, &mut res).unwrap();
|
||||
bencher
|
||||
.with_inputs(|| {
|
||||
let mut binary_data = std::io::Cursor::new(res.clone());
|
||||
binary_data.set_position(0);
|
||||
binary_data
|
||||
})
|
||||
.bench_values(|mut binary_data| -> Result<Option<PluginOutput>, _> {
|
||||
encoder.decode(&mut binary_data)
|
||||
})
|
||||
}
|
||||
fn bench_eval_source(
|
||||
name: &str,
|
||||
fname: String,
|
||||
source: Vec<u8>,
|
||||
stack: Stack,
|
||||
engine: EngineState,
|
||||
) -> impl IntoBenchmarks {
|
||||
[benchmark_fn(name, move |b| {
|
||||
let stack = stack.clone();
|
||||
let engine = engine.clone();
|
||||
let fname = fname.clone();
|
||||
let source = source.clone();
|
||||
b.iter(move || {
|
||||
let mut stack = stack.clone();
|
||||
let mut engine = engine.clone();
|
||||
let fname: &str = &fname.clone();
|
||||
let source: &[u8] = &source.clone();
|
||||
black_box(eval_source(
|
||||
&mut engine,
|
||||
&mut stack,
|
||||
source,
|
||||
fname,
|
||||
PipelineData::empty(),
|
||||
false,
|
||||
));
|
||||
})
|
||||
})]
|
||||
}
|
||||
|
||||
/// Load the standard library into the engine.
|
||||
fn bench_load_standard_lib() -> impl IntoBenchmarks {
|
||||
[benchmark_fn("load_standard_lib", move |b| {
|
||||
let engine = setup_engine();
|
||||
b.iter(move || {
|
||||
let mut engine = engine.clone();
|
||||
load_standard_library(&mut engine)
|
||||
})
|
||||
})]
|
||||
}
|
||||
|
||||
fn create_flat_record_string(n: i32) -> String {
|
||||
let mut s = String::from("let record = {");
|
||||
for i in 0..n {
|
||||
s.push_str(&format!("col_{}: {}", i, i));
|
||||
if i < n - 1 {
|
||||
s.push_str(", ");
|
||||
}
|
||||
}
|
||||
s.push('}');
|
||||
s
|
||||
}
|
||||
|
||||
fn create_nested_record_string(depth: i32) -> String {
|
||||
let mut s = String::from("let record = {");
|
||||
for _ in 0..depth {
|
||||
s.push_str("col: {");
|
||||
}
|
||||
s.push_str("col_final: 0");
|
||||
for _ in 0..depth {
|
||||
s.push('}');
|
||||
}
|
||||
s.push('}');
|
||||
s
|
||||
}
|
||||
|
||||
fn create_example_table_nrows(n: i32) -> String {
|
||||
let mut s = String::from("let table = [[foo bar baz]; ");
|
||||
for i in 0..n {
|
||||
s.push_str(&format!("[0, 1, {i}]"));
|
||||
if i < n - 1 {
|
||||
s.push_str(", ");
|
||||
}
|
||||
}
|
||||
s.push(']');
|
||||
s
|
||||
}
|
||||
|
||||
fn bench_record_create(n: i32) -> impl IntoBenchmarks {
|
||||
bench_command(
|
||||
&format!("record_create_{n}"),
|
||||
&create_flat_record_string(n),
|
||||
Stack::new(),
|
||||
setup_engine(),
|
||||
)
|
||||
}
|
||||
|
||||
fn bench_record_flat_access(n: i32) -> impl IntoBenchmarks {
|
||||
let setup_command = create_flat_record_string(n);
|
||||
let (stack, engine) = setup_stack_and_engine_from_command(&setup_command);
|
||||
bench_command(
|
||||
&format!("record_flat_access_{n}"),
|
||||
"$record.col_0 | ignore",
|
||||
stack,
|
||||
engine,
|
||||
)
|
||||
}
|
||||
|
||||
fn bench_record_nested_access(n: i32) -> impl IntoBenchmarks {
|
||||
let setup_command = create_nested_record_string(n);
|
||||
let (stack, engine) = setup_stack_and_engine_from_command(&setup_command);
|
||||
let nested_access = ".col".repeat(n as usize);
|
||||
bench_command(
|
||||
&format!("record_nested_access_{n}"),
|
||||
&format!("$record{} | ignore", nested_access),
|
||||
stack,
|
||||
engine,
|
||||
)
|
||||
}
|
||||
|
||||
fn bench_table_create(n: i32) -> impl IntoBenchmarks {
|
||||
bench_command(
|
||||
&format!("table_create_{n}"),
|
||||
&create_example_table_nrows(n),
|
||||
Stack::new(),
|
||||
setup_engine(),
|
||||
)
|
||||
}
|
||||
|
||||
fn bench_table_get(n: i32) -> impl IntoBenchmarks {
|
||||
let setup_command = create_example_table_nrows(n);
|
||||
let (stack, engine) = setup_stack_and_engine_from_command(&setup_command);
|
||||
bench_command(
|
||||
&format!("table_get_{n}"),
|
||||
"$table | get bar | math sum | ignore",
|
||||
stack,
|
||||
engine,
|
||||
)
|
||||
}
|
||||
|
||||
fn bench_table_select(n: i32) -> impl IntoBenchmarks {
|
||||
let setup_command = create_example_table_nrows(n);
|
||||
let (stack, engine) = setup_stack_and_engine_from_command(&setup_command);
|
||||
bench_command(
|
||||
&format!("table_select_{n}"),
|
||||
"$table | select foo baz | ignore",
|
||||
stack,
|
||||
engine,
|
||||
)
|
||||
}
|
||||
|
||||
fn bench_eval_interleave(n: i32) -> impl IntoBenchmarks {
|
||||
let engine = setup_engine();
|
||||
let stack = Stack::new();
|
||||
bench_command(
|
||||
&format!("eval_interleave_{n}"),
|
||||
&format!("seq 1 {n} | wrap a | interleave {{ seq 1 {n} | wrap b }} | ignore"),
|
||||
stack,
|
||||
engine,
|
||||
)
|
||||
}
|
||||
|
||||
fn bench_eval_interleave_with_ctrlc(n: i32) -> impl IntoBenchmarks {
|
||||
let mut engine = setup_engine();
|
||||
engine.ctrlc = Some(std::sync::Arc::new(std::sync::atomic::AtomicBool::new(
|
||||
false,
|
||||
)));
|
||||
let stack = Stack::new();
|
||||
bench_command(
|
||||
&format!("eval_interleave_with_ctrlc_{n}"),
|
||||
&format!("seq 1 {n} | wrap a | interleave {{ seq 1 {n} | wrap b }} | ignore"),
|
||||
stack,
|
||||
engine,
|
||||
)
|
||||
}
|
||||
|
||||
fn bench_eval_for(n: i32) -> impl IntoBenchmarks {
|
||||
let engine = setup_engine();
|
||||
let stack = Stack::new();
|
||||
bench_command(
|
||||
&format!("eval_for_{n}"),
|
||||
&format!("(for $x in (1..{n}) {{ 1 }}) | ignore"),
|
||||
stack,
|
||||
engine,
|
||||
)
|
||||
}
|
||||
|
||||
fn bench_eval_each(n: i32) -> impl IntoBenchmarks {
|
||||
let engine = setup_engine();
|
||||
let stack = Stack::new();
|
||||
bench_command(
|
||||
&format!("eval_each_{n}"),
|
||||
&format!("(1..{n}) | each {{|_| 1 }} | ignore"),
|
||||
stack,
|
||||
engine,
|
||||
)
|
||||
}
|
||||
|
||||
fn bench_eval_par_each(n: i32) -> impl IntoBenchmarks {
|
||||
let engine = setup_engine();
|
||||
let stack = Stack::new();
|
||||
bench_command(
|
||||
&format!("eval_par_each_{n}"),
|
||||
&format!("(1..{}) | par-each -t 2 {{|_| 1 }} | ignore", n),
|
||||
stack,
|
||||
engine,
|
||||
)
|
||||
}
|
||||
|
||||
fn bench_eval_default_config() -> impl IntoBenchmarks {
|
||||
let default_env = get_default_config().as_bytes().to_vec();
|
||||
let fname = "default_config.nu".to_string();
|
||||
bench_eval_source(
|
||||
"eval_default_config",
|
||||
fname,
|
||||
default_env,
|
||||
Stack::new(),
|
||||
setup_engine(),
|
||||
)
|
||||
}
|
||||
|
||||
fn bench_eval_default_env() -> impl IntoBenchmarks {
|
||||
let default_env = get_default_env().as_bytes().to_vec();
|
||||
let fname = "default_env.nu".to_string();
|
||||
bench_eval_source(
|
||||
"eval_default_env",
|
||||
fname,
|
||||
default_env,
|
||||
Stack::new(),
|
||||
setup_engine(),
|
||||
)
|
||||
}
|
||||
|
||||
fn encode_json(row_cnt: usize, col_cnt: usize) -> impl IntoBenchmarks {
|
||||
let test_data = Rc::new(PluginOutput::CallResponse(
|
||||
0,
|
||||
PluginCallResponse::value(encoding_test_data(row_cnt, col_cnt)),
|
||||
));
|
||||
let encoder = Rc::new(EncodingType::try_from_bytes(b"json").unwrap());
|
||||
|
||||
[benchmark_fn(
|
||||
format!("encode_json_{}_{}", row_cnt, col_cnt),
|
||||
move |b| {
|
||||
let encoder = encoder.clone();
|
||||
let test_data = test_data.clone();
|
||||
b.iter(move || {
|
||||
let mut res = Vec::new();
|
||||
encoder.encode(&*test_data, &mut res).unwrap();
|
||||
})
|
||||
},
|
||||
)]
|
||||
}
|
||||
|
||||
fn encode_msgpack(row_cnt: usize, col_cnt: usize) -> impl IntoBenchmarks {
|
||||
let test_data = Rc::new(PluginOutput::CallResponse(
|
||||
0,
|
||||
PluginCallResponse::value(encoding_test_data(row_cnt, col_cnt)),
|
||||
));
|
||||
let encoder = Rc::new(EncodingType::try_from_bytes(b"msgpack").unwrap());
|
||||
|
||||
[benchmark_fn(
|
||||
format!("encode_msgpack_{}_{}", row_cnt, col_cnt),
|
||||
move |b| {
|
||||
let encoder = encoder.clone();
|
||||
let test_data = test_data.clone();
|
||||
b.iter(move || {
|
||||
let mut res = Vec::new();
|
||||
encoder.encode(&*test_data, &mut res).unwrap();
|
||||
})
|
||||
},
|
||||
)]
|
||||
}
|
||||
|
||||
fn decode_json(row_cnt: usize, col_cnt: usize) -> impl IntoBenchmarks {
|
||||
let test_data = PluginOutput::CallResponse(
|
||||
0,
|
||||
PluginCallResponse::value(encoding_test_data(row_cnt, col_cnt)),
|
||||
);
|
||||
let encoder = EncodingType::try_from_bytes(b"json").unwrap();
|
||||
let mut res = vec![];
|
||||
encoder.encode(&test_data, &mut res).unwrap();
|
||||
|
||||
[benchmark_fn(
|
||||
format!("decode_json_{}_{}", row_cnt, col_cnt),
|
||||
move |b| {
|
||||
let res = res.clone();
|
||||
b.iter(move || {
|
||||
let mut binary_data = std::io::Cursor::new(res.clone());
|
||||
binary_data.set_position(0);
|
||||
let _: Result<Option<PluginOutput>, _> =
|
||||
black_box(encoder.decode(&mut binary_data));
|
||||
})
|
||||
},
|
||||
)]
|
||||
}
|
||||
|
||||
fn decode_msgpack(row_cnt: usize, col_cnt: usize) -> impl IntoBenchmarks {
|
||||
let test_data = PluginOutput::CallResponse(
|
||||
0,
|
||||
PluginCallResponse::value(encoding_test_data(row_cnt, col_cnt)),
|
||||
);
|
||||
let encoder = EncodingType::try_from_bytes(b"msgpack").unwrap();
|
||||
let mut res = vec![];
|
||||
encoder.encode(&test_data, &mut res).unwrap();
|
||||
|
||||
[benchmark_fn(
|
||||
format!("decode_msgpack_{}_{}", row_cnt, col_cnt),
|
||||
move |b| {
|
||||
let res = res.clone();
|
||||
b.iter(move || {
|
||||
let mut binary_data = std::io::Cursor::new(res.clone());
|
||||
binary_data.set_position(0);
|
||||
let _: Result<Option<PluginOutput>, _> =
|
||||
black_box(encoder.decode(&mut binary_data));
|
||||
})
|
||||
},
|
||||
)]
|
||||
}
|
||||
|
||||
tango_benchmarks!(
|
||||
bench_load_standard_lib(),
|
||||
// Data types
|
||||
// Record
|
||||
bench_record_create(1),
|
||||
bench_record_create(10),
|
||||
bench_record_create(100),
|
||||
bench_record_create(1_000),
|
||||
bench_record_flat_access(1),
|
||||
bench_record_flat_access(10),
|
||||
bench_record_flat_access(100),
|
||||
bench_record_flat_access(1_000),
|
||||
bench_record_nested_access(1),
|
||||
bench_record_nested_access(2),
|
||||
bench_record_nested_access(4),
|
||||
bench_record_nested_access(8),
|
||||
bench_record_nested_access(16),
|
||||
bench_record_nested_access(32),
|
||||
bench_record_nested_access(64),
|
||||
bench_record_nested_access(128),
|
||||
// Table
|
||||
bench_table_create(1),
|
||||
bench_table_create(10),
|
||||
bench_table_create(100),
|
||||
bench_table_create(1_000),
|
||||
bench_table_get(1),
|
||||
bench_table_get(10),
|
||||
bench_table_get(100),
|
||||
bench_table_get(1_000),
|
||||
bench_table_select(1),
|
||||
bench_table_select(10),
|
||||
bench_table_select(100),
|
||||
bench_table_select(1_000),
|
||||
// Eval
|
||||
// Interleave
|
||||
bench_eval_interleave(100),
|
||||
bench_eval_interleave(1_000),
|
||||
bench_eval_interleave(10_000),
|
||||
bench_eval_interleave_with_ctrlc(100),
|
||||
bench_eval_interleave_with_ctrlc(1_000),
|
||||
bench_eval_interleave_with_ctrlc(10_000),
|
||||
// For
|
||||
bench_eval_for(1),
|
||||
bench_eval_for(10),
|
||||
bench_eval_for(100),
|
||||
bench_eval_for(1_000),
|
||||
bench_eval_for(10_000),
|
||||
// Each
|
||||
bench_eval_each(1),
|
||||
bench_eval_each(10),
|
||||
bench_eval_each(100),
|
||||
bench_eval_each(1_000),
|
||||
bench_eval_each(10_000),
|
||||
// Par-Each
|
||||
bench_eval_par_each(1),
|
||||
bench_eval_par_each(10),
|
||||
bench_eval_par_each(100),
|
||||
bench_eval_par_each(1_000),
|
||||
bench_eval_par_each(10_000),
|
||||
// Config
|
||||
bench_eval_default_config(),
|
||||
// Env
|
||||
bench_eval_default_env(),
|
||||
// Encode
|
||||
// Json
|
||||
encode_json(100, 5),
|
||||
encode_json(10000, 15),
|
||||
// MsgPack
|
||||
encode_msgpack(100, 5),
|
||||
encode_msgpack(10000, 15),
|
||||
// Decode
|
||||
// Json
|
||||
decode_json(100, 5),
|
||||
decode_json(10000, 15),
|
||||
// MsgPack
|
||||
decode_msgpack(100, 5),
|
||||
decode_msgpack(10000, 15)
|
||||
);
|
||||
|
||||
tango_main!();
|
||||
|
@@ -107,7 +107,7 @@ impl Command for History {
              file: history_path.display().to_string(),
              span: head,
          })?
-         .into_pipeline_data(ctrlc)),
+         .into_pipeline_data(head, ctrlc)),
      HistoryFileFormat::Sqlite => Ok(history_reader
          .and_then(|h| {
              h.search(SearchQuery::everything(SearchDirection::Forward, None))
@@ -122,7 +122,7 @@ impl Command for History {
              file: history_path.display().to_string(),
              span: head,
          })?
-         .into_pipeline_data(ctrlc)),
+         .into_pipeline_data(head, ctrlc)),
      }
  }
} else {
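
The into_pipeline_data change above (the call-site span now precedes the ctrl-c handle) is the same mechanical update repeated in most of the command hunks that follow. A minimal sketch of the new call shape, assuming nu-protocol's IntoInterruptiblePipelineData trait is in scope (the helper name is illustrative, not from the repo):

use std::sync::{atomic::AtomicBool, Arc};

use nu_protocol::{IntoInterruptiblePipelineData, PipelineData, Span, Value};

// Hypothetical helper: turn an iterator of values into an interruptible stream,
// passing the originating span first and the ctrl-c handle second.
fn numbers_as_stream(head: Span, ctrlc: Option<Arc<AtomicBool>>) -> PipelineData {
    (0..3i64)
        .map(move |i| Value::int(i, head))
        .into_pipeline_data(head, ctrlc)
}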
@ -7,40 +7,65 @@ use nu_protocol::{
|
||||
Span,
|
||||
};
|
||||
use nu_utils::get_ls_colors;
|
||||
use std::{
|
||||
ffi::OsStr,
|
||||
path::{is_separator, Component, Path, PathBuf, MAIN_SEPARATOR as SEP},
|
||||
use std::path::{
|
||||
is_separator, Component, Path, PathBuf, MAIN_SEPARATOR as SEP, MAIN_SEPARATOR_STR,
|
||||
};
|
||||
|
||||
#[derive(Clone, Default)]
|
||||
pub struct PathBuiltFromString {
|
||||
parts: Vec<String>,
|
||||
isdir: bool,
|
||||
}
|
||||
|
||||
fn complete_rec(
|
||||
partial: &[String],
|
||||
partial: &[&str],
|
||||
built: &PathBuiltFromString,
|
||||
cwd: &Path,
|
||||
options: &CompletionOptions,
|
||||
dir: bool,
|
||||
isdir: bool,
|
||||
) -> Vec<PathBuf> {
|
||||
) -> Vec<PathBuiltFromString> {
|
||||
let mut completions = vec![];
|
||||
|
||||
if let Ok(result) = cwd.read_dir() {
|
||||
for entry in result.filter_map(|e| e.ok()) {
|
||||
let entry_name = entry.file_name().to_string_lossy().into_owned();
|
||||
let path = entry.path();
|
||||
if let Some((&base, rest)) = partial.split_first() {
|
||||
if (base == "." || base == "..") && (isdir || !rest.is_empty()) {
|
||||
let mut built = built.clone();
|
||||
built.parts.push(base.to_string());
|
||||
built.isdir = true;
|
||||
return complete_rec(rest, &built, cwd, options, dir, isdir);
|
||||
}
|
||||
}
|
||||
|
||||
if !dir || path.is_dir() {
|
||||
match partial.first() {
|
||||
Some(base) if matches(base, &entry_name, options) => {
|
||||
let partial = &partial[1..];
|
||||
if !partial.is_empty() || isdir {
|
||||
completions.extend(complete_rec(partial, &path, options, dir, isdir));
|
||||
if entry_name.eq(base) {
|
||||
break;
|
||||
}
|
||||
let mut built_path = cwd.to_path_buf();
|
||||
for part in &built.parts {
|
||||
built_path.push(part);
|
||||
}
|
||||
|
||||
let Ok(result) = built_path.read_dir() else {
|
||||
return completions;
|
||||
};
|
||||
|
||||
for entry in result.filter_map(|e| e.ok()) {
|
||||
let entry_name = entry.file_name().to_string_lossy().into_owned();
|
||||
let entry_isdir = entry.path().is_dir();
|
||||
let mut built = built.clone();
|
||||
built.parts.push(entry_name.clone());
|
||||
built.isdir = entry_isdir;
|
||||
|
||||
if !dir || entry_isdir {
|
||||
match partial.split_first() {
|
||||
Some((base, rest)) => {
|
||||
if matches(base, &entry_name, options) {
|
||||
if !rest.is_empty() || isdir {
|
||||
completions
|
||||
.extend(complete_rec(rest, &built, cwd, options, dir, isdir));
|
||||
} else {
|
||||
completions.push(path)
|
||||
completions.push(built);
|
||||
}
|
||||
}
|
||||
None => completions.push(path),
|
||||
_ => {}
|
||||
}
|
||||
None => {
|
||||
completions.push(built);
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -48,33 +73,23 @@ fn complete_rec(
|
||||
completions
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
enum OriginalCwd {
|
||||
None,
|
||||
Home(PathBuf),
|
||||
Some(PathBuf),
|
||||
// referencing a single local file
|
||||
Local(PathBuf),
|
||||
Home,
|
||||
Prefix(String),
|
||||
}
|
||||
|
||||
impl OriginalCwd {
|
||||
fn apply(&self, p: &Path) -> String {
|
||||
let mut ret = match self {
|
||||
Self::None => p.to_string_lossy().into_owned(),
|
||||
Self::Some(base) => pathdiff::diff_paths(p, base)
|
||||
.unwrap_or(p.to_path_buf())
|
||||
.to_string_lossy()
|
||||
.into_owned(),
|
||||
Self::Home(home) => match p.strip_prefix(home) {
|
||||
Ok(suffix) => format!("~{}{}", SEP, suffix.to_string_lossy()),
|
||||
_ => p.to_string_lossy().into_owned(),
|
||||
},
|
||||
Self::Local(base) => Path::new(".")
|
||||
.join(pathdiff::diff_paths(p, base).unwrap_or(p.to_path_buf()))
|
||||
.to_string_lossy()
|
||||
.into_owned(),
|
||||
fn apply(&self, mut p: PathBuiltFromString) -> String {
|
||||
match self {
|
||||
Self::None => {}
|
||||
Self::Home => p.parts.insert(0, "~".to_string()),
|
||||
Self::Prefix(s) => p.parts.insert(0, s.clone()),
|
||||
};
|
||||
|
||||
if p.is_dir() {
|
||||
let mut ret = p.parts.join(MAIN_SEPARATOR_STR);
|
||||
if p.isdir {
|
||||
ret.push(SEP);
|
||||
}
|
||||
ret
|
||||
@ -116,79 +131,67 @@ pub fn complete_item(
|
||||
};
|
||||
get_ls_colors(ls_colors_env_str)
|
||||
});
|
||||
|
||||
let mut cwd = cwd_pathbuf.clone();
|
||||
let mut prefix_len = 0;
|
||||
let mut original_cwd = OriginalCwd::None;
|
||||
let mut components_vec: Vec<Component> = Path::new(&partial).components().collect();
|
||||
|
||||
// Path components that end with a single "." get normalized away,
|
||||
// so if the partial path ends in a literal "." we must add it back in manually
|
||||
if partial.ends_with('.') && partial.len() > 1 {
|
||||
components_vec.push(Component::Normal(OsStr::new(".")));
|
||||
};
|
||||
let mut components = components_vec.into_iter().peekable();
|
||||
|
||||
let mut cwd = match components.peek().cloned() {
|
||||
let mut components = Path::new(&partial).components().peekable();
|
||||
match components.peek().cloned() {
|
||||
Some(c @ Component::Prefix(..)) => {
|
||||
// windows only by definition
|
||||
components.next();
|
||||
if let Some(Component::RootDir) = components.peek().cloned() {
|
||||
components.next();
|
||||
};
|
||||
[c, Component::RootDir].iter().collect()
|
||||
cwd = [c, Component::RootDir].iter().collect();
|
||||
prefix_len = c.as_os_str().len();
|
||||
original_cwd = OriginalCwd::Prefix(c.as_os_str().to_string_lossy().into_owned());
|
||||
}
|
||||
Some(c @ Component::RootDir) => {
|
||||
components.next();
|
||||
PathBuf::from(c.as_os_str())
|
||||
// This is kind of a hack. When joining an empty string with the rest,
|
||||
// we add the slash automagically
|
||||
cwd = PathBuf::from(c.as_os_str());
|
||||
prefix_len = 1;
|
||||
original_cwd = OriginalCwd::Prefix(String::new());
|
||||
}
|
||||
Some(Component::Normal(home)) if home.to_string_lossy() == "~" => {
|
||||
components.next();
|
||||
original_cwd = OriginalCwd::Home(home_dir().unwrap_or(cwd_pathbuf.clone()));
|
||||
home_dir().unwrap_or(cwd_pathbuf)
|
||||
}
|
||||
Some(Component::CurDir) => {
|
||||
components.next();
|
||||
original_cwd = match components.peek().cloned() {
|
||||
Some(Component::Normal(_)) | None => OriginalCwd::Local(cwd_pathbuf.clone()),
|
||||
_ => OriginalCwd::Some(cwd_pathbuf.clone()),
|
||||
};
|
||||
cwd_pathbuf
|
||||
}
|
||||
_ => {
|
||||
original_cwd = OriginalCwd::Some(cwd_pathbuf.clone());
|
||||
cwd_pathbuf
|
||||
cwd = home_dir().unwrap_or(cwd_pathbuf);
|
||||
prefix_len = 1;
|
||||
original_cwd = OriginalCwd::Home;
|
||||
}
|
||||
_ => {}
|
||||
};
|
||||
|
||||
let mut partial = vec![];
|
||||
let after_prefix = &partial[prefix_len..];
|
||||
let partial: Vec<_> = after_prefix
|
||||
.strip_prefix(is_separator)
|
||||
.unwrap_or(after_prefix)
|
||||
.split(is_separator)
|
||||
.filter(|s| !s.is_empty())
|
||||
.collect();
|
||||
|
||||
for component in components {
|
||||
match component {
|
||||
Component::Prefix(..) => unreachable!(),
|
||||
Component::RootDir => unreachable!(),
|
||||
Component::CurDir => {}
|
||||
Component::ParentDir => {
|
||||
if partial.pop().is_none() {
|
||||
cwd.pop();
|
||||
}
|
||||
}
|
||||
Component::Normal(c) => partial.push(c.to_string_lossy().into_owned()),
|
||||
}
|
||||
}
|
||||
|
||||
complete_rec(partial.as_slice(), &cwd, options, want_directory, isdir)
|
||||
.into_iter()
|
||||
.map(|p| {
|
||||
let path = original_cwd.apply(&p);
|
||||
let style = ls_colors.as_ref().map(|lsc| {
|
||||
lsc.style_for_path_with_metadata(
|
||||
&path,
|
||||
std::fs::symlink_metadata(&path).ok().as_ref(),
|
||||
)
|
||||
complete_rec(
|
||||
partial.as_slice(),
|
||||
&PathBuiltFromString::default(),
|
||||
&cwd,
|
||||
options,
|
||||
want_directory,
|
||||
isdir,
|
||||
)
|
||||
.into_iter()
|
||||
.map(|p| {
|
||||
let path = original_cwd.apply(p);
|
||||
let style = ls_colors.as_ref().map(|lsc| {
|
||||
lsc.style_for_path_with_metadata(&path, std::fs::symlink_metadata(&path).ok().as_ref())
|
||||
.map(lscolors::Style::to_nu_ansi_term_style)
|
||||
.unwrap_or_default()
|
||||
});
|
||||
(span, escape_path(path, want_directory), style)
|
||||
})
|
||||
.collect()
|
||||
});
|
||||
(span, escape_path(path, want_directory), style)
|
||||
})
|
||||
.collect()
|
||||
}
|
||||
|
||||
// Fix files or folders with quotes or hashes
|
||||
|
@@ -88,7 +88,7 @@ impl Highlighter for NuHighlighter {
                  .to_string();

              let mut add_colored_token = |shape: &FlatShape, text: String| {
-                 output.push((get_shape_color(shape.to_string(), &self.config), text));
+                 output.push((get_shape_color(shape.as_str(), &self.config), text));
              };

              match shape.1 {
@@ -128,7 +128,7 @@ impl Highlighter for NuHighlighter {
                          let start = part.start - span.start;
                          let end = part.end - span.start;
                          let text = next_token[start..end].to_string();
-                         let mut style = get_shape_color(shape.to_string(), &self.config);
+                         let mut style = get_shape_color(shape.as_str(), &self.config);
                          if highlight {
                              style = get_matching_brackets_style(style, &self.config);
                          }
@ -334,7 +334,26 @@ fn partial_completions() {
|
||||
let suggestions = completer.complete(&target_dir, target_dir.len());
|
||||
|
||||
// Create the expected values
|
||||
let expected_paths: Vec<String> = vec![file(dir.join("final_partial").join("somefile"))];
|
||||
let expected_paths: Vec<String> = vec![
|
||||
file(
|
||||
dir.join("partial_a")
|
||||
.join("..")
|
||||
.join("final_partial")
|
||||
.join("somefile"),
|
||||
),
|
||||
file(
|
||||
dir.join("partial_b")
|
||||
.join("..")
|
||||
.join("final_partial")
|
||||
.join("somefile"),
|
||||
),
|
||||
file(
|
||||
dir.join("partial_c")
|
||||
.join("..")
|
||||
.join("final_partial")
|
||||
.join("somefile"),
|
||||
),
|
||||
];
|
||||
|
||||
// Match the results
|
||||
match_suggestions(expected_paths, suggestions);
|
||||
|
@ -87,7 +87,7 @@ impl Command for EachWhile {
|
||||
Err(_) => None,
|
||||
})
|
||||
.fuse()
|
||||
.into_pipeline_data(engine_state.ctrlc.clone()))
|
||||
.into_pipeline_data(head, engine_state.ctrlc.clone()))
|
||||
}
|
||||
PipelineData::ExternalStream { stdout: None, .. } => Ok(PipelineData::empty()),
|
||||
PipelineData::ExternalStream {
|
||||
@ -108,7 +108,7 @@ impl Command for EachWhile {
|
||||
}
|
||||
})
|
||||
.fuse()
|
||||
.into_pipeline_data(engine_state.ctrlc.clone()))
|
||||
.into_pipeline_data(head, engine_state.ctrlc.clone()))
|
||||
}
|
||||
// This match allows non-iterables to be accepted,
|
||||
// which is currently considered undesirable (Nov 2022).
|
||||
|
@ -108,7 +108,7 @@ impl Command for UpdateCells {
|
||||
columns,
|
||||
span: head,
|
||||
}
|
||||
.into_pipeline_data(engine_state.ctrlc.clone())
|
||||
.into_pipeline_data(head, engine_state.ctrlc.clone())
|
||||
.set_metadata(metadata))
|
||||
}
|
||||
}
|
||||
|
@ -238,10 +238,7 @@ fn format(
|
||||
}
|
||||
}
|
||||
|
||||
Ok(PipelineData::ListStream(
|
||||
ListStream::from_stream(list.into_iter(), None),
|
||||
None,
|
||||
))
|
||||
Ok(ListStream::new(list.into_iter(), head_span, engine_state.ctrlc.clone()).into())
|
||||
}
|
||||
// Unwrapping this ShellError is a bit unfortunate.
|
||||
// Ideally, its Span would be preserved.
|
||||
|
@ -62,6 +62,7 @@ impl Command for Do {
|
||||
call: &Call,
|
||||
input: PipelineData,
|
||||
) -> Result<PipelineData, ShellError> {
|
||||
let head = call.head;
|
||||
let block: Closure = call.req(engine_state, caller_stack, 0)?;
|
||||
let rest: Vec<Value> = call.rest(engine_state, caller_stack, 1)?;
|
||||
let ignore_all_errors = call.has_flag(engine_state, caller_stack, "ignore-errors")?;
|
||||
@ -75,7 +76,7 @@ impl Command for Do {
|
||||
let mut callee_stack = caller_stack.captures_to_stack_preserve_out_dest(block.captures);
|
||||
let block = engine_state.get_block(block.block_id);
|
||||
|
||||
bind_args_to(&mut callee_stack, &block.signature, rest, call.head)?;
|
||||
bind_args_to(&mut callee_stack, &block.signature, rest, head)?;
|
||||
let eval_block_with_early_return = get_eval_block_with_early_return(engine_state);
|
||||
let result = eval_block_with_early_return(engine_state, &mut callee_stack, block, input);
|
||||
|
||||
@ -117,7 +118,7 @@ impl Command for Do {
|
||||
None,
|
||||
)
|
||||
})
|
||||
.err_span(call.head)
|
||||
.err_span(head)
|
||||
})
|
||||
.transpose()?;
|
||||
|
||||
@ -148,13 +149,9 @@ impl Command for Do {
|
||||
None
|
||||
};
|
||||
|
||||
let mut exit_code_ctrlc = None;
|
||||
let exit_code: Vec<Value> = match exit_code {
|
||||
None => vec![],
|
||||
Some(exit_code_stream) => {
|
||||
exit_code_ctrlc.clone_from(&exit_code_stream.ctrlc);
|
||||
exit_code_stream.into_iter().collect()
|
||||
}
|
||||
Some(exit_code_stream) => exit_code_stream.into_iter().collect(),
|
||||
};
|
||||
if let Some(Value::Int { val: code, .. }) = exit_code.last() {
|
||||
if *code != 0 {
|
||||
@ -174,10 +171,7 @@ impl Command for Do {
|
||||
span,
|
||||
None,
|
||||
)),
|
||||
exit_code: Some(ListStream::from_stream(
|
||||
exit_code.into_iter(),
|
||||
exit_code_ctrlc,
|
||||
)),
|
||||
exit_code: Some(ListStream::new(exit_code.into_iter(), span, None)),
|
||||
span,
|
||||
metadata,
|
||||
trim_end_newline,
|
||||
@ -205,21 +199,15 @@ impl Command for Do {
|
||||
Ok(PipelineData::Value(Value::Error { .. }, ..)) | Err(_) if ignore_shell_errors => {
|
||||
Ok(PipelineData::empty())
|
||||
}
|
||||
Ok(PipelineData::ListStream(ls, metadata)) if ignore_shell_errors => {
|
||||
// check if there is a `Value::Error` in given list stream first.
|
||||
let mut values = vec![];
|
||||
let ctrlc = ls.ctrlc.clone();
|
||||
for v in ls {
|
||||
if let Value::Error { .. } = v {
|
||||
values.push(Value::nothing(call.head));
|
||||
Ok(PipelineData::ListStream(stream, metadata)) if ignore_shell_errors => {
|
||||
let stream = stream.map(move |value| {
|
||||
if let Value::Error { .. } = value {
|
||||
Value::nothing(head)
|
||||
} else {
|
||||
values.push(v)
|
||||
value
|
||||
}
|
||||
}
|
||||
Ok(PipelineData::ListStream(
|
||||
ListStream::from_stream(values.into_iter(), ctrlc),
|
||||
metadata,
|
||||
))
|
||||
});
|
||||
Ok(PipelineData::ListStream(stream, metadata))
|
||||
}
|
||||
r => r,
|
||||
}
|
||||
|
@ -1,5 +1,4 @@
|
||||
use nu_engine::{command_prelude::*, get_eval_block, get_eval_expression};
|
||||
use nu_protocol::ListStream;
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct For;
|
||||
@ -88,7 +87,11 @@ impl Command for For {
|
||||
let span = value.span();
|
||||
match value {
|
||||
Value::List { vals, .. } => {
|
||||
for (idx, x) in ListStream::from_stream(vals.into_iter(), ctrlc).enumerate() {
|
||||
for (idx, x) in vals.into_iter().enumerate() {
|
||||
if nu_utils::ctrl_c::was_pressed(&ctrlc) {
|
||||
break;
|
||||
}
|
||||
|
||||
// with_env() is used here to ensure that each iteration uses
|
||||
// a different set of environment variables.
|
||||
// Hence, a 'cd' in the first loop won't affect the next loop.
|
||||
|
@ -26,13 +26,10 @@ impl Command for ScopeAliases {
|
||||
call: &Call,
|
||||
_input: PipelineData,
|
||||
) -> Result<PipelineData, ShellError> {
|
||||
let span = call.head;
|
||||
let ctrlc = engine_state.ctrlc.clone();
|
||||
|
||||
let head = call.head;
|
||||
let mut scope_data = ScopeData::new(engine_state, stack);
|
||||
scope_data.populate_decls();
|
||||
|
||||
Ok(scope_data.collect_aliases(span).into_pipeline_data(ctrlc))
|
||||
Ok(Value::list(scope_data.collect_aliases(head), head).into_pipeline_data())
|
||||
}
|
||||
|
||||
fn examples(&self) -> Vec<Example> {
|
||||
|
@ -26,13 +26,10 @@ impl Command for ScopeCommands {
|
||||
call: &Call,
|
||||
_input: PipelineData,
|
||||
) -> Result<PipelineData, ShellError> {
|
||||
let span = call.head;
|
||||
let ctrlc = engine_state.ctrlc.clone();
|
||||
|
||||
let head = call.head;
|
||||
let mut scope_data = ScopeData::new(engine_state, stack);
|
||||
scope_data.populate_decls();
|
||||
|
||||
Ok(scope_data.collect_commands(span).into_pipeline_data(ctrlc))
|
||||
Ok(Value::list(scope_data.collect_commands(head), head).into_pipeline_data())
|
||||
}
|
||||
|
||||
fn examples(&self) -> Vec<Example> {
|
||||
|
@ -26,13 +26,10 @@ impl Command for ScopeExterns {
|
||||
call: &Call,
|
||||
_input: PipelineData,
|
||||
) -> Result<PipelineData, ShellError> {
|
||||
let span = call.head;
|
||||
let ctrlc = engine_state.ctrlc.clone();
|
||||
|
||||
let head = call.head;
|
||||
let mut scope_data = ScopeData::new(engine_state, stack);
|
||||
scope_data.populate_decls();
|
||||
|
||||
Ok(scope_data.collect_externs(span).into_pipeline_data(ctrlc))
|
||||
Ok(Value::list(scope_data.collect_externs(head), head).into_pipeline_data())
|
||||
}
|
||||
|
||||
fn examples(&self) -> Vec<Example> {
|
||||
|
@ -26,13 +26,10 @@ impl Command for ScopeModules {
|
||||
call: &Call,
|
||||
_input: PipelineData,
|
||||
) -> Result<PipelineData, ShellError> {
|
||||
let span = call.head;
|
||||
let ctrlc = engine_state.ctrlc.clone();
|
||||
|
||||
let head = call.head;
|
||||
let mut scope_data = ScopeData::new(engine_state, stack);
|
||||
scope_data.populate_modules();
|
||||
|
||||
Ok(scope_data.collect_modules(span).into_pipeline_data(ctrlc))
|
||||
Ok(Value::list(scope_data.collect_modules(head), head).into_pipeline_data())
|
||||
}
|
||||
|
||||
fn examples(&self) -> Vec<Example> {
|
||||
|
@ -26,13 +26,10 @@ impl Command for ScopeVariables {
|
||||
call: &Call,
|
||||
_input: PipelineData,
|
||||
) -> Result<PipelineData, ShellError> {
|
||||
let span = call.head;
|
||||
let ctrlc = engine_state.ctrlc.clone();
|
||||
|
||||
let head = call.head;
|
||||
let mut scope_data = ScopeData::new(engine_state, stack);
|
||||
scope_data.populate_vars();
|
||||
|
||||
Ok(scope_data.collect_vars(span).into_pipeline_data(ctrlc))
|
||||
Ok(Value::list(scope_data.collect_vars(head), head).into_pipeline_data())
|
||||
}
|
||||
|
||||
fn examples(&self) -> Vec<Example> {
|
||||
|
@ -69,35 +69,47 @@ impl Command for PluginList {
|
||||
call: &Call,
|
||||
_input: PipelineData,
|
||||
) -> Result<PipelineData, ShellError> {
|
||||
let span = call.span();
|
||||
let head = call.head;
|
||||
|
||||
// Group plugin decls by plugin identity
|
||||
let decls = engine_state.plugin_decls().into_group_map_by(|decl| {
|
||||
decl.plugin_identity()
|
||||
.expect("plugin decl should have identity")
|
||||
});
|
||||
|
||||
// Build plugins list
|
||||
let list = engine_state.plugins().iter().map(|plugin| {
|
||||
// Find commands that belong to the plugin
|
||||
let commands = decls.get(plugin.identity())
|
||||
.into_iter()
|
||||
.flat_map(|decls| {
|
||||
decls.iter().map(|decl| Value::string(decl.name(), span))
|
||||
decls.iter().map(|decl| Value::string(decl.name(), head))
|
||||
})
|
||||
.collect();
|
||||
|
||||
Value::record(record! {
|
||||
"name" => Value::string(plugin.identity().name(), span),
|
||||
"is_running" => Value::bool(plugin.is_running(), span),
|
||||
"pid" => plugin.pid()
|
||||
.map(|p| Value::int(p as i64, span))
|
||||
.unwrap_or(Value::nothing(span)),
|
||||
"filename" => Value::string(plugin.identity().filename().to_string_lossy(), span),
|
||||
"shell" => plugin.identity().shell()
|
||||
.map(|s| Value::string(s.to_string_lossy(), span))
|
||||
.unwrap_or(Value::nothing(span)),
|
||||
"commands" => Value::list(commands, span),
|
||||
}, span)
|
||||
}).collect::<Vec<Value>>();
|
||||
Ok(list.into_pipeline_data(engine_state.ctrlc.clone()))
|
||||
let pid = plugin
|
||||
.pid()
|
||||
.map(|p| Value::int(p as i64, head))
|
||||
.unwrap_or(Value::nothing(head));
|
||||
|
||||
let shell = plugin
|
||||
.identity()
|
||||
.shell()
|
||||
.map(|s| Value::string(s.to_string_lossy(), head))
|
||||
.unwrap_or(Value::nothing(head));
|
||||
|
||||
let record = record! {
|
||||
"name" => Value::string(plugin.identity().name(), head),
|
||||
"is_running" => Value::bool(plugin.is_running(), head),
|
||||
"pid" => pid,
|
||||
"filename" => Value::string(plugin.identity().filename().to_string_lossy(), head),
|
||||
"shell" => shell,
|
||||
"commands" => Value::list(commands, head),
|
||||
};
|
||||
|
||||
Value::record(record, head)
|
||||
}).collect();
|
||||
|
||||
Ok(Value::list(list, head).into_pipeline_data())
|
||||
}
|
||||
}
|
||||
|
@ -13,7 +13,7 @@ pub fn lookup_ansi_color_style(s: &str) -> Style {
|
||||
.and_then(|c| c.map(|c| c.normal()))
|
||||
.unwrap_or_default()
|
||||
} else if s.starts_with('{') {
|
||||
color_string_to_nustyle(s.to_string())
|
||||
color_string_to_nustyle(s)
|
||||
} else {
|
||||
lookup_style(s)
|
||||
}
|
||||
@ -74,13 +74,13 @@ fn get_style_from_value(record: &Record) -> Option<NuStyle> {
|
||||
}
|
||||
}
|
||||
|
||||
fn color_string_to_nustyle(color_string: String) -> Style {
|
||||
fn color_string_to_nustyle(color_string: &str) -> Style {
|
||||
// eprintln!("color_string: {}", &color_string);
|
||||
if color_string.is_empty() {
|
||||
return Style::default();
|
||||
}
|
||||
|
||||
let nu_style = match nu_json::from_str::<NuStyle>(&color_string) {
|
||||
let nu_style = match nu_json::from_str::<NuStyle>(color_string) {
|
||||
Ok(s) => s,
|
||||
Err(_) => return Style::default(),
|
||||
};
|
||||
@ -97,13 +97,13 @@ mod tests {
|
||||
#[test]
|
||||
fn test_color_string_to_nustyle_empty_string() {
|
||||
let color_string = String::new();
|
||||
let style = color_string_to_nustyle(color_string);
|
||||
let style = color_string_to_nustyle(&color_string);
|
||||
assert_eq!(style, Style::default());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_color_string_to_nustyle_valid_string() {
|
||||
let color_string = r#"{"fg": "black", "bg": "white", "attr": "b"}"#.to_string();
|
||||
let color_string = r#"{"fg": "black", "bg": "white", "attr": "b"}"#;
|
||||
let style = color_string_to_nustyle(color_string);
|
||||
assert_eq!(style.foreground, Some(Color::Black));
|
||||
assert_eq!(style.background, Some(Color::White));
|
||||
@ -112,7 +112,7 @@ mod tests {
|
||||
|
||||
#[test]
|
||||
fn test_color_string_to_nustyle_invalid_string() {
|
||||
let color_string = "invalid string".to_string();
|
||||
let color_string = "invalid string";
|
||||
let style = color_string_to_nustyle(color_string);
|
||||
assert_eq!(style, Style::default());
|
||||
}
|
||||
|
@@ -3,8 +3,8 @@ use nu_ansi_term::{Color, Style};
  use nu_protocol::{Config, Value};

  // The default colors for shapes, used when there is no config for them.
- pub fn default_shape_color(shape: String) -> Style {
-     match shape.as_ref() {
+ pub fn default_shape_color(shape: &str) -> Style {
+     match shape {
          "shape_and" => Style::new().fg(Color::Purple).bold(),
          "shape_binary" => Style::new().fg(Color::Purple).bold(),
          "shape_block" => Style::new().fg(Color::Blue).bold(),
@@ -45,8 +45,8 @@ pub fn default_shape_color(shape: String) -> Style {
      }
  }

- pub fn get_shape_color(shape: String, conf: &Config) -> Style {
-     match conf.color_config.get(shape.as_str()) {
+ pub fn get_shape_color(shape: &str, conf: &Config) -> Style {
+     match conf.color_config.get(shape) {
          Some(int_color) => {
              // Shapes do not use color_config closures, currently.
              match int_color {
@ -98,7 +98,7 @@ fn into_cell_path(call: &Call, input: PipelineData) -> Result<PipelineData, Shel
|
||||
match input {
|
||||
PipelineData::Value(value, _) => Ok(value_to_cell_path(&value, head)?.into_pipeline_data()),
|
||||
PipelineData::ListStream(stream, ..) => {
|
||||
let list: Vec<_> = stream.collect();
|
||||
let list: Vec<_> = stream.into_iter().collect();
|
||||
Ok(list_to_cell_path(&list, head)?.into_pipeline_data())
|
||||
}
|
||||
PipelineData::ExternalStream { span, .. } => Err(ShellError::OnlySupportsThisInputType {
|
||||
|
@ -39,7 +39,7 @@ impl Zone {
|
||||
Self::Error // Out of range
|
||||
}
|
||||
}
|
||||
fn from_string(s: String) -> Self {
|
||||
fn from_string(s: &str) -> Self {
|
||||
match s.to_ascii_lowercase().as_str() {
|
||||
"utc" | "u" => Self::Utc,
|
||||
"local" | "l" => Self::Local,
|
||||
@ -126,7 +126,7 @@ impl Command for SubCommand {
|
||||
span: zone_offset.span,
|
||||
}),
|
||||
None => timezone.as_ref().map(|zone| Spanned {
|
||||
item: Zone::from_string(zone.item.clone()),
|
||||
item: Zone::from_string(&zone.item),
|
||||
span: zone.span,
|
||||
}),
|
||||
};
|
||||
|
@ -81,7 +81,7 @@ impl Command for IntoValue {
|
||||
display_as_filesizes,
|
||||
span,
|
||||
}
|
||||
.into_pipeline_data(ctrlc)
|
||||
.into_pipeline_data(span, ctrlc)
|
||||
.set_metadata(metadata))
|
||||
}
|
||||
}
|
||||
|
@@ -203,8 +203,8 @@ fn action(
      ctrl_c: Option<Arc<AtomicBool>>,
  ) -> Result<Value, ShellError> {
      match input {
-         PipelineData::ListStream(list_stream, _) => {
-             insert_in_transaction(list_stream.stream, span, table, ctrl_c)
+         PipelineData::ListStream(stream, _) => {
+             insert_in_transaction(stream.into_iter(), span, table, ctrl_c)
          }
          PipelineData::Value(
              Value::List {
@@ -269,7 +269,8 @@ fn insert_in_transaction(
          let insert_statement = format!(
              "INSERT INTO [{}] ({}) VALUES ({})",
              table_name,
-             Itertools::intersperse(val.columns().map(String::as_str), ", ").collect::<String>(),
+             Itertools::intersperse(val.columns().map(|c| format!("`{}`", c)), ", ".to_string())
+                 .collect::<String>(),
              Itertools::intersperse(itertools::repeat_n("?", val.len()), ", ").collect::<String>(),
          );

@@ -390,8 +391,12 @@ fn get_columns_with_sqlite_types(
      let mut columns: Vec<(String, &'static str)> = vec![];

      for (c, v) in record {
-         if !columns.iter().any(|(name, _)| name == c) {
-             columns.push((c.clone(), nu_value_to_sqlite_type(v)?));
+         if !columns
+             .iter()
+             .map(|name| (format!("`{}`", name.0), name.1))
+             .any(|(name, _)| name == *c)
+         {
+             columns.push((format!("`{}`", c), nu_value_to_sqlite_type(v)?));
          }
      }
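
The quoting change above wraps each column name in backticks while building the INSERT statement, so record columns that collide with SQL keywords or contain unusual characters still insert cleanly. A small, self-contained sketch of that statement-building pattern (the table and column names are made up):

use itertools::Itertools;

fn main() {
    // Hypothetical input; the real command derives these from the incoming record.
    let table_name = "my_table";
    let columns = ["id", "name", "value"];

    let insert_statement = format!(
        "INSERT INTO [{}] ({}) VALUES ({})",
        table_name,
        // Backtick-quote every column name, then join with ", ".
        Itertools::intersperse(columns.iter().map(|c| format!("`{}`", c)), ", ".to_string())
            .collect::<String>(),
        // One "?" placeholder per column.
        Itertools::intersperse(itertools::repeat_n("?", columns.len()), ", ")
            .collect::<String>(),
    );

    assert_eq!(
        insert_statement,
        "INSERT INTO [my_table] (`id`, `name`, `value`) VALUES (?, ?, ?)"
    );
}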
@ -30,17 +30,17 @@ impl Command for SubCommand {
|
||||
call: &Call,
|
||||
_input: PipelineData,
|
||||
) -> Result<PipelineData, ShellError> {
|
||||
let span = call.head;
|
||||
let head = call.head;
|
||||
|
||||
Ok(TZ_VARIANTS
|
||||
.iter()
|
||||
.map(move |x| {
|
||||
Value::record(
|
||||
record! { "timezone" => Value::string(x.name(), span) },
|
||||
span,
|
||||
record! { "timezone" => Value::string(x.name(), head) },
|
||||
head,
|
||||
)
|
||||
})
|
||||
.into_pipeline_data(engine_state.ctrlc.clone()))
|
||||
.into_pipeline_data(head, engine_state.ctrlc.clone()))
|
||||
}
|
||||
|
||||
fn examples(&self) -> Vec<Example> {
|
||||
|
@@ -8,7 +8,7 @@ pub(crate) fn parse_date_from_string(
      match dtparse::parse(input) {
          Ok((native_dt, fixed_offset)) => {
              let offset = match fixed_offset {
-                 Some(fo) => fo,
+                 Some(offset) => offset,
                  None => *(Local::now().offset()),
              };
              match offset.from_local_datetime(&native_dt) {
@ -34,16 +34,14 @@ impl Command for Explain {
|
||||
stack: &mut Stack,
|
||||
call: &Call,
|
||||
_input: PipelineData,
|
||||
) -> Result<nu_protocol::PipelineData, nu_protocol::ShellError> {
|
||||
) -> Result<PipelineData, ShellError> {
|
||||
let head = call.head;
|
||||
// This was all delightfully stolen from benchmark :)
|
||||
let capture_block: Closure = call.req(engine_state, stack, 0)?;
|
||||
let block = engine_state.get_block(capture_block.block_id);
|
||||
let ctrlc = engine_state.ctrlc.clone();
|
||||
let mut stack = stack.captures_to_stack(capture_block.captures);
|
||||
|
||||
let elements = get_pipeline_elements(engine_state, &mut stack, block, call.head);
|
||||
|
||||
Ok(elements.into_pipeline_data(ctrlc))
|
||||
let elements = get_pipeline_elements(engine_state, &mut stack, block, head);
|
||||
Ok(Value::list(elements, head).into_pipeline_data())
|
||||
}
|
||||
|
||||
fn examples(&self) -> Vec<Example> {
|
||||
|
@ -47,13 +47,21 @@ impl Command for MetadataSet {
|
||||
let metadata = PipelineMetadata {
|
||||
data_source: DataSource::FilePath(path.into()),
|
||||
};
|
||||
Ok(input.into_pipeline_data_with_metadata(metadata, engine_state.ctrlc.clone()))
|
||||
Ok(input.into_pipeline_data_with_metadata(
|
||||
head,
|
||||
engine_state.ctrlc.clone(),
|
||||
metadata,
|
||||
))
|
||||
}
|
||||
(None, true) => {
|
||||
let metadata = PipelineMetadata {
|
||||
data_source: DataSource::Ls,
|
||||
};
|
||||
Ok(input.into_pipeline_data_with_metadata(metadata, engine_state.ctrlc.clone()))
|
||||
Ok(input.into_pipeline_data_with_metadata(
|
||||
head,
|
||||
engine_state.ctrlc.clone(),
|
||||
metadata,
|
||||
))
|
||||
}
|
||||
_ => Err(ShellError::IncorrectValue {
|
||||
msg: "Expected either --datasource-ls(-l) or --datasource-filepath(-f)".to_string(),
|
||||
|
@@ -57,7 +57,7 @@ impl Command for ConfigEnv {

          let env_vars_str = env_to_strings(engine_state, stack)?;
          let nu_config = match engine_state.get_config_path("env-path") {
-             Some(path) => path.clone(),
+             Some(path) => path,
              None => {
                  return Err(ShellError::GenericError {
                      error: "Could not find $nu.env-path".into(),
@@ -61,7 +61,7 @@ impl Command for ConfigNu {

          let env_vars_str = env_to_strings(engine_state, stack)?;
          let nu_config = match engine_state.get_config_path("config-path") {
-             Some(path) => path.clone(),
+             Some(path) => path,
              None => {
                  return Err(ShellError::GenericError {
                      error: "Could not find $nu.config-path".into(),
4
crates/nu-command/src/env/config/utils.rs
vendored
@ -1,10 +1,10 @@
use crate::ExternalCommand;
use nu_protocol::{OutDest, Span, Spanned};
use std::{collections::HashMap, path::PathBuf};
use std::{collections::HashMap, path::Path};

pub(crate) fn gen_command(
span: Span,
config_path: PathBuf,
config_path: &Path,
item: String,
config_args: Vec<String>,
env_vars_str: HashMap<String, String>,

@ -100,6 +100,9 @@ impl Command for Cd {
}
};

// Strip the trailing slash from the new path. This is required for PWD.
let path = nu_path::strip_trailing_slash(&path);

// Set OLDPWD.
// We're using `Stack::get_env_var()` instead of `EngineState::cwd()` to avoid a conversion roundtrip.
if let Some(oldpwd) = stack.get_env_var(engine_state, "PWD") {

@ -121,7 +121,7 @@ impl Command for Du {
|
||||
};
|
||||
Ok(
|
||||
du_for_one_pattern(args, ¤t_dir, tag, engine_state.ctrlc.clone())?
|
||||
.into_pipeline_data(engine_state.ctrlc.clone()),
|
||||
.into_pipeline_data(tag, engine_state.ctrlc.clone()),
|
||||
)
|
||||
}
|
||||
Some(paths) => {
|
||||
@ -147,7 +147,7 @@ impl Command for Du {
|
||||
Ok(result_iters
|
||||
.into_iter()
|
||||
.flatten()
|
||||
.into_pipeline_data(engine_state.ctrlc.clone()))
|
||||
.into_pipeline_data(tag, engine_state.ctrlc.clone()))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -199,7 +199,7 @@ impl Command for Glob {
|
||||
}
|
||||
};
|
||||
|
||||
Ok(if !not_patterns.is_empty() {
|
||||
let result = if !not_patterns.is_empty() {
|
||||
let np: Vec<&str> = not_patterns.iter().map(|s| s as &str).collect();
|
||||
let glob_results = glob
|
||||
.walk_with_behavior(
|
||||
@ -218,10 +218,7 @@ impl Command for Glob {
|
||||
inner: vec![],
|
||||
})?
|
||||
.flatten();
|
||||
let result = glob_to_value(ctrlc, glob_results, no_dirs, no_files, no_symlinks, span)?;
|
||||
result
|
||||
.into_iter()
|
||||
.into_pipeline_data(engine_state.ctrlc.clone())
|
||||
glob_to_value(ctrlc, glob_results, no_dirs, no_files, no_symlinks, span)
|
||||
} else {
|
||||
let glob_results = glob
|
||||
.walk_with_behavior(
|
||||
@ -232,11 +229,12 @@ impl Command for Glob {
|
||||
},
|
||||
)
|
||||
.flatten();
|
||||
let result = glob_to_value(ctrlc, glob_results, no_dirs, no_files, no_symlinks, span)?;
|
||||
result
|
||||
.into_iter()
|
||||
.into_pipeline_data(engine_state.ctrlc.clone())
|
||||
})
|
||||
glob_to_value(ctrlc, glob_results, no_dirs, no_files, no_symlinks, span)
|
||||
}?;
|
||||
|
||||
Ok(result
|
||||
.into_iter()
|
||||
.into_pipeline_data(span, engine_state.ctrlc.clone()))
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -115,10 +115,11 @@ impl Command for Ls {
|
||||
match input_pattern_arg {
|
||||
None => Ok(ls_for_one_pattern(None, args, ctrl_c.clone(), cwd)?
|
||||
.into_pipeline_data_with_metadata(
|
||||
call_span,
|
||||
ctrl_c,
|
||||
PipelineMetadata {
|
||||
data_source: DataSource::Ls,
|
||||
},
|
||||
ctrl_c,
|
||||
)),
|
||||
Some(pattern) => {
|
||||
let mut result_iters = vec![];
|
||||
@ -137,10 +138,11 @@ impl Command for Ls {
|
||||
.into_iter()
|
||||
.flatten()
|
||||
.into_pipeline_data_with_metadata(
|
||||
call_span,
|
||||
ctrl_c,
|
||||
PipelineMetadata {
|
||||
data_source: DataSource::Ls,
|
||||
},
|
||||
ctrl_c,
|
||||
))
|
||||
}
|
||||
}
|
||||
|
@ -209,7 +209,10 @@ impl Command for Open {
|
||||
} else if output.len() == 1 {
|
||||
Ok(output.remove(0))
|
||||
} else {
|
||||
Ok(output.into_iter().flatten().into_pipeline_data(ctrlc))
|
||||
Ok(output
|
||||
.into_iter()
|
||||
.flatten()
|
||||
.into_pipeline_data(call_span, ctrlc))
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -465,7 +465,7 @@ fn rm(
|
||||
}
|
||||
})
|
||||
.filter(|x| !matches!(x.get_type(), Type::Nothing))
|
||||
.into_pipeline_data(ctrlc)
|
||||
.into_pipeline_data(span, ctrlc)
|
||||
.print_not_formatted(engine_state, false, true)?;
|
||||
|
||||
Ok(PipelineData::empty())
|
||||
|
@ -116,7 +116,7 @@ only unwrap the outer list, and leave the variable's contents untouched."#
|
||||
Ok(input
|
||||
.into_iter()
|
||||
.chain(other.into_pipeline_data())
|
||||
.into_pipeline_data_with_metadata(metadata, engine_state.ctrlc.clone()))
|
||||
.into_pipeline_data_with_metadata(call.head, engine_state.ctrlc.clone(), metadata))
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -62,72 +62,67 @@ impl Command for Columns {
|
||||
|
||||
fn run(
|
||||
&self,
|
||||
engine_state: &EngineState,
|
||||
_engine_state: &EngineState,
|
||||
_stack: &mut Stack,
|
||||
call: &Call,
|
||||
input: PipelineData,
|
||||
) -> Result<PipelineData, ShellError> {
|
||||
let span = call.head;
|
||||
getcol(engine_state, span, input)
|
||||
getcol(call.head, input)
|
||||
}
|
||||
}
|
||||
|
||||
fn getcol(
|
||||
engine_state: &EngineState,
|
||||
head: Span,
|
||||
input: PipelineData,
|
||||
) -> Result<PipelineData, ShellError> {
|
||||
let ctrlc = engine_state.ctrlc.clone();
|
||||
fn getcol(head: Span, input: PipelineData) -> Result<PipelineData, ShellError> {
|
||||
let metadata = input.metadata();
|
||||
match input {
|
||||
PipelineData::Empty => Ok(PipelineData::Empty),
|
||||
PipelineData::Value(v, ..) => {
|
||||
let span = v.span();
|
||||
match v {
|
||||
let cols = match v {
|
||||
Value::List {
|
||||
vals: input_vals, ..
|
||||
} => {
|
||||
let input_cols = get_columns(&input_vals);
|
||||
Ok(input_cols
|
||||
.into_iter()
|
||||
.map(move |x| Value::string(x, span))
|
||||
.into_pipeline_data(ctrlc)
|
||||
.set_metadata(metadata))
|
||||
}
|
||||
} => get_columns(&input_vals)
|
||||
.into_iter()
|
||||
.map(move |x| Value::string(x, span))
|
||||
.collect(),
|
||||
Value::Custom { val, .. } => {
|
||||
// TODO: should we get CustomValue to expose columns in a more efficient way?
|
||||
// Would be nice to be able to get columns without generating the whole value
|
||||
let input_as_base_value = val.to_base_value(span)?;
|
||||
let input_cols = get_columns(&[input_as_base_value]);
|
||||
Ok(input_cols
|
||||
get_columns(&[input_as_base_value])
|
||||
.into_iter()
|
||||
.map(move |x| Value::string(x, span))
|
||||
.into_pipeline_data(ctrlc)
|
||||
.set_metadata(metadata))
|
||||
.collect()
|
||||
}
|
||||
Value::Record { val, .. } => Ok(val
|
||||
Value::Record { val, .. } => val
|
||||
.into_iter()
|
||||
.map(move |(x, _)| Value::string(x, head))
|
||||
.into_pipeline_data(ctrlc)
|
||||
.set_metadata(metadata)),
|
||||
.collect(),
|
||||
// Propagate errors
|
||||
Value::Error { error, .. } => Err(*error),
|
||||
other => Err(ShellError::OnlySupportsThisInputType {
|
||||
exp_input_type: "record or table".into(),
|
||||
wrong_type: other.get_type().to_string(),
|
||||
dst_span: head,
|
||||
src_span: other.span(),
|
||||
}),
|
||||
}
|
||||
Value::Error { error, .. } => return Err(*error),
|
||||
other => {
|
||||
return Err(ShellError::OnlySupportsThisInputType {
|
||||
exp_input_type: "record or table".into(),
|
||||
wrong_type: other.get_type().to_string(),
|
||||
dst_span: head,
|
||||
src_span: other.span(),
|
||||
})
|
||||
}
|
||||
};
|
||||
|
||||
Ok(Value::list(cols, head)
|
||||
.into_pipeline_data()
|
||||
.set_metadata(metadata))
|
||||
}
|
||||
PipelineData::ListStream(stream, ..) => {
|
||||
let v: Vec<_> = stream.into_iter().collect();
|
||||
let input_cols = get_columns(&v);
|
||||
|
||||
Ok(input_cols
|
||||
let values = stream.into_iter().collect::<Vec<_>>();
|
||||
let cols = get_columns(&values)
|
||||
.into_iter()
|
||||
.map(move |x| Value::string(x, head))
|
||||
.into_pipeline_data_with_metadata(metadata, ctrlc))
|
||||
.map(|s| Value::string(s, head))
|
||||
.collect();
|
||||
|
||||
Ok(Value::list(cols, head)
|
||||
.into_pipeline_data()
|
||||
.set_metadata(metadata))
|
||||
}
|
||||
PipelineData::ExternalStream { .. } => Err(ShellError::OnlySupportsThisInputType {
|
||||
exp_input_type: "record or table".into(),
|
||||
|
@ -90,7 +90,8 @@ fn drop_cols(
|
||||
// is displayed farther to the right.
|
||||
let metadata = input.metadata();
|
||||
match input {
|
||||
PipelineData::ListStream(mut stream, ..) => {
|
||||
PipelineData::ListStream(stream, ..) => {
|
||||
let mut stream = stream.into_iter();
|
||||
if let Some(mut first) = stream.next() {
|
||||
let drop_cols = drop_cols_set(&mut first, head, columns)?;
|
||||
|
||||
@ -101,7 +102,7 @@ fn drop_cols(
|
||||
Err(e) => Value::error(e, head),
|
||||
}
|
||||
}))
|
||||
.into_pipeline_data_with_metadata(metadata, engine_state.ctrlc.clone()))
|
||||
.into_pipeline_data_with_metadata(head, engine_state.ctrlc.clone(), metadata))
|
||||
} else {
|
||||
Ok(PipelineData::Empty)
|
||||
}
|
||||
|
@ -100,6 +100,7 @@ impl Command for DropNth {
|
||||
call: &Call,
|
||||
input: PipelineData,
|
||||
) -> Result<PipelineData, ShellError> {
|
||||
let head = call.head;
|
||||
let metadata = input.metadata();
|
||||
let number_or_range = extract_int_or_range(engine_state, stack, call)?;
|
||||
|
||||
@ -115,7 +116,7 @@ impl Command for DropNth {
|
||||
return Err(ShellError::UnsupportedInput {
|
||||
msg: "float range".into(),
|
||||
input: "value originates from here".into(),
|
||||
msg_span: call.head,
|
||||
msg_span: head,
|
||||
input_span: number_or_range.span,
|
||||
});
|
||||
}
|
||||
@ -129,7 +130,7 @@ impl Command for DropNth {
|
||||
return Err(ShellError::UnsupportedInput {
|
||||
msg: "drop nth accepts only positive ints".into(),
|
||||
input: "value originates from here".into(),
|
||||
msg_span: call.head,
|
||||
msg_span: head,
|
||||
input_span: number_or_range.span,
|
||||
});
|
||||
}
|
||||
@ -139,7 +140,7 @@ impl Command for DropNth {
|
||||
msg: "The upper bound needs to be equal or larger to the lower bound"
|
||||
.into(),
|
||||
input: "value originates from here".into(),
|
||||
msg_span: call.head,
|
||||
msg_span: head,
|
||||
input_span: number_or_range.span,
|
||||
});
|
||||
}
|
||||
@ -154,8 +155,9 @@ impl Command for DropNth {
|
||||
.into_iter()
|
||||
.take(start)
|
||||
.into_pipeline_data_with_metadata(
|
||||
metadata,
|
||||
head,
|
||||
engine_state.ctrlc.clone(),
|
||||
metadata,
|
||||
))
|
||||
}
|
||||
};
|
||||
@ -175,7 +177,7 @@ impl Command for DropNth {
|
||||
rows,
|
||||
current: 0,
|
||||
}
|
||||
.into_pipeline_data_with_metadata(metadata, engine_state.ctrlc.clone()))
|
||||
.into_pipeline_data_with_metadata(head, engine_state.ctrlc.clone(), metadata))
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -138,7 +138,7 @@ with 'transpose' first."#
|
||||
}
|
||||
}
|
||||
})
|
||||
.into_pipeline_data(engine_state.ctrlc.clone()))
|
||||
.into_pipeline_data(head, engine_state.ctrlc.clone()))
|
||||
}
|
||||
PipelineData::ExternalStream { stdout: None, .. } => Ok(PipelineData::empty()),
|
||||
PipelineData::ExternalStream {
|
||||
@ -170,7 +170,7 @@ with 'transpose' first."#
|
||||
}
|
||||
}
|
||||
})
|
||||
.into_pipeline_data(engine_state.ctrlc.clone()))
|
||||
.into_pipeline_data(head, engine_state.ctrlc.clone()))
|
||||
}
|
||||
// This match allows non-iterables to be accepted,
|
||||
// which is currently considered undesirable (Nov 2022).
|
||||
|
@ -60,10 +60,11 @@ pub fn empty(
|
||||
}
|
||||
},
|
||||
PipelineData::ListStream(s, ..) => {
|
||||
let empty = s.into_iter().next().is_none();
|
||||
if negate {
|
||||
Ok(Value::bool(s.count() != 0, head).into_pipeline_data())
|
||||
Ok(Value::bool(!empty, head).into_pipeline_data())
|
||||
} else {
|
||||
Ok(Value::bool(s.count() == 0, head).into_pipeline_data())
|
||||
Ok(Value::bool(empty, head).into_pipeline_data())
|
||||
}
|
||||
}
|
||||
PipelineData::Value(value, ..) => {
|
||||
|
@ -50,9 +50,9 @@ impl Command for Enumerate {
|
||||
call: &Call,
|
||||
input: PipelineData,
|
||||
) -> Result<PipelineData, ShellError> {
|
||||
let head = call.head;
|
||||
let metadata = input.metadata();
|
||||
let ctrlc = engine_state.ctrlc.clone();
|
||||
let span = call.head;
|
||||
|
||||
Ok(input
|
||||
.into_iter()
|
||||
@ -60,13 +60,13 @@ impl Command for Enumerate {
|
||||
.map(move |(idx, x)| {
|
||||
Value::record(
|
||||
record! {
|
||||
"index" => Value::int(idx as i64, span),
|
||||
"index" => Value::int(idx as i64, head),
|
||||
"item" => x,
|
||||
},
|
||||
span,
|
||||
head,
|
||||
)
|
||||
})
|
||||
.into_pipeline_data_with_metadata(metadata, ctrlc))
|
||||
.into_pipeline_data_with_metadata(head, ctrlc, metadata))
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -78,7 +78,7 @@ impl Command for Every {
|
||||
None
|
||||
}
|
||||
})
|
||||
.into_pipeline_data_with_metadata(metadata, engine_state.ctrlc.clone()))
|
||||
.into_pipeline_data_with_metadata(call.head, engine_state.ctrlc.clone(), metadata))
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -67,7 +67,7 @@ a variable. On the other hand, the "row condition" syntax is not supported."#
|
||||
Some(Value::error(err, span))
|
||||
}
|
||||
})
|
||||
.into_pipeline_data(engine_state.ctrlc.clone()))
|
||||
.into_pipeline_data(head, engine_state.ctrlc.clone()))
|
||||
}
|
||||
PipelineData::ExternalStream { stdout: None, .. } => Ok(PipelineData::empty()),
|
||||
PipelineData::ExternalStream {
|
||||
@ -92,7 +92,7 @@ a variable. On the other hand, the "row condition" syntax is not supported."#
|
||||
}
|
||||
}
|
||||
})
|
||||
.into_pipeline_data(engine_state.ctrlc.clone()))
|
||||
.into_pipeline_data(head, engine_state.ctrlc.clone()))
|
||||
}
|
||||
// This match allows non-iterables to be accepted,
|
||||
// which is currently considered undesirable (Nov 2022).
|
||||
@ -108,7 +108,7 @@ a variable. On the other hand, the "row condition" syntax is not supported."#
|
||||
Some(Value::error(err, span))
|
||||
}
|
||||
}
|
||||
.into_pipeline_data(engine_state.ctrlc.clone()))
|
||||
.into_pipeline_data(head, engine_state.ctrlc.clone()))
|
||||
}
|
||||
}
|
||||
.map(|data| data.set_metadata(metadata))
|
||||
|
@ -3,7 +3,7 @@ use fancy_regex::Regex;
|
||||
use nu_ansi_term::Style;
|
||||
use nu_color_config::StyleComputer;
|
||||
use nu_engine::command_prelude::*;
|
||||
use nu_protocol::{Config, ListStream};
|
||||
use nu_protocol::Config;
|
||||
use nu_utils::IgnoreCaseExt;
|
||||
|
||||
#[derive(Clone)]
|
||||
@ -416,9 +416,9 @@ fn find_with_rest_and_highlight(
|
||||
},
|
||||
ctrlc,
|
||||
),
|
||||
PipelineData::ListStream(stream, metadata) => Ok(ListStream::from_stream(
|
||||
stream
|
||||
.map(move |mut x| {
|
||||
PipelineData::ListStream(stream, metadata) => {
|
||||
let stream = stream.modify(|iter| {
|
||||
iter.map(move |mut x| {
|
||||
let span = x.span();
|
||||
match &mut x {
|
||||
Value::Record { val, .. } => highlight_terms_in_record_with_search_columns(
|
||||
@ -442,10 +442,11 @@ fn find_with_rest_and_highlight(
|
||||
&cols_to_search_in_filter,
|
||||
invert,
|
||||
)
|
||||
}),
|
||||
ctrlc.clone(),
|
||||
)
|
||||
.into_pipeline_data_with_metadata(metadata, ctrlc)),
|
||||
})
|
||||
});
|
||||
|
||||
Ok(PipelineData::ListStream(stream, metadata))
|
||||
}
|
||||
PipelineData::ExternalStream { stdout: None, .. } => Ok(PipelineData::empty()),
|
||||
PipelineData::ExternalStream {
|
||||
stdout: Some(stream),
|
||||
@ -496,7 +497,7 @@ fn find_with_rest_and_highlight(
|
||||
Err(e) => return Err(e),
|
||||
};
|
||||
}
|
||||
Ok(output.into_pipeline_data(ctrlc))
|
||||
Ok(output.into_pipeline_data(span, ctrlc))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -143,7 +143,7 @@ fn first_helper(
} else {
Ok(iter
.take(rows)
.into_pipeline_data_with_metadata(metadata, ctrlc))
.into_pipeline_data_with_metadata(span, ctrlc, metadata))
}
}
// Propagate errors by explicitly matching them before the final case.
@ -156,17 +156,18 @@ fn first_helper(
}),
}
}
PipelineData::ListStream(mut ls, metadata) => {
PipelineData::ListStream(stream, metadata) => {
if return_single_element {
if let Some(v) = ls.next() {
if let Some(v) = stream.into_iter().next() {
Ok(v.into_pipeline_data())
} else {
Err(ShellError::AccessEmptyContent { span: head })
}
} else {
Ok(ls
.take(rows)
.into_pipeline_data_with_metadata(metadata, engine_state.ctrlc.clone()))
Ok(PipelineData::ListStream(
stream.modify(|iter| iter.take(rows)),
metadata,
))
}
}
PipelineData::ExternalStream { span, .. } => Err(ShellError::OnlySupportsThisInputType {

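The `first` hunks above show the other recurring rewrite in this commit: rather than collecting a `ListStream` or hand-rebuilding it, the stream is kept lazy and its inner iterator is adapted in place with `ListStream::modify`, which preserves the stream's span and ctrl-c handle. A hedged sketch of that call shape (assuming `modify` takes a closure from the inner iterator to a new iterator, as the call sites here indicate):

    use nu_protocol::{ListStream, PipelineData, PipelineMetadata};

    // Sketch, not a verbatim excerpt: keep the stream lazy and only adapt its iterator.
    fn keep_first_n(
        stream: ListStream,
        metadata: Option<PipelineMetadata>,
        n: usize,
    ) -> PipelineData {
        PipelineData::ListStream(stream.modify(|iter| iter.take(n)), metadata)
    }
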
@ -89,7 +89,7 @@ If multiple cell paths are given, this will produce a list of values."#
|
||||
output.push(val?);
|
||||
}
|
||||
|
||||
Ok(output.into_iter().into_pipeline_data(ctrlc))
|
||||
Ok(output.into_iter().into_pipeline_data(span, ctrlc))
|
||||
}
|
||||
.map(|x| x.set_metadata(metadata))
|
||||
}
|
||||
|
@ -1,4 +1,5 @@
|
||||
use nu_engine::command_prelude::*;
|
||||
use nu_protocol::ValueIterator;
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct Group;
|
||||
@ -52,6 +53,7 @@ impl Command for Group {
|
||||
call: &Call,
|
||||
input: PipelineData,
|
||||
) -> Result<PipelineData, ShellError> {
|
||||
let head = call.head;
|
||||
let group_size: Spanned<usize> = call.req(engine_state, stack, 0)?;
|
||||
let ctrlc = engine_state.ctrlc.clone();
|
||||
let metadata = input.metadata();
|
||||
@ -61,16 +63,16 @@ impl Command for Group {
|
||||
let each_group_iterator = EachGroupIterator {
|
||||
group_size: group_size.item,
|
||||
input: Box::new(input.into_iter()),
|
||||
span: call.head,
|
||||
span: head,
|
||||
};
|
||||
|
||||
Ok(each_group_iterator.into_pipeline_data_with_metadata(metadata, ctrlc))
|
||||
Ok(each_group_iterator.into_pipeline_data_with_metadata(head, ctrlc, metadata))
|
||||
}
|
||||
}
|
||||
|
||||
struct EachGroupIterator {
|
||||
group_size: usize,
|
||||
input: Box<dyn Iterator<Item = Value> + Send>,
|
||||
input: ValueIterator,
|
||||
span: Span,
|
||||
}
|
||||
|
||||
|
@ -159,7 +159,7 @@ fn insert(
|
||||
}
|
||||
Ok(value.into_pipeline_data_with_metadata(metadata))
|
||||
}
|
||||
PipelineData::ListStream(mut stream, metadata) => {
|
||||
PipelineData::ListStream(stream, metadata) => {
|
||||
if let Some((
|
||||
&PathMember::Int {
|
||||
val,
|
||||
@ -169,6 +169,7 @@ fn insert(
|
||||
path,
|
||||
)) = cell_path.members.split_first()
|
||||
{
|
||||
let mut stream = stream.into_iter();
|
||||
let mut pre_elems = vec![];
|
||||
|
||||
for idx in 0..val {
|
||||
@ -221,40 +222,39 @@ fn insert(
|
||||
Ok(pre_elems
|
||||
.into_iter()
|
||||
.chain(stream)
|
||||
.into_pipeline_data_with_metadata(metadata, engine_state.ctrlc.clone()))
|
||||
.into_pipeline_data_with_metadata(head, engine_state.ctrlc.clone(), metadata))
|
||||
} else if let Value::Closure { val, .. } = replacement {
|
||||
let mut closure = ClosureEval::new(engine_state, stack, val);
|
||||
Ok(stream
|
||||
.map(move |mut value| {
|
||||
let err = insert_value_by_closure(
|
||||
&mut value,
|
||||
&mut closure,
|
||||
head,
|
||||
&cell_path.members,
|
||||
false,
|
||||
);
|
||||
let stream = stream.map(move |mut value| {
|
||||
let err = insert_value_by_closure(
|
||||
&mut value,
|
||||
&mut closure,
|
||||
head,
|
||||
&cell_path.members,
|
||||
false,
|
||||
);
|
||||
|
||||
if let Err(e) = err {
|
||||
Value::error(e, head)
|
||||
} else {
|
||||
value
|
||||
}
|
||||
})
|
||||
.into_pipeline_data_with_metadata(metadata, engine_state.ctrlc.clone()))
|
||||
if let Err(e) = err {
|
||||
Value::error(e, head)
|
||||
} else {
|
||||
value
|
||||
}
|
||||
});
|
||||
Ok(PipelineData::ListStream(stream, metadata))
|
||||
} else {
|
||||
Ok(stream
|
||||
.map(move |mut value| {
|
||||
if let Err(e) = value.insert_data_at_cell_path(
|
||||
&cell_path.members,
|
||||
replacement.clone(),
|
||||
head,
|
||||
) {
|
||||
Value::error(e, head)
|
||||
} else {
|
||||
value
|
||||
}
|
||||
})
|
||||
.into_pipeline_data_with_metadata(metadata, engine_state.ctrlc.clone()))
|
||||
let stream = stream.map(move |mut value| {
|
||||
if let Err(e) = value.insert_data_at_cell_path(
|
||||
&cell_path.members,
|
||||
replacement.clone(),
|
||||
head,
|
||||
) {
|
||||
Value::error(e, head)
|
||||
} else {
|
||||
value
|
||||
}
|
||||
});
|
||||
|
||||
Ok(PipelineData::ListStream(stream, metadata))
|
||||
}
|
||||
}
|
||||
PipelineData::Empty => Err(ShellError::IncompatiblePathAccess {
|
||||
|
@ -147,7 +147,7 @@ interleave
|
||||
// Now that threads are writing to the channel, we just return it as a stream
|
||||
Ok(rx
|
||||
.into_iter()
|
||||
.into_pipeline_data(engine_state.ctrlc.clone()))
|
||||
.into_pipeline_data(head, engine_state.ctrlc.clone()))
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -65,7 +65,7 @@ impl Command for Items {
|
||||
}
|
||||
}
|
||||
})
|
||||
.into_pipeline_data(engine_state.ctrlc.clone()))
|
||||
.into_pipeline_data(head, engine_state.ctrlc.clone()))
|
||||
}
|
||||
Value::Error { error, .. } => Err(*error),
|
||||
other => Err(ShellError::OnlySupportsThisInputType {
|
||||
|
@ -52,9 +52,8 @@ impl Command for Lines {
|
||||
}
|
||||
PipelineData::Empty => Ok(PipelineData::Empty),
|
||||
PipelineData::ListStream(stream, metadata) => {
|
||||
let iter = stream
|
||||
.into_iter()
|
||||
.filter_map(move |value| {
|
||||
let stream = stream.modify(|iter| {
|
||||
iter.filter_map(move |value| {
|
||||
let span = value.span();
|
||||
if let Value::String { val, .. } = value {
|
||||
Some(
|
||||
@ -72,11 +71,10 @@ impl Command for Lines {
|
||||
None
|
||||
}
|
||||
})
|
||||
.flatten();
|
||||
.flatten()
|
||||
});
|
||||
|
||||
Ok(iter
|
||||
.into_pipeline_data(engine_state.ctrlc.clone())
|
||||
.set_metadata(metadata))
|
||||
Ok(PipelineData::ListStream(stream, metadata))
|
||||
}
|
||||
PipelineData::Value(val, ..) => {
|
||||
match val {
|
||||
@ -97,7 +95,7 @@ impl Command for Lines {
|
||||
..
|
||||
} => Ok(RawStreamLinesAdapter::new(stream, head, skip_empty)
|
||||
.map(move |x| x.unwrap_or_else(|err| Value::error(err, head)))
|
||||
.into_pipeline_data(ctrlc)
|
||||
.into_pipeline_data(head, ctrlc)
|
||||
.set_metadata(metadata)),
|
||||
}
|
||||
}
|
||||
|
@ -85,11 +85,10 @@ repeating this process with row 1, and so on."#
|
||||
call: &Call,
|
||||
input: PipelineData,
|
||||
) -> Result<PipelineData, ShellError> {
|
||||
let head = call.head;
|
||||
let merge_value: Value = call.req(engine_state, stack, 0)?;
|
||||
|
||||
let metadata = input.metadata();
|
||||
let ctrlc = engine_state.ctrlc.clone();
|
||||
let call = call.clone();
|
||||
|
||||
match (&input, merge_value) {
|
||||
// table (list of records)
|
||||
@ -104,29 +103,25 @@ repeating this process with row 1, and so on."#
|
||||
.into_iter()
|
||||
.map(move |inp| match (inp.as_record(), table_iter.next()) {
|
||||
(Ok(inp), Some(to_merge)) => match to_merge.as_record() {
|
||||
Ok(to_merge) => Value::record(do_merge(inp, to_merge), call.head),
|
||||
Err(error) => Value::error(error, call.head),
|
||||
Ok(to_merge) => Value::record(do_merge(inp, to_merge), head),
|
||||
Err(error) => Value::error(error, head),
|
||||
},
|
||||
(_, None) => inp,
|
||||
(Err(error), _) => Value::error(error, call.head),
|
||||
(Err(error), _) => Value::error(error, head),
|
||||
});
|
||||
|
||||
if let Some(md) = metadata {
|
||||
Ok(res.into_pipeline_data_with_metadata(md, ctrlc))
|
||||
} else {
|
||||
Ok(res.into_pipeline_data(ctrlc))
|
||||
}
|
||||
Ok(res.into_pipeline_data_with_metadata(head, ctrlc, metadata))
|
||||
}
|
||||
// record
|
||||
(
|
||||
PipelineData::Value(Value::Record { val: inp, .. }, ..),
|
||||
Value::Record { val: to_merge, .. },
|
||||
) => Ok(Value::record(do_merge(inp, &to_merge), call.head).into_pipeline_data()),
|
||||
) => Ok(Value::record(do_merge(inp, &to_merge), head).into_pipeline_data()),
|
||||
(PipelineData::Value(val, ..), ..) => {
|
||||
// Only point the "value originates here" arrow at the merge value
|
||||
// if it was generated from a block. Otherwise, point at the pipeline value. -Leon 2022-10-27
|
||||
let span = if val.span() == Span::test_data() {
|
||||
Span::new(call.head.start, call.head.start)
|
||||
Span::new(head.start, head.start)
|
||||
} else {
|
||||
val.span()
|
||||
};
|
||||
@ -134,14 +129,14 @@ repeating this process with row 1, and so on."#
|
||||
Err(ShellError::PipelineMismatch {
|
||||
exp_input_type: "input, and argument, to be both record or both table"
|
||||
.to_string(),
|
||||
dst_span: call.head,
|
||||
dst_span: head,
|
||||
src_span: span,
|
||||
})
|
||||
}
|
||||
_ => Err(ShellError::PipelineMismatch {
|
||||
exp_input_type: "input, and argument, to be both record or both table".to_string(),
|
||||
dst_span: call.head,
|
||||
src_span: Span::new(call.head.start, call.head.start),
|
||||
dst_span: head,
|
||||
src_span: Span::new(head.start, head.start),
|
||||
}),
|
||||
}
|
||||
}
|
||||
|
@ -109,6 +109,7 @@ impl Command for Move {
|
||||
call: &Call,
|
||||
input: PipelineData,
|
||||
) -> Result<PipelineData, ShellError> {
|
||||
let head = call.head;
|
||||
let columns: Vec<Value> = call.rest(engine_state, stack, 0)?;
|
||||
let after: Option<Value> = call.get_flag(engine_state, stack, "after")?;
|
||||
let before: Option<Value> = call.get_flag(engine_state, stack, "before")?;
|
||||
@ -126,7 +127,7 @@ impl Command for Move {
|
||||
return Err(ShellError::GenericError {
|
||||
error: "Cannot move columns".into(),
|
||||
msg: "Use either --after, or --before, not both".into(),
|
||||
span: Some(call.head),
|
||||
span: Some(head),
|
||||
help: None,
|
||||
inner: vec![],
|
||||
})
|
||||
@ -135,7 +136,7 @@ impl Command for Move {
|
||||
return Err(ShellError::GenericError {
|
||||
error: "Cannot move columns".into(),
|
||||
msg: "Missing --after or --before flag".into(),
|
||||
span: Some(call.head),
|
||||
span: Some(head),
|
||||
help: None,
|
||||
inner: vec![],
|
||||
})
|
||||
@ -144,36 +145,29 @@ impl Command for Move {
|
||||
|
||||
let metadata = input.metadata();
|
||||
let ctrlc = engine_state.ctrlc.clone();
|
||||
let call = call.clone();
|
||||
|
||||
match input {
|
||||
PipelineData::Value(Value::List { .. }, ..) | PipelineData::ListStream { .. } => {
|
||||
let res = input.into_iter().map(move |x| match x.as_record() {
|
||||
Ok(record) => {
|
||||
match move_record_columns(record, &columns, &before_or_after, call.head) {
|
||||
match move_record_columns(record, &columns, &before_or_after, head) {
|
||||
Ok(val) => val,
|
||||
Err(error) => Value::error(error, call.head),
|
||||
Err(error) => Value::error(error, head),
|
||||
}
|
||||
}
|
||||
Err(error) => Value::error(error, call.head),
|
||||
Err(error) => Value::error(error, head),
|
||||
});
|
||||
|
||||
if let Some(md) = metadata {
|
||||
Ok(res.into_pipeline_data_with_metadata(md, ctrlc))
|
||||
} else {
|
||||
Ok(res.into_pipeline_data(ctrlc))
|
||||
}
|
||||
Ok(res.into_pipeline_data_with_metadata(head, ctrlc, metadata))
|
||||
}
|
||||
PipelineData::Value(Value::Record { val, .. }, ..) => {
|
||||
Ok(
|
||||
move_record_columns(&val, &columns, &before_or_after, call.head)?
|
||||
.into_pipeline_data(),
|
||||
)
|
||||
Ok(move_record_columns(&val, &columns, &before_or_after, head)?
|
||||
.into_pipeline_data())
|
||||
}
|
||||
_ => Err(ShellError::PipelineMismatch {
|
||||
exp_input_type: "record or table".to_string(),
|
||||
dst_span: call.head,
|
||||
src_span: Span::new(call.head.start, call.head.start),
|
||||
dst_span: head,
|
||||
src_span: Span::new(head.start, head.start),
|
||||
}),
|
||||
}
|
||||
}
|
||||
|
@ -159,7 +159,7 @@ impl Command for ParEach {
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
apply_order(vec).into_pipeline_data(engine_state.ctrlc.clone())
|
||||
apply_order(vec).into_pipeline_data(span, engine_state.ctrlc.clone())
|
||||
})),
|
||||
Value::Range { val, .. } => Ok(create_pool(max_threads)?.install(|| {
|
||||
let ctrlc = engine_state.ctrlc.clone();
|
||||
@ -186,7 +186,7 @@ impl Command for ParEach {
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
apply_order(vec).into_pipeline_data(ctrlc)
|
||||
apply_order(vec).into_pipeline_data(span, ctrlc)
|
||||
})),
|
||||
// This match allows non-iterables to be accepted,
|
||||
// which is currently considered undesirable (Nov 2022).
|
||||
@ -197,6 +197,7 @@ impl Command for ParEach {
|
||||
}
|
||||
PipelineData::ListStream(stream, ..) => Ok(create_pool(max_threads)?.install(|| {
|
||||
let vec = stream
|
||||
.into_iter()
|
||||
.enumerate()
|
||||
.par_bridge()
|
||||
.map(move |(index, value)| {
|
||||
@ -216,7 +217,7 @@ impl Command for ParEach {
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
apply_order(vec).into_pipeline_data(engine_state.ctrlc.clone())
|
||||
apply_order(vec).into_pipeline_data(head, engine_state.ctrlc.clone())
|
||||
})),
|
||||
PipelineData::ExternalStream { stdout: None, .. } => Ok(PipelineData::empty()),
|
||||
PipelineData::ExternalStream {
|
||||
@ -241,7 +242,7 @@ impl Command for ParEach {
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
apply_order(vec).into_pipeline_data(engine_state.ctrlc.clone())
|
||||
apply_order(vec).into_pipeline_data(head, engine_state.ctrlc.clone())
|
||||
})),
|
||||
}
|
||||
.and_then(|x| x.filter(|v| !v.is_nothing(), engine_state.ctrlc.clone()))
|
||||
|
@ -117,7 +117,7 @@ only unwrap the outer list, and leave the variable's contents untouched."#
|
||||
.into_pipeline_data()
|
||||
.into_iter()
|
||||
.chain(input)
|
||||
.into_pipeline_data_with_metadata(metadata, engine_state.ctrlc.clone()))
|
||||
.into_pipeline_data_with_metadata(call.head, engine_state.ctrlc.clone(), metadata))
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -64,6 +64,7 @@ impl Command for Range {
|
||||
call: &Call,
|
||||
input: PipelineData,
|
||||
) -> Result<PipelineData, ShellError> {
|
||||
let head = call.head;
|
||||
let metadata = input.metadata();
|
||||
let rows: Spanned<NumRange> = call.req(engine_state, stack, 0)?;
|
||||
|
||||
@ -102,20 +103,20 @@ impl Command for Range {
|
||||
};
|
||||
|
||||
if from > to {
|
||||
Ok(PipelineData::Value(Value::nothing(call.head), None))
|
||||
Ok(PipelineData::Value(Value::nothing(head), None))
|
||||
} else {
|
||||
let iter = v.into_iter().skip(from).take(to - from + 1);
|
||||
Ok(iter.into_pipeline_data(engine_state.ctrlc.clone()))
|
||||
Ok(iter.into_pipeline_data(head, engine_state.ctrlc.clone()))
|
||||
}
|
||||
} else {
|
||||
let from = start as usize;
|
||||
let to = end as usize;
|
||||
|
||||
if from > to {
|
||||
Ok(PipelineData::Value(Value::nothing(call.head), None))
|
||||
Ok(PipelineData::Value(Value::nothing(head), None))
|
||||
} else {
|
||||
let iter = input.into_iter().skip(from).take(to - from + 1);
|
||||
Ok(iter.into_pipeline_data(engine_state.ctrlc.clone()))
|
||||
Ok(iter.into_pipeline_data(head, engine_state.ctrlc.clone()))
|
||||
}
|
||||
}
|
||||
.map(|x| x.set_metadata(metadata))
|
||||
|
@ -59,11 +59,11 @@ impl Command for Reverse {
|
||||
call: &Call,
|
||||
input: PipelineData,
|
||||
) -> Result<PipelineData, ShellError> {
|
||||
let head = call.head;
|
||||
let metadata = input.metadata();
|
||||
|
||||
let v: Vec<_> = input.into_iter_strict(call.head)?.collect();
|
||||
let iter = v.into_iter().rev();
|
||||
Ok(iter.into_pipeline_data_with_metadata(metadata, engine_state.ctrlc.clone()))
|
||||
let values = input.into_iter_strict(head)?.collect::<Vec<_>>();
|
||||
let iter = values.into_iter().rev();
|
||||
Ok(iter.into_pipeline_data_with_metadata(head, engine_state.ctrlc.clone(), metadata))
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -215,7 +215,7 @@ fn select(
|
||||
rows: unique_rows.into_iter().peekable(),
|
||||
current: 0,
|
||||
}
|
||||
.into_pipeline_data_with_metadata(metadata, engine_state.ctrlc.clone())
|
||||
.into_pipeline_data_with_metadata(call_span, engine_state.ctrlc.clone(), metadata)
|
||||
} else {
|
||||
input
|
||||
};
|
||||
@ -253,9 +253,11 @@ fn select(
|
||||
}
|
||||
}
|
||||
|
||||
Ok(output
|
||||
.into_iter()
|
||||
.into_pipeline_data_with_metadata(metadata, engine_state.ctrlc.clone()))
|
||||
Ok(output.into_iter().into_pipeline_data_with_metadata(
|
||||
call_span,
|
||||
engine_state.ctrlc.clone(),
|
||||
metadata,
|
||||
))
|
||||
}
|
||||
_ => {
|
||||
if !columns.is_empty() {
|
||||
@ -300,7 +302,11 @@ fn select(
|
||||
}
|
||||
}
|
||||
|
||||
Ok(values.into_pipeline_data_with_metadata(metadata, engine_state.ctrlc.clone()))
|
||||
Ok(values.into_pipeline_data_with_metadata(
|
||||
call_span,
|
||||
engine_state.ctrlc.clone(),
|
||||
metadata,
|
||||
))
|
||||
}
|
||||
_ => Ok(PipelineData::empty()),
|
||||
}
|
||||
|
@ -30,10 +30,10 @@ impl Command for Shuffle {
|
||||
input: PipelineData,
|
||||
) -> Result<PipelineData, ShellError> {
|
||||
let metadata = input.metadata();
|
||||
let mut v: Vec<_> = input.into_iter_strict(call.head)?.collect();
|
||||
v.shuffle(&mut thread_rng());
|
||||
let iter = v.into_iter();
|
||||
Ok(iter.into_pipeline_data_with_metadata(metadata, engine_state.ctrlc.clone()))
|
||||
let mut values = input.into_iter_strict(call.head)?.collect::<Vec<_>>();
|
||||
values.shuffle(&mut thread_rng());
|
||||
let iter = values.into_iter();
|
||||
Ok(iter.into_pipeline_data_with_metadata(call.head, engine_state.ctrlc.clone(), metadata))
|
||||
}
|
||||
|
||||
fn examples(&self) -> Vec<Example> {
|
||||
|
@ -101,7 +101,7 @@ impl Command for Skip {
|
||||
_ => Ok(input
|
||||
.into_iter_strict(call.head)?
|
||||
.skip(n)
|
||||
.into_pipeline_data_with_metadata(metadata, ctrlc)),
|
||||
.into_pipeline_data_with_metadata(input_span, ctrlc, metadata)),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -88,7 +88,7 @@ impl Command for SkipUntil {
|
||||
.map(|data| data.into_value(head).is_false())
|
||||
.unwrap_or(false)
|
||||
})
|
||||
.into_pipeline_data_with_metadata(metadata, engine_state.ctrlc.clone()))
|
||||
.into_pipeline_data_with_metadata(head, engine_state.ctrlc.clone(), metadata))
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -93,7 +93,7 @@ impl Command for SkipWhile {
|
||||
.map(|data| data.into_value(head).is_true())
|
||||
.unwrap_or(false)
|
||||
})
|
||||
.into_pipeline_data_with_metadata(metadata, engine_state.ctrlc.clone()))
|
||||
.into_pipeline_data_with_metadata(head, engine_state.ctrlc.clone(), metadata))
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -134,10 +134,11 @@ impl Command for Sort {
|
||||
call: &Call,
|
||||
input: PipelineData,
|
||||
) -> Result<PipelineData, ShellError> {
|
||||
let head = call.head;
|
||||
let reverse = call.has_flag(engine_state, stack, "reverse")?;
|
||||
let insensitive = call.has_flag(engine_state, stack, "ignore-case")?;
|
||||
let natural = call.has_flag(engine_state, stack, "natural")?;
|
||||
let metadata = &input.metadata();
|
||||
let metadata = input.metadata();
|
||||
|
||||
let span = input.span().unwrap_or(call.head);
|
||||
match input {
|
||||
@ -156,18 +157,18 @@ impl Command for Sort {
|
||||
pipe_data => {
|
||||
let mut vec: Vec<_> = pipe_data.into_iter().collect();
|
||||
|
||||
sort(&mut vec, call.head, insensitive, natural)?;
|
||||
sort(&mut vec, head, insensitive, natural)?;
|
||||
|
||||
if reverse {
|
||||
vec.reverse()
|
||||
}
|
||||
|
||||
let iter = vec.into_iter();
|
||||
match metadata {
|
||||
Some(m) => Ok(iter
|
||||
.into_pipeline_data_with_metadata(m.clone(), engine_state.ctrlc.clone())),
|
||||
None => Ok(iter.into_pipeline_data(engine_state.ctrlc.clone())),
|
||||
}
|
||||
Ok(iter.into_pipeline_data_with_metadata(
|
||||
head,
|
||||
engine_state.ctrlc.clone(),
|
||||
metadata,
|
||||
))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -78,33 +78,29 @@ impl Command for SortBy {
|
||||
call: &Call,
|
||||
input: PipelineData,
|
||||
) -> Result<PipelineData, ShellError> {
|
||||
let head = call.head;
|
||||
let columns: Vec<String> = call.rest(engine_state, stack, 0)?;
|
||||
let reverse = call.has_flag(engine_state, stack, "reverse")?;
|
||||
let insensitive = call.has_flag(engine_state, stack, "ignore-case")?;
|
||||
let natural = call.has_flag(engine_state, stack, "natural")?;
|
||||
let metadata = &input.metadata();
|
||||
let mut vec: Vec<_> = input.into_iter_strict(call.head)?.collect();
|
||||
let metadata = input.metadata();
|
||||
let mut vec: Vec<_> = input.into_iter_strict(head)?.collect();
|
||||
|
||||
if columns.is_empty() {
|
||||
return Err(ShellError::MissingParameter {
|
||||
param_name: "columns".into(),
|
||||
span: call.head,
|
||||
span: head,
|
||||
});
|
||||
}
|
||||
|
||||
crate::sort(&mut vec, columns, call.head, insensitive, natural)?;
|
||||
crate::sort(&mut vec, columns, head, insensitive, natural)?;
|
||||
|
||||
if reverse {
|
||||
vec.reverse()
|
||||
}
|
||||
|
||||
let iter = vec.into_iter();
|
||||
match metadata {
|
||||
Some(m) => {
|
||||
Ok(iter.into_pipeline_data_with_metadata(m.clone(), engine_state.ctrlc.clone()))
|
||||
}
|
||||
None => Ok(iter.into_pipeline_data(engine_state.ctrlc.clone())),
|
||||
}
|
||||
Ok(iter.into_pipeline_data_with_metadata(head, engine_state.ctrlc.clone(), metadata))
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -42,6 +42,7 @@ impl Command for Take {
|
||||
call: &Call,
|
||||
input: PipelineData,
|
||||
) -> Result<PipelineData, ShellError> {
|
||||
let head = call.head;
|
||||
let rows_desired: usize = call.req(engine_state, stack, 0)?;
|
||||
|
||||
let ctrlc = engine_state.ctrlc.clone();
|
||||
@ -54,7 +55,7 @@ impl Command for Take {
|
||||
Value::List { vals, .. } => Ok(vals
|
||||
.into_iter()
|
||||
.take(rows_desired)
|
||||
.into_pipeline_data_with_metadata(metadata, ctrlc)),
|
||||
.into_pipeline_data_with_metadata(head, ctrlc, metadata)),
|
||||
Value::Binary { val, .. } => {
|
||||
let slice: Vec<u8> = val.into_iter().take(rows_desired).collect();
|
||||
Ok(PipelineData::Value(Value::binary(slice, span), metadata))
|
||||
@ -62,33 +63,34 @@ impl Command for Take {
|
||||
Value::Range { val, .. } => Ok(val
|
||||
.into_range_iter(span, ctrlc.clone())
|
||||
.take(rows_desired)
|
||||
.into_pipeline_data_with_metadata(metadata, ctrlc)),
|
||||
.into_pipeline_data_with_metadata(head, ctrlc, metadata)),
|
||||
// Propagate errors by explicitly matching them before the final case.
|
||||
Value::Error { error, .. } => Err(*error),
|
||||
other => Err(ShellError::OnlySupportsThisInputType {
|
||||
exp_input_type: "list, binary or range".into(),
|
||||
wrong_type: other.get_type().to_string(),
|
||||
dst_span: call.head,
|
||||
dst_span: head,
|
||||
src_span: other.span(),
|
||||
}),
|
||||
}
|
||||
}
|
||||
PipelineData::ListStream(ls, metadata) => Ok(ls
|
||||
.take(rows_desired)
|
||||
.into_pipeline_data_with_metadata(metadata, ctrlc)),
|
||||
PipelineData::ListStream(stream, metadata) => Ok(PipelineData::ListStream(
|
||||
stream.modify(|iter| iter.take(rows_desired)),
|
||||
metadata,
|
||||
)),
|
||||
PipelineData::ExternalStream { span, .. } => {
|
||||
Err(ShellError::OnlySupportsThisInputType {
|
||||
exp_input_type: "list, binary or range".into(),
|
||||
wrong_type: "raw data".into(),
|
||||
dst_span: call.head,
|
||||
dst_span: head,
|
||||
src_span: span,
|
||||
})
|
||||
}
|
||||
PipelineData::Empty => Err(ShellError::OnlySupportsThisInputType {
|
||||
exp_input_type: "list, binary or range".into(),
|
||||
wrong_type: "null".into(),
|
||||
dst_span: call.head,
|
||||
src_span: call.head,
|
||||
dst_span: head,
|
||||
src_span: head,
|
||||
}),
|
||||
}
|
||||
}
|
||||
|
@ -84,7 +84,7 @@ impl Command for TakeUntil {
|
||||
.map(|data| data.into_value(head).is_false())
|
||||
.unwrap_or(false)
|
||||
})
|
||||
.into_pipeline_data_with_metadata(metadata, engine_state.ctrlc.clone()))
|
||||
.into_pipeline_data_with_metadata(head, engine_state.ctrlc.clone(), metadata))
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -84,7 +84,7 @@ impl Command for TakeWhile {
|
||||
.map(|data| data.into_value(head).is_true())
|
||||
.unwrap_or(false)
|
||||
})
|
||||
.into_pipeline_data_with_metadata(metadata, engine_state.ctrlc.clone()))
|
||||
.into_pipeline_data_with_metadata(head, engine_state.ctrlc.clone(), metadata))
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -64,6 +64,7 @@ use it in your pipeline."#
|
||||
call: &Call,
|
||||
input: PipelineData,
|
||||
) -> Result<PipelineData, ShellError> {
|
||||
let head = call.head;
|
||||
let use_stderr = call.has_flag(engine_state, stack, "stderr")?;
|
||||
|
||||
let Spanned {
|
||||
@ -125,7 +126,7 @@ use it in your pipeline."#
|
||||
if use_stderr {
|
||||
let stderr = stderr
|
||||
.map(|stderr| {
|
||||
let iter = tee(stderr.stream, with_stream).err_span(call.head)?;
|
||||
let iter = tee(stderr.stream, with_stream).err_span(head)?;
|
||||
Ok::<_, ShellError>(RawStream::new(
|
||||
Box::new(iter.map(flatten_result)),
|
||||
stderr.ctrlc,
|
||||
@ -145,7 +146,7 @@ use it in your pipeline."#
|
||||
} else {
|
||||
let stdout = stdout
|
||||
.map(|stdout| {
|
||||
let iter = tee(stdout.stream, with_stream).err_span(call.head)?;
|
||||
let iter = tee(stdout.stream, with_stream).err_span(head)?;
|
||||
Ok::<_, ShellError>(RawStream::new(
|
||||
Box::new(iter.map(flatten_result)),
|
||||
stdout.ctrlc,
|
||||
@ -168,15 +169,16 @@ use it in your pipeline."#
|
||||
_ if use_stderr => Err(ShellError::UnsupportedInput {
|
||||
msg: "--stderr can only be used on external streams".into(),
|
||||
input: "the input to `tee` is not an external stream".into(),
|
||||
msg_span: call.head,
|
||||
input_span: input.span().unwrap_or(call.head),
|
||||
msg_span: head,
|
||||
input_span: input.span().unwrap_or(head),
|
||||
}),
|
||||
// Handle others with the plain iterator
|
||||
_ => {
|
||||
let teed = tee(input.into_iter(), move |rx| {
|
||||
let input_from_channel = rx.into_pipeline_data_with_metadata(
|
||||
metadata_clone,
|
||||
head,
|
||||
closure_engine_state.ctrlc.clone(),
|
||||
metadata_clone,
|
||||
);
|
||||
let result = eval_block_with_early_return(
|
||||
&closure_engine_state,
|
||||
@ -187,9 +189,13 @@ use it in your pipeline."#
|
||||
// Make sure to drain any iterator produced to avoid unexpected behavior
|
||||
result.and_then(|data| data.drain())
|
||||
})
|
||||
.err_span(call.head)?
|
||||
.err_span(head)?
|
||||
.map(move |result| result.unwrap_or_else(|err| Value::error(err, closure_span)))
|
||||
.into_pipeline_data_with_metadata(metadata, engine_state.ctrlc.clone());
|
||||
.into_pipeline_data_with_metadata(
|
||||
head,
|
||||
engine_state.ctrlc.clone(),
|
||||
metadata,
|
||||
);
|
||||
|
||||
Ok(teed)
|
||||
}
|
||||
|
@ -284,7 +284,7 @@ pub fn transpose(
|
||||
metadata,
|
||||
))
|
||||
} else {
|
||||
Ok(result_data.into_pipeline_data_with_metadata(metadata, ctrlc))
|
||||
Ok(result_data.into_pipeline_data_with_metadata(name, ctrlc, metadata))
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -143,7 +143,7 @@ fn update(
|
||||
}
|
||||
Ok(value.into_pipeline_data_with_metadata(metadata))
|
||||
}
|
||||
PipelineData::ListStream(mut stream, metadata) => {
|
||||
PipelineData::ListStream(stream, metadata) => {
|
||||
if let Some((
|
||||
&PathMember::Int {
|
||||
val,
|
||||
@ -153,6 +153,7 @@ fn update(
|
||||
path,
|
||||
)) = cell_path.members.split_first()
|
||||
{
|
||||
let mut stream = stream.into_iter();
|
||||
let mut pre_elems = vec![];
|
||||
|
||||
for idx in 0..=val {
|
||||
@ -186,38 +187,38 @@ fn update(
|
||||
Ok(pre_elems
|
||||
.into_iter()
|
||||
.chain(stream)
|
||||
.into_pipeline_data_with_metadata(metadata, engine_state.ctrlc.clone()))
|
||||
.into_pipeline_data_with_metadata(head, engine_state.ctrlc.clone(), metadata))
|
||||
} else if let Value::Closure { val, .. } = replacement {
|
||||
let mut closure = ClosureEval::new(engine_state, stack, val);
|
||||
Ok(stream
|
||||
.map(move |mut value| {
|
||||
let err = update_value_by_closure(
|
||||
&mut value,
|
||||
&mut closure,
|
||||
head,
|
||||
&cell_path.members,
|
||||
false,
|
||||
);
|
||||
let stream = stream.map(move |mut value| {
|
||||
let err = update_value_by_closure(
|
||||
&mut value,
|
||||
&mut closure,
|
||||
head,
|
||||
&cell_path.members,
|
||||
false,
|
||||
);
|
||||
|
||||
if let Err(e) = err {
|
||||
Value::error(e, head)
|
||||
} else {
|
||||
value
|
||||
}
|
||||
})
|
||||
.into_pipeline_data_with_metadata(metadata, engine_state.ctrlc.clone()))
|
||||
if let Err(e) = err {
|
||||
Value::error(e, head)
|
||||
} else {
|
||||
value
|
||||
}
|
||||
});
|
||||
|
||||
Ok(PipelineData::ListStream(stream, metadata))
|
||||
} else {
|
||||
Ok(stream
|
||||
.map(move |mut value| {
|
||||
if let Err(e) =
|
||||
value.update_data_at_cell_path(&cell_path.members, replacement.clone())
|
||||
{
|
||||
Value::error(e, head)
|
||||
} else {
|
||||
value
|
||||
}
|
||||
})
|
||||
.into_pipeline_data_with_metadata(metadata, engine_state.ctrlc.clone()))
|
||||
let stream = stream.map(move |mut value| {
|
||||
if let Err(e) =
|
||||
value.update_data_at_cell_path(&cell_path.members, replacement.clone())
|
||||
{
|
||||
Value::error(e, head)
|
||||
} else {
|
||||
value
|
||||
}
|
||||
});
|
||||
|
||||
Ok(PipelineData::ListStream(stream, metadata))
|
||||
}
|
||||
}
|
||||
PipelineData::Empty => Err(ShellError::IncompatiblePathAccess {
|
||||
|
@ -189,7 +189,7 @@ fn upsert(
|
||||
}
|
||||
Ok(value.into_pipeline_data_with_metadata(metadata))
|
||||
}
|
||||
PipelineData::ListStream(mut stream, metadata) => {
|
||||
PipelineData::ListStream(stream, metadata) => {
|
||||
if let Some((
|
||||
&PathMember::Int {
|
||||
val,
|
||||
@ -199,6 +199,7 @@ fn upsert(
|
||||
path,
|
||||
)) = cell_path.members.split_first()
|
||||
{
|
||||
let mut stream = stream.into_iter();
|
||||
let mut pre_elems = vec![];
|
||||
|
||||
for idx in 0..val {
|
||||
@ -246,38 +247,38 @@ fn upsert(
|
||||
Ok(pre_elems
|
||||
.into_iter()
|
||||
.chain(stream)
|
||||
.into_pipeline_data_with_metadata(metadata, engine_state.ctrlc.clone()))
|
||||
.into_pipeline_data_with_metadata(head, engine_state.ctrlc.clone(), metadata))
|
||||
} else if let Value::Closure { val, .. } = replacement {
|
||||
let mut closure = ClosureEval::new(engine_state, stack, val);
|
||||
Ok(stream
|
||||
.map(move |mut value| {
|
||||
let err = upsert_value_by_closure(
|
||||
&mut value,
|
||||
&mut closure,
|
||||
head,
|
||||
&cell_path.members,
|
||||
false,
|
||||
);
|
||||
let stream = stream.map(move |mut value| {
|
||||
let err = upsert_value_by_closure(
|
||||
&mut value,
|
||||
&mut closure,
|
||||
head,
|
||||
&cell_path.members,
|
||||
false,
|
||||
);
|
||||
|
||||
if let Err(e) = err {
|
||||
Value::error(e, head)
|
||||
} else {
|
||||
value
|
||||
}
|
||||
})
|
||||
.into_pipeline_data_with_metadata(metadata, engine_state.ctrlc.clone()))
|
||||
if let Err(e) = err {
|
||||
Value::error(e, head)
|
||||
} else {
|
||||
value
|
||||
}
|
||||
});
|
||||
|
||||
Ok(PipelineData::ListStream(stream, metadata))
|
||||
} else {
|
||||
Ok(stream
|
||||
.map(move |mut value| {
|
||||
if let Err(e) =
|
||||
value.upsert_data_at_cell_path(&cell_path.members, replacement.clone())
|
||||
{
|
||||
Value::error(e, head)
|
||||
} else {
|
||||
value
|
||||
}
|
||||
})
|
||||
.into_pipeline_data_with_metadata(metadata, engine_state.ctrlc.clone()))
|
||||
let stream = stream.map(move |mut value| {
|
||||
if let Err(e) =
|
||||
value.upsert_data_at_cell_path(&cell_path.members, replacement.clone())
|
||||
{
|
||||
Value::error(e, head)
|
||||
} else {
|
||||
value
|
||||
}
|
||||
});
|
||||
|
||||
Ok(PipelineData::ListStream(stream, metadata))
|
||||
}
|
||||
}
|
||||
PipelineData::Empty => Err(ShellError::IncompatiblePathAccess {
|
||||
|
@ -144,7 +144,7 @@ fn values(
|
||||
Value::List { vals, .. } => match get_values(&vals, head, span) {
|
||||
Ok(cols) => Ok(cols
|
||||
.into_iter()
|
||||
.into_pipeline_data_with_metadata(metadata, ctrlc)),
|
||||
.into_pipeline_data_with_metadata(head, ctrlc, metadata)),
|
||||
Err(err) => Err(err),
|
||||
},
|
||||
Value::Custom { val, .. } => {
|
||||
@ -152,7 +152,7 @@ fn values(
|
||||
match get_values(&[input_as_base_value], head, span) {
|
||||
Ok(cols) => Ok(cols
|
||||
.into_iter()
|
||||
.into_pipeline_data_with_metadata(metadata, ctrlc)),
|
||||
.into_pipeline_data_with_metadata(head, ctrlc, metadata)),
|
||||
Err(err) => Err(err),
|
||||
}
|
||||
}
|
||||
@ -160,7 +160,7 @@ fn values(
|
||||
.values()
|
||||
.cloned()
|
||||
.collect::<Vec<_>>()
|
||||
.into_pipeline_data_with_metadata(metadata, ctrlc)),
|
||||
.into_pipeline_data_with_metadata(head, ctrlc, metadata)),
|
||||
// Propagate errors
|
||||
Value::Error { error, .. } => Err(*error),
|
||||
other => Err(ShellError::OnlySupportsThisInputType {
|
||||
@ -176,7 +176,7 @@ fn values(
|
||||
match get_values(&vals, head, head) {
|
||||
Ok(cols) => Ok(cols
|
||||
.into_iter()
|
||||
.into_pipeline_data_with_metadata(metadata, ctrlc)),
|
||||
.into_pipeline_data_with_metadata(head, ctrlc, metadata)),
|
||||
Err(err) => Err(err),
|
||||
}
|
||||
}
|
||||
|
@ -61,7 +61,7 @@ not supported."#
|
||||
Ok(data) => data.into_value(head).is_true().then_some(value),
|
||||
Err(err) => Some(Value::error(err, head)),
|
||||
})
|
||||
.into_pipeline_data_with_metadata(metadata, engine_state.ctrlc.clone()))
|
||||
.into_pipeline_data_with_metadata(head, engine_state.ctrlc.clone(), metadata))
|
||||
}
|
||||
|
||||
fn examples(&self) -> Vec<Example> {
|
||||
|
@ -1,4 +1,5 @@
|
||||
use nu_engine::command_prelude::*;
|
||||
use nu_protocol::ValueIterator;
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct Window;
|
||||
@ -110,6 +111,7 @@ impl Command for Window {
|
||||
call: &Call,
|
||||
input: PipelineData,
|
||||
) -> Result<PipelineData, ShellError> {
|
||||
let head = call.head;
|
||||
let group_size: Spanned<usize> = call.req(engine_state, stack, 0)?;
|
||||
let ctrlc = engine_state.ctrlc.clone();
|
||||
let metadata = input.metadata();
|
||||
@ -123,19 +125,19 @@ impl Command for Window {
|
||||
let each_group_iterator = EachWindowIterator {
|
||||
group_size: group_size.item,
|
||||
input: Box::new(input.into_iter()),
|
||||
span: call.head,
|
||||
span: head,
|
||||
previous: None,
|
||||
stride,
|
||||
remainder,
|
||||
};
|
||||
|
||||
Ok(each_group_iterator.into_pipeline_data_with_metadata(metadata, ctrlc))
|
||||
Ok(each_group_iterator.into_pipeline_data_with_metadata(head, ctrlc, metadata))
|
||||
}
|
||||
}
|
||||
|
||||
struct EachWindowIterator {
|
||||
group_size: usize,
|
||||
input: Box<dyn Iterator<Item = Value> + Send>,
|
||||
input: ValueIterator,
|
||||
span: Span,
|
||||
previous: Option<Vec<Value>>,
|
||||
stride: usize,
|
||||
|
@ -42,9 +42,9 @@ impl Command for Wrap {
|
||||
| PipelineData::ListStream { .. } => Ok(input
|
||||
.into_iter()
|
||||
.map(move |x| Value::record(record! { name.clone() => x }, span))
|
||||
.into_pipeline_data_with_metadata(metadata, engine_state.ctrlc.clone())),
|
||||
.into_pipeline_data_with_metadata(span, engine_state.ctrlc.clone(), metadata)),
|
||||
PipelineData::ExternalStream { .. } => Ok(Value::record(
|
||||
record! { name => input.into_value(call.head) },
|
||||
record! { name => input.into_value(span) },
|
||||
span,
|
||||
)
|
||||
.into_pipeline_data_with_metadata(metadata)),
|
||||
|
@ -112,7 +112,7 @@ impl Command for Zip {
|
||||
.into_iter()
|
||||
.zip(other)
|
||||
.map(move |(x, y)| Value::list(vec![x, y], head))
|
||||
.into_pipeline_data_with_metadata(metadata, engine_state.ctrlc.clone()))
|
||||
.into_pipeline_data_with_metadata(head, engine_state.ctrlc.clone(), metadata))
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -84,8 +84,11 @@ impl Command for FromJson {
|
||||
.collect()
|
||||
};
|
||||
|
||||
Ok(converted_lines
|
||||
.into_pipeline_data_with_metadata(metadata, engine_state.ctrlc.clone()))
|
||||
Ok(converted_lines.into_pipeline_data_with_metadata(
|
||||
span,
|
||||
engine_state.ctrlc.clone(),
|
||||
metadata,
|
||||
))
|
||||
} else if strict {
|
||||
Ok(convert_string_to_value_strict(&string_input, span)?
|
||||
.into_pipeline_data_with_metadata(metadata))
|
||||
|
@ -109,10 +109,9 @@ MessagePack: https://msgpack.org/
|
||||
call: &Call,
|
||||
input: PipelineData,
|
||||
) -> Result<PipelineData, ShellError> {
|
||||
let span = input.span().unwrap_or(call.head);
|
||||
let objects = call.has_flag(engine_state, stack, "objects")?;
|
||||
let opts = Opts {
|
||||
span,
|
||||
span: call.head,
|
||||
objects,
|
||||
ctrlc: engine_state.ctrlc.clone(),
|
||||
};
|
||||
@ -126,10 +125,10 @@ MessagePack: https://msgpack.org/
|
||||
stdout: Some(raw_stream),
|
||||
..
|
||||
} => read_msgpack(ReadRawStream::new(raw_stream), opts),
|
||||
_ => Err(ShellError::PipelineMismatch {
|
||||
input => Err(ShellError::PipelineMismatch {
|
||||
exp_input_type: "binary".into(),
|
||||
dst_span: call.head,
|
||||
src_span: span,
|
||||
src_span: input.span().unwrap_or(call.head),
|
||||
}),
|
||||
}
|
||||
}
|
||||
@ -257,7 +256,7 @@ pub(crate) fn read_msgpack(
|
||||
None
|
||||
}
|
||||
})
|
||||
.into_pipeline_data(ctrlc))
|
||||
.into_pipeline_data(span, ctrlc))
|
||||
} else {
|
||||
// Read a single value and then make sure it's EOF
|
||||
let result = read_value(&mut input, span, 0)?;
|
||||
|
@ -197,15 +197,15 @@ fn from_document_to_value(d: &roxmltree::Document, info: &ParsingInfo) -> Value
|
||||
element_to_value(&d.root_element(), info)
|
||||
}
|
||||
|
||||
fn from_xml_string_to_value(s: String, info: &ParsingInfo) -> Result<Value, roxmltree::Error> {
|
||||
let parsed = roxmltree::Document::parse(&s)?;
|
||||
fn from_xml_string_to_value(s: &str, info: &ParsingInfo) -> Result<Value, roxmltree::Error> {
|
||||
let parsed = roxmltree::Document::parse(s)?;
|
||||
Ok(from_document_to_value(&parsed, info))
|
||||
}
|
||||
|
||||
fn from_xml(input: PipelineData, info: &ParsingInfo) -> Result<PipelineData, ShellError> {
|
||||
let (concat_string, span, metadata) = input.collect_string_strict(info.span)?;
|
||||
|
||||
match from_xml_string_to_value(concat_string, info) {
|
||||
match from_xml_string_to_value(&concat_string, info) {
|
||||
Ok(x) => Ok(x.into_pipeline_data_with_metadata(metadata)),
|
||||
Err(err) => Err(process_xml_parse_error(err, span)),
|
||||
}
|
||||
@ -370,7 +370,7 @@ mod tests {
|
||||
keep_comments: false,
|
||||
keep_processing_instructions: false,
|
||||
};
|
||||
from_xml_string_to_value(xml.to_string(), &info)
|
||||
from_xml_string_to_value(xml, &info)
|
||||
}
|
||||
|
||||
#[test]
|
||||
|
@ -185,14 +185,10 @@ fn convert_yaml_value_to_nu_value(
|
||||
})
|
||||
}
|
||||
|
||||
pub fn from_yaml_string_to_value(
|
||||
s: String,
|
||||
span: Span,
|
||||
val_span: Span,
|
||||
) -> Result<Value, ShellError> {
|
||||
pub fn from_yaml_string_to_value(s: &str, span: Span, val_span: Span) -> Result<Value, ShellError> {
|
||||
let mut documents = vec![];
|
||||
|
||||
for document in serde_yaml::Deserializer::from_str(&s) {
|
||||
for document in serde_yaml::Deserializer::from_str(s) {
|
||||
let v: serde_yaml::Value =
|
||||
serde_yaml::Value::deserialize(document).map_err(|x| ShellError::UnsupportedInput {
|
||||
msg: format!("Could not load YAML: {x}"),
|
||||
@ -238,7 +234,7 @@ pub fn get_examples() -> Vec<Example<'static>> {
|
||||
fn from_yaml(input: PipelineData, head: Span) -> Result<PipelineData, ShellError> {
|
||||
let (concat_string, span, metadata) = input.collect_string_strict(head)?;
|
||||
|
||||
match from_yaml_string_to_value(concat_string, head, span) {
|
||||
match from_yaml_string_to_value(&concat_string, head, span) {
|
||||
Ok(x) => Ok(x.into_pipeline_data_with_metadata(metadata)),
|
||||
Err(other) => Err(other),
|
||||
}
|
||||
@ -274,11 +270,7 @@ mod test {
|
||||
];
|
||||
let config = Config::default();
|
||||
for tc in tt {
|
||||
let actual = from_yaml_string_to_value(
|
||||
tc.input.to_owned(),
|
||||
Span::test_data(),
|
||||
Span::test_data(),
|
||||
);
|
||||
let actual = from_yaml_string_to_value(tc.input, Span::test_data(), Span::test_data());
|
||||
if actual.is_err() {
|
||||
assert!(
|
||||
tc.expected.is_err(),
|
||||
@ -313,11 +305,7 @@ mod test {
|
||||
// table was non-deterministic. It would take a few executions of the YAML conversion to
|
||||
// see this ordering difference. This loop should be far more than enough to catch a regression.
|
||||
for ii in 1..1000 {
|
||||
let actual = from_yaml_string_to_value(
|
||||
String::from(test_yaml),
|
||||
Span::test_data(),
|
||||
Span::test_data(),
|
||||
);
|
||||
let actual = from_yaml_string_to_value(test_yaml, Span::test_data(), Span::test_data());
|
||||
|
||||
let expected: Result<Value, ShellError> = Ok(Value::test_list(vec![
|
||||
Value::test_record(record! {
|
||||
|
@ -1,6 +1,6 @@
use chrono_humanize::HumanTime;
use nu_engine::command_prelude::*;
use nu_protocol::{format_duration, format_filesize_from_conf, Config, ListStream, RawStream};
use nu_protocol::{format_duration, format_filesize_from_conf, Config, RawStream, ValueIterator};

#[derive(Clone)]
pub struct ToText;
@ -41,7 +41,7 @@ impl Command for ToText {
Ok(PipelineData::ExternalStream {
stdout: Some(RawStream::new(
Box::new(ListStreamIterator {
stream,
stream: stream.into_inner(),
separator: line_ending.into(),
config: config.clone(),
}),
@ -86,7 +86,7 @@ impl Command for ToText {
}

struct ListStreamIterator {
stream: ListStream,
stream: ValueIterator,
separator: String,
config: Config,
}

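Note: ListStreamIterator now stores the stream's inner iterator (the ValueIterator obtained via stream.into_inner()) rather than the ListStream wrapper itself. A rough stand-alone sketch of that shape; Rows and TextIter are hypothetical stand-ins, not nushell types:

    struct Rows {
        inner: std::vec::IntoIter<String>,
    }

    impl Rows {
        // Mirrors the into_inner() call above: hand out the underlying iterator.
        fn into_inner(self) -> std::vec::IntoIter<String> {
            self.inner
        }
    }

    struct TextIter {
        stream: std::vec::IntoIter<String>, // was the wrapper, now the raw iterator
        separator: String,
    }

    impl Iterator for TextIter {
        type Item = String;
        fn next(&mut self) -> Option<String> {
            let row = self.stream.next()?;
            Some(format!("{row}{}", self.separator))
        }
    }

    fn main() {
        let rows = Rows { inner: vec!["a".into(), "b".into()].into_iter() };
        let text: String = TextIter { stream: rows.into_inner(), separator: "\n".into() }.collect();
        assert_eq!(text, "a\nb\n");
    }
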
@ -183,7 +183,7 @@ used as the next argument to the closure, otherwise generation stops.

Ok(iter
.flatten()
.into_pipeline_data(engine_state.ctrlc.clone()))
.into_pipeline_data(call.head, engine_state.ctrlc.clone()))
}
}

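Note: the conversion into pipeline data now takes the call-site span (call.head) alongside the ctrl-c handle. A small stand-alone sketch of threading a span through the collection step; Span and collect_with_span here are hypothetical stand-ins, not nushell APIs:

    #[derive(Debug, Clone, Copy, PartialEq)]
    struct Span(u64);

    // Stand-in for iter.into_pipeline_data(call.head, ctrlc): the output keeps the span.
    fn collect_with_span<I: IntoIterator<Item = i64>>(iter: I, span: Span) -> (Vec<i64>, Span) {
        (iter.into_iter().collect(), span)
    }

    fn main() {
        let head = Span(7);
        let (values, span) = collect_with_span(1..=3, head);
        assert_eq!(values, vec![1, 2, 3]);
        assert_eq!(span, head);
    }
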
@ -1,4 +1,5 @@
use nu_engine::command_prelude::*;
use nu_protocol::ListStream;

#[derive(Clone)]
pub struct Seq;
@ -119,36 +120,32 @@ pub fn run_seq(
let step = if free.len() > 2 { free[1] } else { 1.0 };
let last = { free[free.len() - 1] };

if !contains_decimals {
// integers only
Ok(PipelineData::ListStream(
nu_protocol::ListStream::from_stream(
IntSeq {
count: first as i64,
step: step as i64,
last: last as i64,
span,
},
engine_state.ctrlc.clone(),
),
None,
))
let stream = if !contains_decimals {
ListStream::new(
IntSeq {
count: first as i64,
step: step as i64,
last: last as i64,
span,
},
span,
engine_state.ctrlc.clone(),
)
} else {
// floats
Ok(PipelineData::ListStream(
nu_protocol::ListStream::from_stream(
FloatSeq {
first,
step,
last,
index: 0,
span,
},
engine_state.ctrlc.clone(),
),
None,
))
}
ListStream::new(
FloatSeq {
first,
step,
last,
index: 0,
span,
},
span,
engine_state.ctrlc.clone(),
)
};

Ok(stream.into())
}

struct FloatSeq {

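Note: run_seq now builds the stream in both branches with ListStream::new(iter, span, ctrlc) and converts it once at the end with Ok(stream.into()), instead of wrapping each branch in Ok(PipelineData::ListStream(...)). A stand-alone sketch of that control flow; Stream and Output are hypothetical stand-ins for ListStream and PipelineData:

    struct Stream(Vec<f64>);

    enum Output {
        Stream(Stream),
    }

    impl From<Stream> for Output {
        fn from(s: Stream) -> Self {
            Output::Stream(s)
        }
    }

    fn run_seq(first: f64, step: f64, last: f64, integers: bool) -> Result<Output, String> {
        // Both branches produce the same stream type...
        let stream = if integers {
            Stream(((first as i64)..=(last as i64))
                .step_by(step as usize)
                .map(|i| i as f64)
                .collect())
        } else {
            let n = ((last - first) / step) as i64;
            Stream((0..=n).map(|i| first + step * i as f64).collect())
        };
        // ...and the conversion into the output type happens exactly once, at the end.
        Ok(stream.into())
    }

    fn main() {
        let Ok(Output::Stream(s)) = run_seq(1.0, 1.0, 3.0, true) else { unreachable!() };
        assert_eq!(s.0, vec![1.0, 2.0, 3.0]);
    }
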
@ -92,17 +92,12 @@ pub fn help_aliases(
&highlight_style,
)?;

return Ok(found_cmds_vec
.into_iter()
.into_pipeline_data(engine_state.ctrlc.clone()));
return Ok(Value::list(found_cmds_vec, head).into_pipeline_data());
}

if rest.is_empty() {
let found_cmds_vec = build_help_aliases(engine_state, stack, head);

Ok(found_cmds_vec
.into_iter()
.into_pipeline_data(engine_state.ctrlc.clone()))
Ok(Value::list(found_cmds_vec, head).into_pipeline_data())
} else {
let mut name = String::new();

@ -72,17 +72,12 @@ pub fn help_commands(
&highlight_style,
)?;

return Ok(found_cmds_vec
.into_iter()
.into_pipeline_data(engine_state.ctrlc.clone()));
return Ok(Value::list(found_cmds_vec, head).into_pipeline_data());
}

if rest.is_empty() {
let found_cmds_vec = build_help_commands(engine_state, head);

Ok(found_cmds_vec
.into_iter()
.into_pipeline_data(engine_state.ctrlc.clone()))
Ok(Value::list(found_cmds_vec, head).into_pipeline_data())
} else {
let mut name = String::new();

@ -21,7 +21,7 @@ impl Command for HelpEscapes {

fn run(
&self,
engine_state: &EngineState,
_engine_state: &EngineState,
_stack: &mut Stack,
call: &Call,
_input: PipelineData,
@ -40,9 +40,7 @@ impl Command for HelpEscapes {
));
}

Ok(recs
.into_iter()
.into_pipeline_data(engine_state.ctrlc.clone()))
Ok(Value::list(recs, call.head).into_pipeline_data())
}
}

@ -92,17 +92,12 @@ pub fn help_externs(
&highlight_style,
)?;

return Ok(found_cmds_vec
.into_iter()
.into_pipeline_data(engine_state.ctrlc.clone()));
return Ok(Value::list(found_cmds_vec, head).into_pipeline_data());
}

if rest.is_empty() {
let found_cmds_vec = build_help_externs(engine_state, stack, head);

Ok(found_cmds_vec
.into_iter()
.into_pipeline_data(engine_state.ctrlc.clone()))
Ok(Value::list(found_cmds_vec, head).into_pipeline_data())
} else {
let mut name = String::new();

@ -98,17 +98,12 @@ pub fn help_modules(
&highlight_style,
)?;

return Ok(found_cmds_vec
.into_iter()
.into_pipeline_data(engine_state.ctrlc.clone()));
return Ok(Value::list(found_cmds_vec, head).into_pipeline_data());
}

if rest.is_empty() {
let found_cmds_vec = build_help_modules(engine_state, head);

Ok(found_cmds_vec
.into_iter()
.into_pipeline_data(engine_state.ctrlc.clone()))
Ok(Value::list(found_cmds_vec, head).into_pipeline_data())
} else {
let mut name = String::new();

@ -21,7 +21,7 @@ impl Command for HelpOperators {

fn run(
&self,
engine_state: &EngineState,
_engine_state: &EngineState,
_stack: &mut Stack,
call: &Call,
_input: PipelineData,
@ -43,9 +43,7 @@ impl Command for HelpOperators {
));
}

Ok(recs
.into_iter()
.into_pipeline_data(engine_state.ctrlc.clone()))
Ok(Value::list(recs, head).into_pipeline_data())
}
}

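Note: the help_* paths above replace per-item streaming (into_iter().into_pipeline_data(ctrlc)) with a single list value (Value::list(vec, head).into_pipeline_data()), since the rows are already fully built in memory. A stand-alone sketch of that shape; ListValue and Pipeline are hypothetical stand-ins for Value and PipelineData:

    struct ListValue(Vec<String>);

    enum Pipeline {
        Value(ListValue),
    }

    impl ListValue {
        fn into_pipeline_data(self) -> Pipeline {
            Pipeline::Value(self)
        }
    }

    fn help_entries(found: Vec<String>) -> Pipeline {
        // Everything is already collected, so no iterator or ctrl-c plumbing is needed.
        ListValue(found).into_pipeline_data()
    }

    fn main() {
        let Pipeline::Value(list) = help_entries(vec!["alias".into(), "def".into()]);
        assert_eq!(list.0.len(), 2);
    }
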
@ -69,7 +69,7 @@ pub fn calculate(
let span = values.span().unwrap_or(name);
match values {
PipelineData::ListStream(s, ..) => {
helper_for_tables(&s.collect::<Vec<Value>>(), span, name, mf)
helper_for_tables(&s.into_iter().collect::<Vec<Value>>(), span, name, mf)
}
PipelineData::Value(Value::List { ref vals, .. }, ..) => match &vals[..] {
[Value::Record { .. }, _end @ ..] => helper_for_tables(

@ -660,7 +660,7 @@ Operating system commands:
let ctrlc = engine_state.ctrlc.clone();

if list {
return generate_ansi_code_list(ctrlc, call.head, use_ansi_coloring);
return Ok(generate_ansi_code_list(ctrlc, call.head, use_ansi_coloring));
}

// The code can now be one of the ansi abbreviations like green_bold
@ -694,7 +694,7 @@ Operating system commands:
let ctrlc = working_set.permanent().ctrlc.clone();

if list {
return generate_ansi_code_list(ctrlc, call.head, use_ansi_coloring);
return Ok(generate_ansi_code_list(ctrlc, call.head, use_ansi_coloring));
}

// The code can now be one of the ansi abbreviations like green_bold
@ -833,8 +833,8 @@ fn generate_ansi_code_list(
ctrlc: Option<Arc<AtomicBool>>,
call_span: Span,
use_ansi_coloring: bool,
) -> Result<PipelineData, ShellError> {
return Ok(CODE_LIST
) -> PipelineData {
CODE_LIST
.iter()
.enumerate()
.map(move |(i, ansi_code)| {
@ -865,7 +865,7 @@ fn generate_ansi_code_list(

Value::record(record, call_span)
})
.into_pipeline_data(ctrlc));
.into_pipeline_data(call_span, ctrlc)
}

fn build_ansi_hashmap(v: &[AnsiCode]) -> HashMap<&str, &str> {

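Note: generate_ansi_code_list cannot fail, so its signature drops the Result and the two call sites wrap its value in Ok(...) at the return; the final .into_pipeline_data(call_span, ctrlc) also carries the span now. A stand-alone sketch of an infallible helper called from a fallible command; Pipeline and generate_code_list are hypothetical stand-ins:

    struct Pipeline(Vec<String>);

    // Infallible helper: returns the data directly instead of Result<_, _>.
    fn generate_code_list(use_color: bool) -> Pipeline {
        let codes = ["green_bold", "red", "reset"];
        Pipeline(
            codes
                .iter()
                .map(|c| if use_color { format!("\u{1b}[{c}m") } else { c.to_string() })
                .collect(),
        )
    }

    // The caller is still fallible, so it wraps the helper's value in Ok(...).
    fn run(list: bool) -> Result<Pipeline, String> {
        if list {
            return Ok(generate_code_list(false));
        }
        Err("no ansi code requested".into())
    }

    fn main() {
        assert_eq!(run(true).unwrap().0.len(), 3);
    }
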
@ -78,10 +78,7 @@ fn dice(
Value::int(thread_rng.gen_range(1..sides + 1) as i64, span)
});

Ok(PipelineData::ListStream(
ListStream::from_stream(iter, engine_state.ctrlc.clone()),
None,
))
Ok(ListStream::new(iter, span, engine_state.ctrlc.clone()).into())
}

#[cfg(test)]

@ -230,7 +230,7 @@ impl Command for Char {

// handle -l flag
if list {
return generate_character_list(ctrlc, call.head);
return Ok(generate_character_list(ctrlc, call.head));
}

// handle -i flag
@ -265,7 +265,7 @@ impl Command for Char {

// handle -l flag
if list {
return generate_character_list(ctrlc, call_span);
return Ok(generate_character_list(ctrlc, call_span));
}

// handle -i flag
@ -286,11 +286,8 @@ impl Command for Char {
}
}

fn generate_character_list(
ctrlc: Option<Arc<AtomicBool>>,
call_span: Span,
) -> Result<PipelineData, ShellError> {
Ok(CHAR_MAP
fn generate_character_list(ctrlc: Option<Arc<AtomicBool>>, call_span: Span) -> PipelineData {
CHAR_MAP
.iter()
.map(move |(name, s)| {
let unicode = Value::string(
@ -308,7 +305,7 @@ fn generate_character_list(

Value::record(record, call_span)
})
.into_pipeline_data(ctrlc))
.into_pipeline_data(call_span, ctrlc)
}

fn handle_integer_flag(

@ -159,7 +159,7 @@ fn guess_width(
Err(e) => Value::error(e, input_span),
}
})
.into_pipeline_data(engine_state.ctrlc.clone()))
.into_pipeline_data(input_span, engine_state.ctrlc.clone()))
} else {
let length = result[0].len();
let columns: Vec<String> = (0..length).map(|n| format!("column{n}")).collect();
@ -184,7 +184,7 @@ fn guess_width(
Err(e) => Value::error(e, input_span),
}
})
.into_pipeline_data(engine_state.ctrlc.clone()))
.into_pipeline_data(input_span, engine_state.ctrlc.clone()))
}
}

@ -278,7 +278,7 @@ fn detect_columns(
None => Value::record(record, name_span),
}
})
.into_pipeline_data(ctrlc))
.into_pipeline_data(call.head, ctrlc))
} else {
Ok(PipelineData::empty())
}

@ -281,7 +281,7 @@ mod tests {

#[test]
fn base64_decode_binhex() {
let word = Value::test_string("A5\"KC9jRB@IIF'8bF!");
let word = Value::test_string(r#"B5"LD@jSCAJJG'9cG!"#);
let expected = Value::binary(b"a binhex test".as_slice(), Span::test_data());

let actual = action(
@ -304,7 +304,7 @@ mod tests {

#[test]
fn base64_decode_binhex_with_new_line_input() {
let word = Value::test_string("A5\"KC9jRB\n@IIF'8bF!");
let word = Value::test_string("B5\"LD@jSC\nAJJG'9cG!");
let expected = Value::binary(b"a binhex test".as_slice(), Span::test_data());

let actual = action(

@ -65,7 +65,7 @@ impl Command for EncodeBase64 {
Example {
description: "Encode a string with the binhex character set",
example: "'Some Data' | encode base64 --character-set binhex",
result: Some(Value::test_string(r#"7epXB5"%A@4J"#)),
result: Some(Value::test_string(r#"8fpYC5"%BA4K"#)),
},
]
}

Some files were not shown because too many files have changed in this diff.