Add and use new Signals struct (#13314)

# Description
This PR introduces a new `Signals` struct to replace our ad-hoc passing
around of `ctrlc: Option<Arc<AtomicBool>>`. Doing so has a few benefits:
- We can better enforce when/where resetting or triggering an interrupt
is allowed.
- Consolidates `nu_utils::ctrl_c::was_pressed` and other ad-hoc
re-implementations into a single place: `Signals::check`.
- This allows us to add other types of signals later if we want, e.g.,
exiting or suspension.
- Similarly, we can more easily change the underlying implementation if
we need to in the future.
- Places that used to have a `ctrlc` of `None` now use
`Signals::empty()`, so we can double-check these usages for correctness
in the future (see the sketch below).
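
The following is a minimal, self-contained sketch of the shape of the new API, assuming only the methods the diff below actually uses (`Signals::new`, `Signals::empty`, `check`, `interrupted`, and a reset). The stand-in `Span` and `Interrupted` types, the field layout, and the memory ordering are illustrative assumptions, not the real `nu_protocol` implementation.

```rust
use std::sync::{
    atomic::{AtomicBool, Ordering},
    Arc,
};

#[derive(Debug, Clone, Copy)]
struct Span; // stand-in for nu_protocol::Span

#[derive(Debug)]
struct Interrupted {
    span: Span, // where the interrupt surfaced, for error reporting
}

#[derive(Debug, Clone)]
struct Signals {
    // `None` means "signals can never fire here" (the old `ctrlc: None` case).
    signals: Option<Arc<AtomicBool>>,
}

impl Signals {
    /// Wraps the flag that the ctrl-c handler flips.
    fn new(flag: Arc<AtomicBool>) -> Self {
        Self { signals: Some(flag) }
    }

    /// Replaces the old `ctrlc: None`; checking never reports an interrupt.
    const fn empty() -> Self {
        Self { signals: None }
    }

    /// Replaces `nu_utils::ctrl_c::was_pressed`.
    fn interrupted(&self) -> bool {
        self.signals
            .as_ref()
            .is_some_and(|flag| flag.load(Ordering::Relaxed))
    }

    /// The consolidated check: return an error instead of silently breaking.
    fn check(&self, span: Span) -> Result<(), Interrupted> {
        if self.interrupted() {
            Err(Interrupted { span })
        } else {
            Ok(())
        }
    }

    /// Only the owner of the engine state should clear the flag between prompts.
    fn reset(&self) {
        if let Some(flag) = &self.signals {
            flag.store(false, Ordering::Relaxed);
        }
    }
}

fn main() {
    let flag = Arc::new(AtomicBool::new(false));
    let signals = Signals::new(flag.clone());
    assert!(signals.check(Span).is_ok());

    // Simulate the ctrl-c handler firing.
    flag.store(true, Ordering::Relaxed);
    assert!(signals.interrupted());
    println!("{:?}", signals.check(Span)); // Err(Interrupted { span: Span })

    // Between REPL iterations the engine clears the flag (cf. reset_signals()).
    signals.reset();
    assert!(signals.check(Span).is_ok());
}
```

The key behavioral change visible throughout the diff is that `check` surfaces an error instead of loops silently breaking, so an interrupt propagates as a `ShellError` rather than yielding partial output.
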
Ian Manske, 2024-07-07 22:29:01 +00:00 (committed by GitHub)
parent c6b6b1b7a8
commit 399a7c8836
246 changed files with 1332 additions and 1234 deletions

View File

@ -4,11 +4,14 @@ use nu_plugin_protocol::{PluginCallResponse, PluginOutput};
use nu_protocol::{
engine::{EngineState, Stack},
PipelineData, Span, Spanned, Value,
PipelineData, Signals, Span, Spanned, Value,
};
use nu_std::load_standard_library;
use nu_utils::{get_default_config, get_default_env};
use std::rc::Rc;
use std::{
rc::Rc,
sync::{atomic::AtomicBool, Arc},
};
use std::hint::black_box;
@ -248,14 +251,12 @@ fn bench_eval_interleave(n: i32) -> impl IntoBenchmarks {
)
}
fn bench_eval_interleave_with_ctrlc(n: i32) -> impl IntoBenchmarks {
fn bench_eval_interleave_with_interrupt(n: i32) -> impl IntoBenchmarks {
let mut engine = setup_engine();
engine.ctrlc = Some(std::sync::Arc::new(std::sync::atomic::AtomicBool::new(
false,
)));
engine.set_signals(Signals::new(Arc::new(AtomicBool::new(false))));
let stack = Stack::new();
bench_command(
&format!("eval_interleave_with_ctrlc_{n}"),
&format!("eval_interleave_with_interrupt_{n}"),
&format!("seq 1 {n} | wrap a | interleave {{ seq 1 {n} | wrap b }} | ignore"),
stack,
engine,
@ -443,9 +444,9 @@ tango_benchmarks!(
bench_eval_interleave(100),
bench_eval_interleave(1_000),
bench_eval_interleave(10_000),
bench_eval_interleave_with_ctrlc(100),
bench_eval_interleave_with_ctrlc(1_000),
bench_eval_interleave_with_ctrlc(10_000),
bench_eval_interleave_with_interrupt(100),
bench_eval_interleave_with_interrupt(1_000),
bench_eval_interleave_with_interrupt(10_000),
// For
bench_eval_for(1),
bench_eval_for(10),

View File

@ -47,7 +47,7 @@ impl Command for History {
if let Some(config_path) = nu_path::config_dir() {
let clear = call.has_flag(engine_state, stack, "clear")?;
let long = call.has_flag(engine_state, stack, "long")?;
let ctrlc = engine_state.ctrlc.clone();
let signals = engine_state.signals().clone();
let mut history_path = config_path;
history_path.push("nushell");
@ -107,7 +107,7 @@ impl Command for History {
file: history_path.display().to_string(),
span: head,
})?
.into_pipeline_data(head, ctrlc)),
.into_pipeline_data(head, signals)),
HistoryFileFormat::Sqlite => Ok(history_reader
.and_then(|h| {
h.search(SearchQuery::everything(SearchDirection::Forward, None))
@ -122,7 +122,7 @@ impl Command for History {
file: history_path.display().to_string(),
span: head,
})?
.into_pipeline_data(head, ctrlc)),
.into_pipeline_data(head, signals)),
}
}
} else {

View File

@ -32,7 +32,7 @@ impl Command for NuHighlight {
) -> Result<PipelineData, ShellError> {
let head = call.head;
let ctrlc = engine_state.ctrlc.clone();
let signals = engine_state.signals();
let engine_state = std::sync::Arc::new(engine_state.clone());
let config = engine_state.get_config().clone();
@ -50,7 +50,7 @@ impl Command for NuHighlight {
}
Err(err) => Value::error(err, head),
},
ctrlc,
signals,
)
}

View File

@ -43,7 +43,7 @@ use std::{
io::{self, IsTerminal, Write},
panic::{catch_unwind, AssertUnwindSafe},
path::{Path, PathBuf},
sync::{atomic::Ordering, Arc},
sync::Arc,
time::{Duration, Instant},
};
use sysinfo::System;
@ -271,11 +271,8 @@ fn loop_iteration(ctx: LoopContext) -> (bool, Stack, Reedline) {
perf!("merge env", start_time, use_color);
start_time = std::time::Instant::now();
// Reset the ctrl-c handler
if let Some(ctrlc) = &mut engine_state.ctrlc {
ctrlc.store(false, Ordering::SeqCst);
}
perf!("reset ctrlc", start_time, use_color);
engine_state.reset_signals();
perf!("reset signals", start_time, use_color);
start_time = std::time::Instant::now();
// Right before we start our prompt and take input from the user,

View File

@ -1,5 +1,5 @@
use nu_protocol::{ast::CellPath, PipelineData, ShellError, Span, Value};
use std::sync::{atomic::AtomicBool, Arc};
use nu_protocol::{ast::CellPath, PipelineData, ShellError, Signals, Span, Value};
use std::sync::Arc;
pub trait CmdArgument {
fn take_cell_paths(&mut self) -> Option<Vec<CellPath>>;
@ -40,7 +40,7 @@ pub fn operate<C, A>(
mut arg: A,
input: PipelineData,
span: Span,
ctrlc: Option<Arc<AtomicBool>>,
signals: &Signals,
) -> Result<PipelineData, ShellError>
where
A: CmdArgument + Send + Sync + 'static,
@ -55,7 +55,7 @@ where
_ => cmd(&v, &arg, span),
}
},
ctrlc,
signals,
),
Some(column_paths) => {
let arg = Arc::new(arg);
@ -79,7 +79,7 @@ where
}
v
},
ctrlc,
signals,
)
}
}

View File

@ -79,7 +79,7 @@ impl Command for BitsAnd {
input.map(
move |value| binary_op(&value, &target, little_endian, |(l, r)| l & r, head),
engine_state.ctrlc.clone(),
engine_state.signals(),
)
}

View File

@ -3,6 +3,7 @@ use std::io::{self, Read, Write};
use nu_cmd_base::input_handler::{operate, CmdArgument};
use nu_engine::command_prelude::*;
use nu_protocol::Signals;
use num_traits::ToPrimitive;
pub struct Arguments {
@ -127,31 +128,36 @@ fn into_bits(
))
} else {
let args = Arguments { cell_paths };
operate(action, args, input, call.head, engine_state.ctrlc.clone())
operate(action, args, input, call.head, engine_state.signals())
}
}
fn byte_stream_to_bits(stream: ByteStream, head: Span) -> ByteStream {
if let Some(mut reader) = stream.reader() {
let mut is_first = true;
ByteStream::from_fn(head, None, ByteStreamType::String, move |buffer| {
let mut byte = [0];
if reader.read(&mut byte[..]).err_span(head)? > 0 {
// Format the byte as bits
if is_first {
is_first = false;
ByteStream::from_fn(
head,
Signals::empty(),
ByteStreamType::String,
move |buffer| {
let mut byte = [0];
if reader.read(&mut byte[..]).err_span(head)? > 0 {
// Format the byte as bits
if is_first {
is_first = false;
} else {
buffer.push(b' ');
}
write!(buffer, "{:08b}", byte[0]).expect("format failed");
Ok(true)
} else {
buffer.push(b' ');
// EOF
Ok(false)
}
write!(buffer, "{:08b}", byte[0]).expect("format failed");
Ok(true)
} else {
// EOF
Ok(false)
}
})
},
)
} else {
ByteStream::read(io::empty(), head, None, ByteStreamType::String)
ByteStream::read(io::empty(), head, Signals::empty(), ByteStreamType::String)
}
}

View File

@ -82,7 +82,7 @@ impl Command for BitsNot {
number_size,
};
operate(action, args, input, head, engine_state.ctrlc.clone())
operate(action, args, input, head, engine_state.signals())
}
fn examples(&self) -> Vec<Example> {

View File

@ -80,7 +80,7 @@ impl Command for BitsOr {
input.map(
move |value| binary_op(&value, &target, little_endian, |(l, r)| l | r, head),
engine_state.ctrlc.clone(),
engine_state.signals(),
)
}

View File

@ -86,7 +86,7 @@ impl Command for BitsRol {
bits,
};
operate(action, args, input, head, engine_state.ctrlc.clone())
operate(action, args, input, head, engine_state.signals())
}
fn examples(&self) -> Vec<Example> {

View File

@ -86,7 +86,7 @@ impl Command for BitsRor {
bits,
};
operate(action, args, input, head, engine_state.ctrlc.clone())
operate(action, args, input, head, engine_state.signals())
}
fn examples(&self) -> Vec<Example> {

View File

@ -88,7 +88,7 @@ impl Command for BitsShl {
bits,
};
operate(action, args, input, head, engine_state.ctrlc.clone())
operate(action, args, input, head, engine_state.signals())
}
fn examples(&self) -> Vec<Example> {

View File

@ -88,7 +88,7 @@ impl Command for BitsShr {
bits,
};
operate(action, args, input, head, engine_state.ctrlc.clone())
operate(action, args, input, head, engine_state.signals())
}
fn examples(&self) -> Vec<Example> {

View File

@ -80,7 +80,7 @@ impl Command for BitsXor {
input.map(
move |value| binary_op(&value, &target, little_endian, |(l, r)| l ^ r, head),
engine_state.ctrlc.clone(),
engine_state.signals(),
)
}

View File

@ -59,7 +59,7 @@ fn fmt(
) -> Result<PipelineData, ShellError> {
let cell_paths: Vec<CellPath> = call.rest(engine_state, stack, 0)?;
let args = CellPathOnlyArgs::from(cell_paths);
operate(action, args, input, call.head, engine_state.ctrlc.clone())
operate(action, args, input, call.head, engine_state.signals())
}
fn action(input: &Value, _args: &CellPathOnlyArgs, span: Span) -> Value {

View File

@ -89,7 +89,7 @@ impl Command for EachWhile {
}
})
.fuse()
.into_pipeline_data(head, engine_state.ctrlc.clone()))
.into_pipeline_data(head, engine_state.signals().clone()))
}
PipelineData::ByteStream(stream, ..) => {
let span = stream.span();
@ -107,7 +107,7 @@ impl Command for EachWhile {
}
})
.fuse()
.into_pipeline_data(head, engine_state.ctrlc.clone()))
.into_pipeline_data(head, engine_state.signals().clone()))
} else {
Ok(PipelineData::Empty)
}

View File

@ -108,7 +108,7 @@ impl Command for UpdateCells {
columns,
span: head,
}
.into_pipeline_data(head, engine_state.ctrlc.clone())
.into_pipeline_data(head, engine_state.signals().clone())
.set_metadata(metadata))
}
}

View File

@ -45,7 +45,7 @@ impl Command for SubCommand {
}
input.map(
move |value| operate(value, head, use_degrees),
engine_state.ctrlc.clone(),
engine_state.signals(),
)
}

View File

@ -41,10 +41,7 @@ impl Command for SubCommand {
if matches!(input, PipelineData::Empty) {
return Err(ShellError::PipelineEmpty { dst_span: head });
}
input.map(
move |value| operate(value, head),
engine_state.ctrlc.clone(),
)
input.map(move |value| operate(value, head), engine_state.signals())
}
fn examples(&self) -> Vec<Example> {

View File

@ -45,7 +45,7 @@ impl Command for SubCommand {
}
input.map(
move |value| operate(value, head, use_degrees),
engine_state.ctrlc.clone(),
engine_state.signals(),
)
}

View File

@ -41,10 +41,7 @@ impl Command for SubCommand {
if matches!(input, PipelineData::Empty) {
return Err(ShellError::PipelineEmpty { dst_span: head });
}
input.map(
move |value| operate(value, head),
engine_state.ctrlc.clone(),
)
input.map(move |value| operate(value, head), engine_state.signals())
}
fn examples(&self) -> Vec<Example> {

View File

@ -45,7 +45,7 @@ impl Command for SubCommand {
}
input.map(
move |value| operate(value, head, use_degrees),
engine_state.ctrlc.clone(),
engine_state.signals(),
)
}

View File

@ -41,10 +41,7 @@ impl Command for SubCommand {
if matches!(input, PipelineData::Empty) {
return Err(ShellError::PipelineEmpty { dst_span: head });
}
input.map(
move |value| operate(value, head),
engine_state.ctrlc.clone(),
)
input.map(move |value| operate(value, head), engine_state.signals())
}
fn examples(&self) -> Vec<Example> {

View File

@ -44,7 +44,7 @@ impl Command for SubCommand {
}
input.map(
move |value| operate(value, head, use_degrees),
engine_state.ctrlc.clone(),
engine_state.signals(),
)
}

View File

@ -41,10 +41,7 @@ impl Command for SubCommand {
if matches!(input, PipelineData::Empty) {
return Err(ShellError::PipelineEmpty { dst_span: head });
}
input.map(
move |value| operate(value, head),
engine_state.ctrlc.clone(),
)
input.map(move |value| operate(value, head), engine_state.signals())
}
fn examples(&self) -> Vec<Example> {

View File

@ -41,10 +41,7 @@ impl Command for SubCommand {
if matches!(input, PipelineData::Empty) {
return Err(ShellError::PipelineEmpty { dst_span: head });
}
input.map(
move |value| operate(value, head),
engine_state.ctrlc.clone(),
)
input.map(move |value| operate(value, head), engine_state.signals())
}
fn examples(&self) -> Vec<Example> {

View File

@ -41,10 +41,7 @@ impl Command for SubCommand {
if matches!(input, PipelineData::Empty) {
return Err(ShellError::PipelineEmpty { dst_span: head });
}
input.map(
move |value| operate(value, head),
engine_state.ctrlc.clone(),
)
input.map(move |value| operate(value, head), engine_state.signals())
}
fn examples(&self) -> Vec<Example> {

View File

@ -44,7 +44,7 @@ impl Command for SubCommand {
}
input.map(
move |value| operate(value, head, use_degrees),
engine_state.ctrlc.clone(),
engine_state.signals(),
)
}

View File

@ -41,10 +41,7 @@ impl Command for SubCommand {
if matches!(input, PipelineData::Empty) {
return Err(ShellError::PipelineEmpty { dst_span: head });
}
input.map(
move |value| operate(value, head),
engine_state.ctrlc.clone(),
)
input.map(move |value| operate(value, head), engine_state.signals())
}
fn examples(&self) -> Vec<Example> {

View File

@ -44,7 +44,7 @@ impl Command for SubCommand {
}
input.map(
move |value| operate(value, head, use_degrees),
engine_state.ctrlc.clone(),
engine_state.signals(),
)
}

View File

@ -41,10 +41,7 @@ impl Command for SubCommand {
if matches!(input, PipelineData::Empty) {
return Err(ShellError::PipelineEmpty { dst_span: head });
}
input.map(
move |value| operate(value, head),
engine_state.ctrlc.clone(),
)
input.map(move |value| operate(value, head), engine_state.signals())
}
fn examples(&self) -> Vec<Example> {

View File

@ -140,7 +140,7 @@ fn operate(
ret
}
},
engine_state.ctrlc.clone(),
engine_state.signals(),
)
}

View File

@ -88,7 +88,7 @@ pub fn operate(
cell_paths,
};
general_operate(action, args, input, call.head, engine_state.ctrlc.clone())
general_operate(action, args, input, call.head, engine_state.signals())
}
fn action(

View File

@ -220,7 +220,7 @@ fn format(
}
}
Ok(ListStream::new(list.into_iter(), head_span, engine_state.ctrlc.clone()).into())
Ok(ListStream::new(list.into_iter(), head_span, engine_state.signals().clone()).into())
}
// Unwrapping this ShellError is a bit unfortunate.
// Ideally, its Span would be preserved.

View File

@ -44,7 +44,7 @@ where
case_operation,
cell_paths,
};
general_operate(action, args, input, call.head, engine_state.ctrlc.clone())
general_operate(action, args, input, call.head, engine_state.signals())
}
fn action<F>(input: &Value, args: &Arguments<F>, head: Span) -> Value

View File

@ -1,5 +1,5 @@
use nu_engine::{command_prelude::*, get_eval_block, get_eval_expression};
use nu_protocol::engine::CommandType;
use nu_protocol::{engine::CommandType, Signals};
#[derive(Clone)]
pub struct For;
@ -72,7 +72,6 @@ impl Command for For {
let value = eval_expression(engine_state, stack, keyword_expr)?;
let ctrlc = engine_state.ctrlc.clone();
let engine_state = engine_state.clone();
let block = engine_state.get_block(block_id);
@ -82,9 +81,7 @@ impl Command for For {
match value {
Value::List { vals, .. } => {
for x in vals.into_iter() {
if nu_utils::ctrl_c::was_pressed(&ctrlc) {
break;
}
engine_state.signals().check(head)?;
// with_env() is used here to ensure that each iteration uses
// a different set of environment variables.
@ -116,7 +113,8 @@ impl Command for For {
}
}
Value::Range { val, .. } => {
for x in val.into_range_iter(span, ctrlc) {
for x in val.into_range_iter(span, Signals::empty()) {
engine_state.signals().check(head)?;
stack.add_var(var_id, x);
match eval_block(&engine_state, stack, block, PipelineData::empty()) {

View File

@ -37,6 +37,7 @@ impl Command for Loop {
call: &Call,
_input: PipelineData,
) -> Result<PipelineData, ShellError> {
let head = call.head;
let block_id = call
.positional_nth(0)
.expect("checked through parser")
@ -49,9 +50,7 @@ impl Command for Loop {
let stack = &mut stack.push_redirection(None, None);
loop {
if nu_utils::ctrl_c::was_pressed(&engine_state.ctrlc) {
break;
}
engine_state.signals().check(head)?;
match eval_block(engine_state, stack, block, PipelineData::empty()) {
Err(ShellError::Break { .. }) => {

View File

@ -46,6 +46,7 @@ impl Command for While {
call: &Call,
_input: PipelineData,
) -> Result<PipelineData, ShellError> {
let head = call.head;
let cond = call.positional_nth(0).expect("checked through parser");
let block_id = call
.positional_nth(1)
@ -59,9 +60,7 @@ impl Command for While {
let stack = &mut stack.push_redirection(None, None);
loop {
if nu_utils::ctrl_c::was_pressed(&engine_state.ctrlc) {
break;
}
engine_state.signals().check(head)?;
let result = eval_expression(engine_state, stack, cond)?;

View File

@ -78,7 +78,7 @@ impl Command for BytesAdd {
end,
cell_paths,
};
operate(add, arg, input, call.head, engine_state.ctrlc.clone())
operate(add, arg, input, call.head, engine_state.signals())
}
fn examples(&self) -> Vec<Example> {

View File

@ -83,7 +83,7 @@ impl Command for BytesAt {
cell_paths,
};
operate(action, args, input, call.head, engine_state.ctrlc.clone())
operate(action, args, input, call.head, engine_state.signals())
}
fn examples(&self) -> Vec<Example> {

View File

@ -60,7 +60,12 @@ impl Command for BytesCollect {
)
.flatten();
let output = ByteStream::from_result_iter(iter, span, None, ByteStreamType::Binary);
let output = ByteStream::from_result_iter(
iter,
span,
engine_state.signals().clone(),
ByteStreamType::Binary,
);
Ok(PipelineData::ByteStream(output, metadata))
}

View File

@ -102,7 +102,7 @@ impl Command for BytesEndsWith {
pattern,
cell_paths,
};
operate(ends_with, arg, input, head, engine_state.ctrlc.clone())
operate(ends_with, arg, input, head, engine_state.signals())
}
}

View File

@ -71,7 +71,7 @@ impl Command for BytesIndexOf {
all: call.has_flag(engine_state, stack, "all")?,
cell_paths,
};
operate(index_of, arg, input, call.head, engine_state.ctrlc.clone())
operate(index_of, arg, input, call.head, engine_state.signals())
}
fn examples(&self) -> Vec<Example> {

View File

@ -46,7 +46,7 @@ impl Command for BytesLen {
) -> Result<PipelineData, ShellError> {
let cell_paths: Vec<CellPath> = call.rest(engine_state, stack, 1)?;
let arg = CellPathOnlyArgs::from(cell_paths);
operate(length, arg, input, call.head, engine_state.ctrlc.clone())
operate(length, arg, input, call.head, engine_state.signals())
}
fn examples(&self) -> Vec<Example> {

View File

@ -73,7 +73,7 @@ impl Command for BytesRemove {
all: call.has_flag(engine_state, stack, "all")?,
};
operate(remove, arg, input, call.head, engine_state.ctrlc.clone())
operate(remove, arg, input, call.head, engine_state.signals())
}
fn examples(&self) -> Vec<Example> {

View File

@ -73,7 +73,7 @@ impl Command for BytesReplace {
all: call.has_flag(engine_state, stack, "all")?,
};
operate(replace, arg, input, call.head, engine_state.ctrlc.clone())
operate(replace, arg, input, call.head, engine_state.signals())
}
fn examples(&self) -> Vec<Example> {

View File

@ -42,7 +42,7 @@ impl Command for BytesReverse {
) -> Result<PipelineData, ShellError> {
let cell_paths: Vec<CellPath> = call.rest(engine_state, stack, 0)?;
let arg = CellPathOnlyArgs::from(cell_paths);
operate(reverse, arg, input, call.head, engine_state.ctrlc.clone())
operate(reverse, arg, input, call.head, engine_state.signals())
}
fn examples(&self) -> Vec<Example> {

View File

@ -79,7 +79,7 @@ impl Command for BytesStartsWith {
pattern,
cell_paths,
};
operate(starts_with, arg, input, head, engine_state.ctrlc.clone())
operate(starts_with, arg, input, head, engine_state.signals())
}
}

View File

@ -165,7 +165,7 @@ fn fill(
cell_paths,
};
operate(action, arg, input, call.head, engine_state.ctrlc.clone())
operate(action, arg, input, call.head, engine_state.signals())
}
fn action(input: &Value, args: &Arguments, span: Span) -> Value {

View File

@ -138,7 +138,7 @@ fn into_binary(
cell_paths,
compact: call.has_flag(engine_state, stack, "compact")?,
};
operate(action, args, input, head, engine_state.ctrlc.clone())
operate(action, args, input, head, engine_state.signals())
}
}

View File

@ -107,7 +107,7 @@ fn into_bool(
) -> Result<PipelineData, ShellError> {
let cell_paths: Vec<CellPath> = call.rest(engine_state, stack, 0)?;
let args = CellPathOnlyArgs::from(cell_paths);
operate(action, args, input, call.head, engine_state.ctrlc.clone())
operate(action, args, input, call.head, engine_state.signals())
}
fn string_to_boolean(s: &str, span: Span) -> Result<bool, ShellError> {

View File

@ -141,7 +141,7 @@ impl Command for SubCommand {
zone_options,
cell_paths,
};
operate(action, args, input, call.head, engine_state.ctrlc.clone())
operate(action, args, input, call.head, engine_state.signals())
}
}

View File

@ -166,7 +166,7 @@ fn into_duration(
ret
}
},
engine_state.ctrlc.clone(),
engine_state.signals(),
)
}

View File

@ -68,7 +68,7 @@ impl Command for SubCommand {
) -> Result<PipelineData, ShellError> {
let cell_paths: Vec<CellPath> = call.rest(engine_state, stack, 0)?;
let args = CellPathOnlyArgs::from(cell_paths);
operate(action, args, input, call.head, engine_state.ctrlc.clone())
operate(action, args, input, call.head, engine_state.signals())
}
fn examples(&self) -> Vec<Example> {

View File

@ -49,7 +49,7 @@ impl Command for SubCommand {
) -> Result<PipelineData, ShellError> {
let cell_paths: Vec<CellPath> = call.rest(engine_state, stack, 0)?;
let args = CellPathOnlyArgs::from(cell_paths);
operate(action, args, input, call.head, engine_state.ctrlc.clone())
operate(action, args, input, call.head, engine_state.signals())
}
fn examples(&self) -> Vec<Example> {

View File

@ -87,7 +87,7 @@ fn glob_helper(
Ok(Value::glob(stream.into_string()?, false, head).into_pipeline_data())
} else {
let args = Arguments { cell_paths };
operate(action, args, input, head, engine_state.ctrlc.clone())
operate(action, args, input, head, engine_state.signals())
}
}

View File

@ -158,7 +158,7 @@ impl Command for SubCommand {
signed,
cell_paths,
};
operate(action, args, input, call.head, engine_state.ctrlc.clone())
operate(action, args, input, call.head, engine_state.signals())
}
fn examples(&self) -> Vec<Example> {

View File

@ -125,7 +125,7 @@ fn into_record(
),
},
Value::Range { val, .. } => Value::record(
val.into_range_iter(span, engine_state.ctrlc.clone())
val.into_range_iter(span, engine_state.signals().clone())
.enumerate()
.map(|(idx, val)| (format!("{idx}"), val))
.collect(),

View File

@ -180,7 +180,7 @@ fn string_helper(
cell_paths,
config,
};
operate(action, args, input, head, engine_state.ctrlc.clone())
operate(action, args, input, head, engine_state.signals())
}
}

View File

@ -57,14 +57,12 @@ impl Command for IntoValue {
call: &Call,
input: PipelineData,
) -> Result<PipelineData, ShellError> {
let engine_state = engine_state.clone();
let metadata = input.metadata();
let ctrlc = engine_state.ctrlc.clone();
let span = call.head;
let display_as_filesizes = call.has_flag(&engine_state, stack, "prefer-filesizes")?;
let display_as_filesizes = call.has_flag(engine_state, stack, "prefer-filesizes")?;
// the columns to update
let columns: Option<Value> = call.get_flag(&engine_state, stack, "columns")?;
let columns: Option<Value> = call.get_flag(engine_state, stack, "columns")?;
let columns: Option<HashSet<String>> = match columns {
Some(val) => Some(
val.into_list()?
@ -81,7 +79,7 @@ impl Command for IntoValue {
display_as_filesizes,
span,
}
.into_pipeline_data(span, ctrlc)
.into_pipeline_data(span, engine_state.signals().clone())
.set_metadata(metadata))
}
}

View File

@ -2,13 +2,8 @@ use crate::database::values::sqlite::{open_sqlite_db, values_to_sql};
use nu_engine::command_prelude::*;
use itertools::Itertools;
use std::{
path::Path,
sync::{
atomic::{AtomicBool, Ordering},
Arc,
},
};
use nu_protocol::Signals;
use std::path::Path;
pub const DEFAULT_TABLE_NAME: &str = "main";
@ -188,23 +183,18 @@ fn operate(
let file_name: Spanned<String> = call.req(engine_state, stack, 0)?;
let table_name: Option<Spanned<String>> = call.get_flag(engine_state, stack, "table-name")?;
let table = Table::new(&file_name, table_name)?;
let ctrl_c = engine_state.ctrlc.clone();
match action(input, table, span, ctrl_c) {
Ok(val) => Ok(val.into_pipeline_data()),
Err(e) => Err(e),
}
Ok(action(input, table, span, engine_state.signals())?.into_pipeline_data())
}
fn action(
input: PipelineData,
table: Table,
span: Span,
ctrl_c: Option<Arc<AtomicBool>>,
signals: &Signals,
) -> Result<Value, ShellError> {
match input {
PipelineData::ListStream(stream, _) => {
insert_in_transaction(stream.into_iter(), span, table, ctrl_c)
insert_in_transaction(stream.into_iter(), span, table, signals)
}
PipelineData::Value(
Value::List {
@ -212,9 +202,9 @@ fn action(
internal_span,
},
_,
) => insert_in_transaction(vals.into_iter(), internal_span, table, ctrl_c),
) => insert_in_transaction(vals.into_iter(), internal_span, table, signals),
PipelineData::Value(val, _) => {
insert_in_transaction(std::iter::once(val), span, table, ctrl_c)
insert_in_transaction(std::iter::once(val), span, table, signals)
}
_ => Err(ShellError::OnlySupportsThisInputType {
exp_input_type: "list".into(),
@ -229,7 +219,7 @@ fn insert_in_transaction(
stream: impl Iterator<Item = Value>,
span: Span,
mut table: Table,
ctrl_c: Option<Arc<AtomicBool>>,
signals: &Signals,
) -> Result<Value, ShellError> {
let mut stream = stream.peekable();
let first_val = match stream.peek() {
@ -251,17 +241,15 @@ fn insert_in_transaction(
let tx = table.try_init(&first_val)?;
for stream_value in stream {
if let Some(ref ctrlc) = ctrl_c {
if ctrlc.load(Ordering::Relaxed) {
tx.rollback().map_err(|e| ShellError::GenericError {
error: "Failed to rollback SQLite transaction".into(),
msg: e.to_string(),
span: None,
help: None,
inner: Vec::new(),
})?;
return Err(ShellError::InterruptedByUser { span: None });
}
if let Err(err) = signals.check(span) {
tx.rollback().map_err(|e| ShellError::GenericError {
error: "Failed to rollback SQLite transaction".into(),
msg: e.to_string(),
span: None,
help: None,
inner: Vec::new(),
})?;
return Err(err);
}
let val = stream_value.as_record()?;

View File

@ -2,7 +2,7 @@ use super::definitions::{
db_column::DbColumn, db_constraint::DbConstraint, db_foreignkey::DbForeignKey,
db_index::DbIndex, db_table::DbTable,
};
use nu_protocol::{CustomValue, PipelineData, Record, ShellError, Span, Spanned, Value};
use nu_protocol::{CustomValue, PipelineData, Record, ShellError, Signals, Span, Spanned, Value};
use rusqlite::{
types::ValueRef, Connection, DatabaseName, Error as SqliteError, OpenFlags, Row, Statement,
ToSql,
@ -12,7 +12,6 @@ use std::{
fs::File,
io::Read,
path::{Path, PathBuf},
sync::{atomic::AtomicBool, Arc},
};
const SQLITE_MAGIC_BYTES: &[u8] = "SQLite format 3\0".as_bytes();
@ -24,25 +23,21 @@ pub struct SQLiteDatabase {
// 1) YAGNI, 2) it's not obvious how cloning a connection could work, 3) state
// management gets tricky quick. Revisit this approach if we find a compelling use case.
pub path: PathBuf,
#[serde(skip)]
#[serde(skip, default = "Signals::empty")]
// this understandably can't be serialized. think that's OK, I'm not aware of a
// reason why a CustomValue would be serialized outside of a plugin
ctrlc: Option<Arc<AtomicBool>>,
signals: Signals,
}
impl SQLiteDatabase {
pub fn new(path: &Path, ctrlc: Option<Arc<AtomicBool>>) -> Self {
pub fn new(path: &Path, signals: Signals) -> Self {
Self {
path: PathBuf::from(path),
ctrlc,
signals,
}
}
pub fn try_from_path(
path: &Path,
span: Span,
ctrlc: Option<Arc<AtomicBool>>,
) -> Result<Self, ShellError> {
pub fn try_from_path(path: &Path, span: Span, signals: Signals) -> Result<Self, ShellError> {
let mut file = File::open(path).map_err(|e| ShellError::ReadingFile {
msg: e.to_string(),
span,
@ -56,7 +51,7 @@ impl SQLiteDatabase {
})
.and_then(|_| {
if buf == SQLITE_MAGIC_BYTES {
Ok(SQLiteDatabase::new(path, ctrlc))
Ok(SQLiteDatabase::new(path, signals))
} else {
Err(ShellError::ReadingFile {
msg: "Not a SQLite file".into(),
@ -72,7 +67,7 @@ impl SQLiteDatabase {
Value::Custom { val, .. } => match val.as_any().downcast_ref::<Self>() {
Some(db) => Ok(Self {
path: db.path.clone(),
ctrlc: db.ctrlc.clone(),
signals: db.signals.clone(),
}),
None => Err(ShellError::CantConvert {
to_type: "database".into(),
@ -107,16 +102,8 @@ impl SQLiteDatabase {
call_span: Span,
) -> Result<Value, ShellError> {
let conn = open_sqlite_db(&self.path, call_span)?;
let stream = run_sql_query(conn, sql, params, self.ctrlc.clone()).map_err(|e| {
ShellError::GenericError {
error: "Failed to query SQLite database".into(),
msg: e.to_string(),
span: Some(sql.span),
help: None,
inner: vec![],
}
})?;
let stream = run_sql_query(conn, sql, params, &self.signals)
.map_err(|e| e.into_shell_error(sql.span, "Failed to query SQLite database"))?;
Ok(stream)
}
@ -352,12 +339,7 @@ impl SQLiteDatabase {
impl CustomValue for SQLiteDatabase {
fn clone_value(&self, span: Span) -> Value {
let cloned = SQLiteDatabase {
path: self.path.clone(),
ctrlc: self.ctrlc.clone(),
};
Value::custom(Box::new(cloned), span)
Value::custom(Box::new(self.clone()), span)
}
fn type_name(&self) -> String {
@ -366,13 +348,8 @@ impl CustomValue for SQLiteDatabase {
fn to_base_value(&self, span: Span) -> Result<Value, ShellError> {
let db = open_sqlite_db(&self.path, span)?;
read_entire_sqlite_db(db, span, self.ctrlc.clone()).map_err(|e| ShellError::GenericError {
error: "Failed to read from SQLite database".into(),
msg: e.to_string(),
span: Some(span),
help: None,
inner: vec![],
})
read_entire_sqlite_db(db, span, &self.signals)
.map_err(|e| e.into_shell_error(span, "Failed to read from SQLite database"))
}
fn as_any(&self) -> &dyn std::any::Any {
@ -396,20 +373,12 @@ impl CustomValue for SQLiteDatabase {
fn follow_path_string(
&self,
_self_span: Span,
_column_name: String,
column_name: String,
path_span: Span,
) -> Result<Value, ShellError> {
let db = open_sqlite_db(&self.path, path_span)?;
read_single_table(db, _column_name, path_span, self.ctrlc.clone()).map_err(|e| {
ShellError::GenericError {
error: "Failed to read from SQLite database".into(),
msg: e.to_string(),
span: Some(path_span),
help: None,
inner: vec![],
}
})
read_single_table(db, column_name, path_span, &self.signals)
.map_err(|e| e.into_shell_error(path_span, "Failed to read from SQLite database"))
}
fn typetag_name(&self) -> &'static str {
@ -426,12 +395,12 @@ pub fn open_sqlite_db(path: &Path, call_span: Span) -> Result<Connection, ShellE
open_connection_in_memory_custom()
} else {
let path = path.to_string_lossy().to_string();
Connection::open(path).map_err(|e| ShellError::GenericError {
Connection::open(path).map_err(|err| ShellError::GenericError {
error: "Failed to open SQLite database".into(),
msg: e.to_string(),
msg: err.to_string(),
span: Some(call_span),
help: None,
inner: vec![],
inner: Vec::new(),
})
}
}
@ -440,11 +409,10 @@ fn run_sql_query(
conn: Connection,
sql: &Spanned<String>,
params: NuSqlParams,
ctrlc: Option<Arc<AtomicBool>>,
) -> Result<Value, SqliteError> {
signals: &Signals,
) -> Result<Value, SqliteOrShellError> {
let stmt = conn.prepare(&sql.item)?;
prepared_statement_to_nu_list(stmt, params, sql.span, ctrlc)
prepared_statement_to_nu_list(stmt, params, sql.span, signals)
}
// This is taken from to text local_into_string but tweaks it a bit so that certain formatting does not happen
@ -534,23 +502,56 @@ pub fn nu_value_to_params(value: Value) -> Result<NuSqlParams, ShellError> {
}
}
#[derive(Debug)]
enum SqliteOrShellError {
SqliteError(SqliteError),
ShellError(ShellError),
}
impl From<SqliteError> for SqliteOrShellError {
fn from(error: SqliteError) -> Self {
Self::SqliteError(error)
}
}
impl From<ShellError> for SqliteOrShellError {
fn from(error: ShellError) -> Self {
Self::ShellError(error)
}
}
impl SqliteOrShellError {
fn into_shell_error(self, span: Span, msg: &str) -> ShellError {
match self {
Self::SqliteError(err) => ShellError::GenericError {
error: msg.into(),
msg: err.to_string(),
span: Some(span),
help: None,
inner: Vec::new(),
},
Self::ShellError(err) => err,
}
}
}
fn read_single_table(
conn: Connection,
table_name: String,
call_span: Span,
ctrlc: Option<Arc<AtomicBool>>,
) -> Result<Value, SqliteError> {
signals: &Signals,
) -> Result<Value, SqliteOrShellError> {
// TODO: Should use params here?
let stmt = conn.prepare(&format!("SELECT * FROM [{table_name}]"))?;
prepared_statement_to_nu_list(stmt, NuSqlParams::default(), call_span, ctrlc)
prepared_statement_to_nu_list(stmt, NuSqlParams::default(), call_span, signals)
}
fn prepared_statement_to_nu_list(
mut stmt: Statement,
params: NuSqlParams,
call_span: Span,
ctrlc: Option<Arc<AtomicBool>>,
) -> Result<Value, SqliteError> {
signals: &Signals,
) -> Result<Value, SqliteOrShellError> {
let column_names = stmt
.column_names()
.into_iter()
@ -576,11 +577,7 @@ fn prepared_statement_to_nu_list(
let mut row_values = vec![];
for row_result in row_results {
if nu_utils::ctrl_c::was_pressed(&ctrlc) {
// return whatever we have so far, let the caller decide whether to use it
return Ok(Value::list(row_values, call_span));
}
signals.check(call_span)?;
if let Ok(row_value) = row_result {
row_values.push(row_value);
}
@ -606,11 +603,7 @@ fn prepared_statement_to_nu_list(
let mut row_values = vec![];
for row_result in row_results {
if nu_utils::ctrl_c::was_pressed(&ctrlc) {
// return whatever we have so far, let the caller decide whether to use it
return Ok(Value::list(row_values, call_span));
}
signals.check(call_span)?;
if let Ok(row_value) = row_result {
row_values.push(row_value);
}
@ -626,8 +619,8 @@ fn prepared_statement_to_nu_list(
fn read_entire_sqlite_db(
conn: Connection,
call_span: Span,
ctrlc: Option<Arc<AtomicBool>>,
) -> Result<Value, SqliteError> {
signals: &Signals,
) -> Result<Value, SqliteOrShellError> {
let mut tables = Record::new();
let mut get_table_names =
@ -638,12 +631,8 @@ fn read_entire_sqlite_db(
let table_name: String = row?;
// TODO: Should use params here?
let table_stmt = conn.prepare(&format!("select * from [{table_name}]"))?;
let rows = prepared_statement_to_nu_list(
table_stmt,
NuSqlParams::default(),
call_span,
ctrlc.clone(),
)?;
let rows =
prepared_statement_to_nu_list(table_stmt, NuSqlParams::default(), call_span, signals)?;
tables.push(table_name, rows);
}
@ -710,7 +699,7 @@ mod test {
#[test]
fn can_read_empty_db() {
let db = open_connection_in_memory().unwrap();
let converted_db = read_entire_sqlite_db(db, Span::test_data(), None).unwrap();
let converted_db = read_entire_sqlite_db(db, Span::test_data(), &Signals::empty()).unwrap();
let expected = Value::test_record(Record::new());
@ -730,7 +719,7 @@ mod test {
[],
)
.unwrap();
let converted_db = read_entire_sqlite_db(db, Span::test_data(), None).unwrap();
let converted_db = read_entire_sqlite_db(db, Span::test_data(), &Signals::empty()).unwrap();
let expected = Value::test_record(record! {
"person" => Value::test_list(vec![]),
@ -759,7 +748,7 @@ mod test {
db.execute("INSERT INTO item (id, name) VALUES (456, 'foo bar')", [])
.unwrap();
let converted_db = read_entire_sqlite_db(db, span, None).unwrap();
let converted_db = read_entire_sqlite_db(db, span, &Signals::empty()).unwrap();
let expected = Value::test_record(record! {
"item" => Value::test_list(

View File

@ -50,7 +50,7 @@ impl Command for SubCommand {
if matches!(input, PipelineData::Empty) {
return Err(ShellError::PipelineEmpty { dst_span: head });
}
input.map(move |value| helper(value, head), engine_state.ctrlc.clone())
input.map(move |value| helper(value, head), engine_state.signals())
}
fn examples(&self) -> Vec<Example> {

View File

@ -40,7 +40,7 @@ impl Command for SubCommand {
head,
)
})
.into_pipeline_data(head, engine_state.ctrlc.clone()))
.into_pipeline_data(head, engine_state.signals().clone()))
}
fn examples(&self) -> Vec<Example> {

View File

@ -40,7 +40,7 @@ impl Command for SubCommand {
if matches!(input, PipelineData::Empty) {
return Err(ShellError::PipelineEmpty { dst_span: head });
}
input.map(move |value| helper(value, head), engine_state.ctrlc.clone())
input.map(move |value| helper(value, head), engine_state.signals())
}
fn examples(&self) -> Vec<Example> {

View File

@ -40,7 +40,7 @@ impl Command for SubCommand {
if matches!(input, PipelineData::Empty) {
return Err(ShellError::PipelineEmpty { dst_span: head });
}
input.map(move |value| helper(value, head), engine_state.ctrlc.clone())
input.map(move |value| helper(value, head), engine_state.signals())
}
fn examples(&self) -> Vec<Example> {

View File

@ -55,7 +55,7 @@ impl Command for SubCommand {
}
input.map(
move |value| helper(value, head, &timezone),
engine_state.ctrlc.clone(),
engine_state.signals(),
)
}

View File

@ -46,7 +46,7 @@ impl Command for Debug {
Value::string(x.to_expanded_string(", ", &config), head)
}
},
engine_state.ctrlc.clone(),
engine_state.signals(),
)
}

View File

@ -48,7 +48,7 @@ impl Command for MetadataSet {
let ds_fp: Option<String> = call.get_flag(engine_state, stack, "datasource-filepath")?;
let ds_ls = call.has_flag(engine_state, stack, "datasource-ls")?;
let content_type: Option<String> = call.get_flag(engine_state, stack, "content-type")?;
let signals = engine_state.signals().clone();
let metadata = input
.metadata()
.clone()
@ -58,19 +58,15 @@ impl Command for MetadataSet {
match (ds_fp, ds_ls) {
(Some(path), false) => Ok(input.into_pipeline_data_with_metadata(
head,
engine_state.ctrlc.clone(),
signals,
metadata.with_data_source(DataSource::FilePath(path.into())),
)),
(None, true) => Ok(input.into_pipeline_data_with_metadata(
head,
engine_state.ctrlc.clone(),
signals,
metadata.with_data_source(DataSource::Ls),
)),
_ => Ok(input.into_pipeline_data_with_metadata(
head,
engine_state.ctrlc.clone(),
metadata,
)),
_ => Ok(input.into_pipeline_data_with_metadata(head, signals, metadata)),
}
}

View File

@ -3,10 +3,9 @@ use crate::{DirBuilder, DirInfo, FileInfo};
#[allow(deprecated)]
use nu_engine::{command_prelude::*, current_dir};
use nu_glob::Pattern;
use nu_protocol::NuGlob;
use nu_protocol::{NuGlob, Signals};
use serde::Deserialize;
use std::path::Path;
use std::sync::{atomic::AtomicBool, Arc};
#[derive(Clone)]
pub struct Du;
@ -120,8 +119,8 @@ impl Command for Du {
min_size,
};
Ok(
du_for_one_pattern(args, &current_dir, tag, engine_state.ctrlc.clone())?
.into_pipeline_data(tag, engine_state.ctrlc.clone()),
du_for_one_pattern(args, &current_dir, tag, engine_state.signals())?
.into_pipeline_data(tag, engine_state.signals().clone()),
)
}
Some(paths) => {
@ -139,7 +138,7 @@ impl Command for Du {
args,
&current_dir,
tag,
engine_state.ctrlc.clone(),
engine_state.signals(),
)?)
}
@ -147,7 +146,7 @@ impl Command for Du {
Ok(result_iters
.into_iter()
.flatten()
.into_pipeline_data(tag, engine_state.ctrlc.clone()))
.into_pipeline_data(tag, engine_state.signals().clone()))
}
}
}
@ -164,8 +163,8 @@ impl Command for Du {
fn du_for_one_pattern(
args: DuArgs,
current_dir: &Path,
call_span: Span,
ctrl_c: Option<Arc<AtomicBool>>,
span: Span,
signals: &Signals,
) -> Result<impl Iterator<Item = Value> + Send, ShellError> {
let exclude = args.exclude.map_or(Ok(None), move |x| {
Pattern::new(x.item.as_ref())
@ -178,7 +177,7 @@ fn du_for_one_pattern(
let include_files = args.all;
let mut paths = match args.path {
Some(p) => nu_engine::glob_from(&p, current_dir, call_span, None),
Some(p) => nu_engine::glob_from(&p, current_dir, span, None),
// The * pattern should never fail.
None => nu_engine::glob_from(
&Spanned {
@ -186,7 +185,7 @@ fn du_for_one_pattern(
span: Span::unknown(),
},
current_dir,
call_span,
span,
None,
),
}
@ -205,7 +204,7 @@ fn du_for_one_pattern(
let min_size = args.min_size.map(|f| f.item as u64);
let params = DirBuilder {
tag: call_span,
tag: span,
min: min_size,
deref,
exclude,
@ -217,13 +216,13 @@ fn du_for_one_pattern(
match p {
Ok(a) => {
if a.is_dir() {
output.push(DirInfo::new(a, &params, max_depth, ctrl_c.clone()).into());
} else if let Ok(v) = FileInfo::new(a, deref, call_span) {
output.push(DirInfo::new(a, &params, max_depth, span, signals)?.into());
} else if let Ok(v) = FileInfo::new(a, deref, span) {
output.push(v.into());
}
}
Err(e) => {
output.push(Value::error(e, call_span));
output.push(Value::error(e, span));
}
}
}

View File

@ -1,5 +1,5 @@
use nu_engine::command_prelude::*;
use std::sync::{atomic::AtomicBool, Arc};
use nu_protocol::Signals;
use wax::{Glob as WaxGlob, WalkBehavior, WalkEntry};
#[derive(Clone)]
@ -125,7 +125,6 @@ impl Command for Glob {
call: &Call,
_input: PipelineData,
) -> Result<PipelineData, ShellError> {
let ctrlc = engine_state.ctrlc.clone();
let span = call.head;
let glob_pattern: Spanned<String> = call.req(engine_state, stack, 0)?;
let depth = call.get_flag(engine_state, stack, "depth")?;
@ -216,7 +215,14 @@ impl Command for Glob {
inner: vec![],
})?
.flatten();
glob_to_value(ctrlc, glob_results, no_dirs, no_files, no_symlinks, span)
glob_to_value(
engine_state.signals(),
glob_results,
no_dirs,
no_files,
no_symlinks,
span,
)
} else {
let glob_results = glob
.walk_with_behavior(
@ -227,12 +233,19 @@ impl Command for Glob {
},
)
.flatten();
glob_to_value(ctrlc, glob_results, no_dirs, no_files, no_symlinks, span)
glob_to_value(
engine_state.signals(),
glob_results,
no_dirs,
no_files,
no_symlinks,
span,
)
}?;
Ok(result
.into_iter()
.into_pipeline_data(span, engine_state.ctrlc.clone()))
.into_pipeline_data(span, engine_state.signals().clone()))
}
}
@ -252,7 +265,7 @@ fn convert_patterns(columns: &[Value]) -> Result<Vec<String>, ShellError> {
}
fn glob_to_value<'a>(
ctrlc: Option<Arc<AtomicBool>>,
signals: &Signals,
glob_results: impl Iterator<Item = WalkEntry<'a>>,
no_dirs: bool,
no_files: bool,
@ -261,10 +274,7 @@ fn glob_to_value<'a>(
) -> Result<Vec<Value>, ShellError> {
let mut result: Vec<Value> = Vec::new();
for entry in glob_results {
if nu_utils::ctrl_c::was_pressed(&ctrlc) {
result.clear();
return Err(ShellError::InterruptedByUser { span: None });
}
signals.check(span)?;
let file_type = entry.file_type();
if !(no_dirs && file_type.is_dir()

View File

@ -6,14 +6,13 @@ use nu_engine::glob_from;
use nu_engine::{command_prelude::*, env::current_dir};
use nu_glob::MatchOptions;
use nu_path::expand_to_real_path;
use nu_protocol::{DataSource, NuGlob, PipelineMetadata};
use nu_protocol::{DataSource, NuGlob, PipelineMetadata, Signals};
use pathdiff::diff_paths;
#[cfg(unix)]
use std::os::unix::fs::PermissionsExt;
use std::{
path::PathBuf,
sync::Arc,
time::{SystemTime, UNIX_EPOCH},
};
@ -93,7 +92,6 @@ impl Command for Ls {
let du = call.has_flag(engine_state, stack, "du")?;
let directory = call.has_flag(engine_state, stack, "directory")?;
let use_mime_type = call.has_flag(engine_state, stack, "mime-type")?;
let ctrl_c = engine_state.ctrlc.clone();
let call_span = call.head;
#[allow(deprecated)]
let cwd = current_dir(engine_state, stack)?;
@ -116,10 +114,10 @@ impl Command for Ls {
Some(pattern_arg)
};
match input_pattern_arg {
None => Ok(ls_for_one_pattern(None, args, ctrl_c.clone(), cwd)?
None => Ok(ls_for_one_pattern(None, args, engine_state.signals(), cwd)?
.into_pipeline_data_with_metadata(
call_span,
ctrl_c,
engine_state.signals().clone(),
PipelineMetadata {
data_source: DataSource::Ls,
content_type: None,
@ -131,7 +129,7 @@ impl Command for Ls {
result_iters.push(ls_for_one_pattern(
Some(pat),
args,
ctrl_c.clone(),
engine_state.signals(),
cwd.clone(),
)?)
}
@ -143,7 +141,7 @@ impl Command for Ls {
.flatten()
.into_pipeline_data_with_metadata(
call_span,
ctrl_c,
engine_state.signals().clone(),
PipelineMetadata {
data_source: DataSource::Ls,
content_type: None,
@ -215,7 +213,7 @@ impl Command for Ls {
fn ls_for_one_pattern(
pattern_arg: Option<Spanned<NuGlob>>,
args: Args,
ctrl_c: Option<Arc<AtomicBool>>,
signals: &Signals,
cwd: PathBuf,
) -> Result<Box<dyn Iterator<Item = Value> + Send>, ShellError> {
let Args {
@ -342,7 +340,7 @@ fn ls_for_one_pattern(
let mut hidden_dirs = vec![];
let one_ctrl_c = ctrl_c.clone();
let signals = signals.clone();
Ok(Box::new(paths_peek.filter_map(move |x| match x {
Ok(path) => {
let metadata = match std::fs::symlink_metadata(&path) {
@ -412,7 +410,7 @@ fn ls_for_one_pattern(
call_span,
long,
du,
one_ctrl_c.clone(),
&signals,
use_mime_type,
);
match entry {
@ -474,7 +472,6 @@ fn path_contains_hidden_folder(path: &Path, folders: &[PathBuf]) -> bool {
#[cfg(unix)]
use std::os::unix::fs::FileTypeExt;
use std::path::Path;
use std::sync::atomic::AtomicBool;
pub fn get_file_type(md: &std::fs::Metadata, display_name: &str, use_mime_type: bool) -> String {
let ft = md.file_type();
@ -523,7 +520,7 @@ pub(crate) fn dir_entry_dict(
span: Span,
long: bool,
du: bool,
ctrl_c: Option<Arc<AtomicBool>>,
signals: &Signals,
use_mime_type: bool,
) -> Result<Value, ShellError> {
#[cfg(windows)]
@ -618,7 +615,7 @@ pub(crate) fn dir_entry_dict(
if md.is_dir() {
if du {
let params = DirBuilder::new(Span::new(0, 2), None, false, None, false);
let dir_size = DirInfo::new(filename, &params, None, ctrl_c).get_size();
let dir_size = DirInfo::new(filename, &params, None, span, signals)?.get_size();
Value::filesize(dir_size as i64, span)
} else {

View File

@ -51,7 +51,6 @@ impl Command for Open {
) -> Result<PipelineData, ShellError> {
let raw = call.has_flag(engine_state, stack, "raw")?;
let call_span = call.head;
let ctrlc = engine_state.ctrlc.clone();
#[allow(deprecated)]
let cwd = current_dir(engine_state, stack)?;
let mut paths = get_rest_for_glob_pattern(engine_state, stack, call, 0)?;
@ -122,8 +121,12 @@ impl Command for Open {
} else {
#[cfg(feature = "sqlite")]
if !raw {
let res = SQLiteDatabase::try_from_path(path, arg_span, ctrlc.clone())
.map(|db| db.into_value(call.head).into_pipeline_data());
let res = SQLiteDatabase::try_from_path(
path,
arg_span,
engine_state.signals().clone(),
)
.map(|db| db.into_value(call.head).into_pipeline_data());
if res.is_ok() {
return res;
@ -144,7 +147,7 @@ impl Command for Open {
};
let stream = PipelineData::ByteStream(
ByteStream::file(file, call_span, ctrlc.clone()),
ByteStream::file(file, call_span, engine_state.signals().clone()),
Some(PipelineMetadata {
data_source: DataSource::FilePath(path.to_path_buf()),
content_type: None,
@ -203,7 +206,7 @@ impl Command for Open {
Ok(output
.into_iter()
.flatten()
.into_pipeline_data(call_span, ctrlc))
.into_pipeline_data(call_span, engine_state.signals().clone()))
}
}

View File

@ -451,12 +451,7 @@ fn rm(
});
for result in iter {
if nu_utils::ctrl_c::was_pressed(&engine_state.ctrlc) {
return Err(ShellError::InterruptedByUser {
span: Some(call.head),
});
}
engine_state.signals().check(call.head)?;
match result {
Ok(None) => {}
Ok(Some(msg)) => eprintln!("{msg}"),

View File

@ -5,15 +5,14 @@ use nu_engine::{command_prelude::*, current_dir};
use nu_path::expand_path_with;
use nu_protocol::{
ast::{Expr, Expression},
byte_stream::copy_with_interrupt,
byte_stream::copy_with_signals,
process::ChildPipe,
ByteStreamSource, DataSource, OutDest, PipelineMetadata,
ByteStreamSource, DataSource, OutDest, PipelineMetadata, Signals,
};
use std::{
fs::File,
io::{self, BufRead, BufReader, Read, Write},
path::{Path, PathBuf},
sync::{atomic::AtomicBool, Arc},
thread,
};
@ -120,30 +119,30 @@ impl Command for Save {
)?;
let size = stream.known_size();
let ctrlc = engine_state.ctrlc.clone();
let signals = engine_state.signals();
match stream.into_source() {
ByteStreamSource::Read(read) => {
stream_to_file(read, size, ctrlc, file, span, progress)?;
stream_to_file(read, size, signals, file, span, progress)?;
}
ByteStreamSource::File(source) => {
stream_to_file(source, size, ctrlc, file, span, progress)?;
stream_to_file(source, size, signals, file, span, progress)?;
}
ByteStreamSource::Child(mut child) => {
fn write_or_consume_stderr(
stderr: ChildPipe,
file: Option<File>,
span: Span,
ctrlc: Option<Arc<AtomicBool>>,
signals: &Signals,
progress: bool,
) -> Result<(), ShellError> {
if let Some(file) = file {
match stderr {
ChildPipe::Pipe(pipe) => {
stream_to_file(pipe, None, ctrlc, file, span, progress)
stream_to_file(pipe, None, signals, file, span, progress)
}
ChildPipe::Tee(tee) => {
stream_to_file(tee, None, ctrlc, file, span, progress)
stream_to_file(tee, None, signals, file, span, progress)
}
}?
} else {
@ -163,14 +162,14 @@ impl Command for Save {
// delegate a thread to redirect stderr to result.
let handler = stderr
.map(|stderr| {
let ctrlc = ctrlc.clone();
let signals = signals.clone();
thread::Builder::new().name("stderr saver".into()).spawn(
move || {
write_or_consume_stderr(
stderr,
stderr_file,
span,
ctrlc,
&signals,
progress,
)
},
@ -181,10 +180,10 @@ impl Command for Save {
let res = match stdout {
ChildPipe::Pipe(pipe) => {
stream_to_file(pipe, None, ctrlc, file, span, progress)
stream_to_file(pipe, None, signals, file, span, progress)
}
ChildPipe::Tee(tee) => {
stream_to_file(tee, None, ctrlc, file, span, progress)
stream_to_file(tee, None, signals, file, span, progress)
}
};
if let Some(h) = handler {
@ -202,7 +201,7 @@ impl Command for Save {
stderr,
stderr_file,
span,
ctrlc,
signals,
progress,
)?;
}
@ -510,7 +509,7 @@ fn get_files(
fn stream_to_file(
source: impl Read,
known_size: Option<u64>,
ctrlc: Option<Arc<AtomicBool>>,
signals: &Signals,
mut file: File,
span: Span,
progress: bool,
@ -526,9 +525,9 @@ fn stream_to_file(
let mut reader = BufReader::new(source);
let res = loop {
if nu_utils::ctrl_c::was_pressed(&ctrlc) {
if let Err(err) = signals.check(span) {
bar.abandoned_msg("# Cancelled #".to_owned());
return Ok(());
return Err(err);
}
match reader.fill_buf() {
@ -555,7 +554,7 @@ fn stream_to_file(
Ok(())
}
} else {
copy_with_interrupt(source, file, span, ctrlc.as_deref())?;
copy_with_signals(source, file, span, signals)?;
Ok(())
}
}

View File

@ -143,7 +143,6 @@ impl Command for Watch {
None => RecursiveMode::Recursive,
};
let ctrlc_ref = &engine_state.ctrlc.clone();
let (tx, rx) = channel();
let mut debouncer = match new_debouncer(debounce_duration, None, tx) {
@ -256,7 +255,7 @@ impl Command for Watch {
}
Err(RecvTimeoutError::Timeout) => {}
}
if nu_utils::ctrl_c::was_pressed(ctrlc_ref) {
if engine_state.signals().interrupted() {
break;
}
}

View File

@ -116,7 +116,7 @@ only unwrap the outer list, and leave the variable's contents untouched."#
Ok(input
.into_iter()
.chain(other.into_pipeline_data())
.into_pipeline_data_with_metadata(call.head, engine_state.ctrlc.clone(), metadata))
.into_pipeline_data_with_metadata(call.head, engine_state.signals().clone(), metadata))
}
}

View File

@ -140,7 +140,7 @@ pub fn compact(
_ => true,
}
},
engine_state.ctrlc.clone(),
engine_state.signals(),
)
.map(|m| m.set_metadata(metadata))
}

View File

@ -80,8 +80,6 @@ fn default(
let value: Value = call.req(engine_state, stack, 0)?;
let column: Option<Spanned<String>> = call.opt(engine_state, stack, 1)?;
let ctrlc = engine_state.ctrlc.clone();
if let Some(column) = column {
input
.map(
@ -109,7 +107,7 @@ fn default(
}
_ => item,
},
ctrlc,
engine_state.signals(),
)
.map(|x| x.set_metadata(metadata))
} else if input.is_nothing() {
@ -121,7 +119,7 @@ fn default(
Value::Nothing { .. } => value.clone(),
x => x,
},
ctrlc,
engine_state.signals(),
)
.map(|x| x.set_metadata(metadata))
}

View File

@ -102,7 +102,11 @@ fn drop_cols(
Err(e) => Value::error(e, head),
}
}))
.into_pipeline_data_with_metadata(head, engine_state.ctrlc.clone(), metadata))
.into_pipeline_data_with_metadata(
head,
engine_state.signals().clone(),
metadata,
))
} else {
Ok(PipelineData::Empty)
}

View File

@ -156,7 +156,7 @@ impl Command for DropNth {
.take(start)
.into_pipeline_data_with_metadata(
head,
engine_state.ctrlc.clone(),
engine_state.signals().clone(),
metadata,
))
}
@ -177,7 +177,7 @@ impl Command for DropNth {
rows,
current: 0,
}
.into_pipeline_data_with_metadata(head, engine_state.ctrlc.clone(), metadata))
.into_pipeline_data_with_metadata(head, engine_state.signals().clone(), metadata))
}
}

View File

@ -140,7 +140,7 @@ with 'transpose' first."#
}
}
})
.into_pipeline_data(head, engine_state.ctrlc.clone()))
.into_pipeline_data(head, engine_state.signals().clone()))
}
PipelineData::ByteStream(stream, ..) => {
if let Some(chunks) = stream.chunks() {
@ -171,7 +171,7 @@ with 'transpose' first."#
}
}
})
.into_pipeline_data(head, engine_state.ctrlc.clone()))
.into_pipeline_data(head, engine_state.signals().clone()))
} else {
Ok(PipelineData::Empty)
}
@ -185,7 +185,7 @@ with 'transpose' first."#
.and_then(|x| {
x.filter(
move |x| if !keep_empty { !x.is_nothing() } else { true },
engine_state.ctrlc.clone(),
engine_state.signals(),
)
})
.map(|data| data.set_metadata(metadata))

View File

@ -52,7 +52,6 @@ impl Command for Enumerate {
) -> Result<PipelineData, ShellError> {
let head = call.head;
let metadata = input.metadata();
let ctrlc = engine_state.ctrlc.clone();
Ok(input
.into_iter()
@ -66,7 +65,7 @@ impl Command for Enumerate {
head,
)
})
.into_pipeline_data_with_metadata(head, ctrlc, metadata))
.into_pipeline_data_with_metadata(head, engine_state.signals().clone(), metadata))
}
}

View File

@ -78,7 +78,7 @@ impl Command for Every {
None
}
})
.into_pipeline_data_with_metadata(call.head, engine_state.ctrlc.clone(), metadata))
.into_pipeline_data_with_metadata(call.head, engine_state.signals().clone(), metadata))
}
}

View File

@ -72,7 +72,7 @@ a variable. On the other hand, the "row condition" syntax is not supported."#
}
}
})
.into_pipeline_data(head, engine_state.ctrlc.clone()))
.into_pipeline_data(head, engine_state.signals().clone()))
}
PipelineData::ByteStream(stream, ..) => {
if let Some(chunks) = stream.chunks() {
@ -97,7 +97,7 @@ a variable. On the other hand, the "row condition" syntax is not supported."#
}
}
})
.into_pipeline_data(head, engine_state.ctrlc.clone()))
.into_pipeline_data(head, engine_state.signals().clone()))
} else {
Ok(PipelineData::Empty)
}
@ -117,7 +117,7 @@ a variable. On the other hand, the "row condition" syntax is not supported."#
Some(Value::error(err, span))
}
}
.into_pipeline_data(head, engine_state.ctrlc.clone()))
.into_pipeline_data(head, engine_state.signals().clone()))
}
}
.map(|data| data.set_metadata(metadata))

View File

@ -213,7 +213,6 @@ fn find_with_regex(
input: PipelineData,
) -> Result<PipelineData, ShellError> {
let span = call.head;
let ctrlc = engine_state.ctrlc.clone();
let config = engine_state.get_config().clone();
let insensitive = call.has_flag(engine_state, stack, "ignore-case")?;
@ -246,7 +245,7 @@ fn find_with_regex(
Value::List { vals, .. } => values_match_find(vals, &re, &config, invert),
_ => false,
},
ctrlc,
engine_state.signals(),
)
}
@ -349,18 +348,16 @@ fn find_with_rest_and_highlight(
input: PipelineData,
) -> Result<PipelineData, ShellError> {
let span = call.head;
let ctrlc = engine_state.ctrlc.clone();
let engine_state = engine_state.clone();
let config = engine_state.get_config().clone();
let filter_config = engine_state.get_config().clone();
let invert = call.has_flag(&engine_state, stack, "invert")?;
let terms = call.rest::<Value>(&engine_state, stack, 0)?;
let invert = call.has_flag(engine_state, stack, "invert")?;
let terms = call.rest::<Value>(engine_state, stack, 0)?;
let lower_terms = terms
.iter()
.map(|v| Value::string(v.to_expanded_string("", &config).to_lowercase(), span))
.collect::<Vec<Value>>();
let style_computer = StyleComputer::from_config(&engine_state, stack);
let style_computer = StyleComputer::from_config(engine_state, stack);
// Currently, search results all use the same style.
// Also note that this sample string is passed into user-written code (the closure that may or may not be
// defined for "string").
@ -369,7 +366,7 @@ fn find_with_rest_and_highlight(
style_computer.compute("search_result", &Value::string("search result", span));
let cols_to_search_in_map: Vec<_> = call
.get_flag(&engine_state, stack, "columns")?
.get_flag(engine_state, stack, "columns")?
.unwrap_or_default();
let cols_to_search_in_filter = cols_to_search_in_map.clone();
@ -401,7 +398,7 @@ fn find_with_rest_and_highlight(
_ => x,
}
},
ctrlc.clone(),
engine_state.signals(),
)?
.filter(
move |value| {
@ -414,7 +411,7 @@ fn find_with_rest_and_highlight(
invert,
)
},
ctrlc,
engine_state.signals(),
),
PipelineData::ListStream(stream, metadata) => {
let stream = stream.modify(|iter| {

View File

@ -1,4 +1,5 @@
use nu_engine::command_prelude::*;
use nu_protocol::Signals;
use std::io::Read;
#[derive(Clone)]
@ -133,8 +134,7 @@ fn first_helper(
}
}
Value::Range { val, .. } => {
let ctrlc = engine_state.ctrlc.clone();
let mut iter = val.into_range_iter(span, ctrlc.clone());
let mut iter = val.into_range_iter(span, Signals::empty());
if return_single_element {
if let Some(v) = iter.next() {
Ok(v.into_pipeline_data())
@ -142,9 +142,11 @@ fn first_helper(
Err(ShellError::AccessEmptyContent { span: head })
}
} else {
Ok(iter
.take(rows)
.into_pipeline_data_with_metadata(span, ctrlc, metadata))
Ok(iter.take(rows).into_pipeline_data_with_metadata(
span,
engine_state.signals().clone(),
metadata,
))
}
}
// Propagate errors by explicitly matching them before the final case.
@ -189,7 +191,7 @@ fn first_helper(
ByteStream::read(
reader.take(rows as u64),
head,
None,
Signals::empty(),
ByteStreamType::Binary,
),
metadata,

View File

@ -127,7 +127,7 @@ fn flatten(
input
.flat_map(
move |item| flat_value(&columns, item, flatten_all),
engine_state.ctrlc.clone(),
engine_state.signals(),
)
.map(|x| x.set_metadata(metadata))
}

View File

@ -62,7 +62,6 @@ If multiple cell paths are given, this will produce a list of values."#
let mut rest: Vec<CellPath> = call.rest(engine_state, stack, 1)?;
let ignore_errors = call.has_flag(engine_state, stack, "ignore-errors")?;
let sensitive = call.has_flag(engine_state, stack, "sensitive")?;
let ctrlc = engine_state.ctrlc.clone();
let metadata = input.metadata();
if ignore_errors {
@ -89,7 +88,9 @@ If multiple cell paths are given, this will produce a list of values."#
output.push(val?);
}
Ok(output.into_iter().into_pipeline_data(span, ctrlc))
Ok(output
.into_iter()
.into_pipeline_data(span, engine_state.signals().clone()))
}
.map(|x| x.set_metadata(metadata))
}

View File

@ -55,18 +55,19 @@ impl Command for Group {
) -> Result<PipelineData, ShellError> {
let head = call.head;
let group_size: Spanned<usize> = call.req(engine_state, stack, 0)?;
let ctrlc = engine_state.ctrlc.clone();
let metadata = input.metadata();
//FIXME: add in support for external redirection when engine-q supports it generally
let each_group_iterator = EachGroupIterator {
group_size: group_size.item,
input: Box::new(input.into_iter()),
span: head,
};
Ok(each_group_iterator.into_pipeline_data_with_metadata(head, ctrlc, metadata))
Ok(each_group_iterator.into_pipeline_data_with_metadata(
head,
engine_state.signals().clone(),
metadata,
))
}
}

View File

@ -222,7 +222,11 @@ fn insert(
Ok(pre_elems
.into_iter()
.chain(stream)
.into_pipeline_data_with_metadata(head, engine_state.ctrlc.clone(), metadata))
.into_pipeline_data_with_metadata(
head,
engine_state.signals().clone(),
metadata,
))
} else if let Value::Closure { val, .. } = replacement {
let mut closure = ClosureEval::new(engine_state, stack, *val);
let stream = stream.map(move |mut value| {

View File

@ -147,7 +147,7 @@ interleave
// Now that threads are writing to the channel, we just return it as a stream
Ok(rx
.into_iter()
.into_pipeline_data(head, engine_state.ctrlc.clone()))
.into_pipeline_data(head, engine_state.signals().clone()))
}
}
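
In `interleave`, the receiving end of a channel becomes the output stream, and it is the wrapping into pipeline data with `engine_state.signals().clone()` that keeps the drain interruptible while producer threads are still writing. A minimal, self-contained sketch of that idea — a single illustrative producer over `std::sync::mpsc`, not the command's actual thread setup, assuming `nu_engine::command_prelude` brings the same items into scope as in the files above:

```rust
use std::sync::mpsc;

use nu_engine::command_prelude::*;

// Sketch of the channel-to-stream shape above. The real `interleave` spawns
// one producer per closure; one producer is enough to show the wiring.
fn channel_as_stream(engine_state: &EngineState, head: Span) -> Result<PipelineData, ShellError> {
    let (tx, rx) = mpsc::channel();
    std::thread::spawn(move || {
        for i in 0..100 {
            // Stop producing if the consumer hung up (e.g. the pipeline was interrupted).
            if tx.send(Value::int(i, head)).is_err() {
                break;
            }
        }
    });
    // Wrapping the receiver with the engine's signals keeps the drain interruptible.
    Ok(rx
        .into_iter()
        .into_pipeline_data(head, engine_state.signals().clone()))
}
```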

View File

@ -67,7 +67,7 @@ impl Command for Items {
}
}
})
.into_pipeline_data(head, engine_state.ctrlc.clone()))
.into_pipeline_data(head, engine_state.signals().clone()))
}
Value::Error { error, .. } => Err(*error),
other => Err(ShellError::OnlySupportsThisInputType {

View File

@ -99,14 +99,10 @@ impl Command for Last {
let mut buf = VecDeque::new();
for row in iterator {
if nu_utils::ctrl_c::was_pressed(&engine_state.ctrlc) {
return Err(ShellError::InterruptedByUser { span: Some(head) });
}
engine_state.signals().check(head)?;
if buf.len() == rows {
buf.pop_front();
}
buf.push_back(row);
}
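
The `last` hunk above is the clearest illustration of the consolidation: the manual `was_pressed` test plus a hand-built `ShellError::InterruptedByUser` collapses into a single `engine_state.signals().check(head)?`. For orientation, here is a rough sketch of the shape such a type could take, inferred only from the call sites in this diff; the real `nu_protocol::Signals` may differ in representation and API:

```rust
use std::sync::{
    atomic::{AtomicBool, Ordering},
    Arc,
};

use nu_protocol::{ShellError, Span};

// Hypothetical reimplementation for illustration only, not the real
// `nu_protocol::Signals`.
#[derive(Clone)]
pub struct Signals {
    interrupt: Option<Arc<AtomicBool>>,
}

impl Signals {
    /// Stands in where call sites previously passed a `ctrlc` of `None`.
    pub const fn empty() -> Self {
        Signals { interrupt: None }
    }

    /// Replaces the old `nu_utils::ctrl_c::was_pressed` check plus the
    /// hand-rolled error construction at each call site.
    pub fn check(&self, span: Span) -> Result<(), ShellError> {
        let interrupted = self
            .interrupt
            .as_ref()
            .is_some_and(|flag| flag.load(Ordering::Relaxed));
        if interrupted {
            Err(ShellError::InterruptedByUser { span: Some(span) })
        } else {
            Ok(())
        }
    }
}
```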

View File

@ -26,7 +26,6 @@ impl Command for Lines {
input: PipelineData,
) -> Result<PipelineData, ShellError> {
let head = call.head;
let ctrlc = engine_state.ctrlc.clone();
let skip_empty = call.has_flag(engine_state, stack, "skip-empty")?;
let span = input.span().unwrap_or(call.head);
@ -91,7 +90,7 @@ impl Command for Lines {
Ok(line) => Value::string(line, head),
Err(err) => Value::error(err, head),
})
.into_pipeline_data(head, ctrlc))
.into_pipeline_data(head, engine_state.signals().clone()))
} else {
Ok(PipelineData::empty())
}

View File

@ -88,7 +88,6 @@ repeating this process with row 1, and so on."#
let head = call.head;
let merge_value: Value = call.req(engine_state, stack, 0)?;
let metadata = input.metadata();
let ctrlc = engine_state.ctrlc.clone();
match (&input, merge_value) {
// table (list of records)
@ -110,7 +109,11 @@ repeating this process with row 1, and so on."#
(Err(error), _) => Value::error(error, head),
});
Ok(res.into_pipeline_data_with_metadata(head, ctrlc, metadata))
Ok(res.into_pipeline_data_with_metadata(
head,
engine_state.signals().clone(),
metadata,
))
}
// record
(

View File

@ -144,7 +144,6 @@ impl Command for Move {
};
let metadata = input.metadata();
let ctrlc = engine_state.ctrlc.clone();
match input {
PipelineData::Value(Value::List { .. }, ..) | PipelineData::ListStream { .. } => {
@ -158,7 +157,11 @@ impl Command for Move {
Err(error) => Value::error(error, head),
});
Ok(res.into_pipeline_data_with_metadata(head, ctrlc, metadata))
Ok(res.into_pipeline_data_with_metadata(
head,
engine_state.signals().clone(),
metadata,
))
}
PipelineData::Value(Value::Record { val, .. }, ..) => {
Ok(move_record_columns(&val, &columns, &before_or_after, head)?

View File

@ -1,6 +1,6 @@
use super::utils::chain_error_with_input;
use nu_engine::{command_prelude::*, ClosureEvalOnce};
use nu_protocol::engine::Closure;
use nu_protocol::{engine::Closure, Signals};
use rayon::prelude::*;
#[derive(Clone)]
@ -158,12 +158,11 @@ impl Command for ParEach {
})
.collect::<Vec<_>>();
apply_order(vec).into_pipeline_data(span, engine_state.ctrlc.clone())
apply_order(vec).into_pipeline_data(span, engine_state.signals().clone())
})),
Value::Range { val, .. } => Ok(create_pool(max_threads)?.install(|| {
let ctrlc = engine_state.ctrlc.clone();
let vec = val
.into_range_iter(span, ctrlc.clone())
.into_range_iter(span, Signals::empty())
.enumerate()
.par_bridge()
.map(move |(index, value)| {
@ -184,7 +183,7 @@ impl Command for ParEach {
})
.collect::<Vec<_>>();
apply_order(vec).into_pipeline_data(span, ctrlc)
apply_order(vec).into_pipeline_data(span, engine_state.signals().clone())
})),
// This match allows non-iterables to be accepted,
// which is currently considered undesirable (Nov 2022).
@ -212,7 +211,7 @@ impl Command for ParEach {
})
.collect::<Vec<_>>();
apply_order(vec).into_pipeline_data(head, engine_state.ctrlc.clone())
apply_order(vec).into_pipeline_data(head, engine_state.signals().clone())
})),
PipelineData::ByteStream(stream, ..) => {
if let Some(chunks) = stream.chunks() {
@ -236,14 +235,14 @@ impl Command for ParEach {
})
.collect::<Vec<_>>();
apply_order(vec).into_pipeline_data(head, engine_state.ctrlc.clone())
apply_order(vec).into_pipeline_data(head, engine_state.signals().clone())
}))
} else {
Ok(PipelineData::empty())
}
}
}
.and_then(|x| x.filter(|v| !v.is_nothing(), engine_state.ctrlc.clone()))
.and_then(|x| x.filter(|v| !v.is_nothing(), engine_state.signals()))
.map(|data| data.set_metadata(metadata))
}
}
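
`par_each` shows both conventions side by side: adapters that only need the interrupt while they run borrow it (`.filter(..., engine_state.signals())`), data that outlives the call takes an owned copy (`into_pipeline_data(span, engine_state.signals().clone())`), and the range iterator drained entirely inside the rayon pool gets `Signals::empty()`. A condensed sketch of that division, with `run_closure` and `apply_order` as illustrative stand-ins for the real helpers and the same prelude assumption as above:

```rust
use nu_engine::command_prelude::*;
use nu_protocol::Signals;
use rayon::prelude::*;

// Illustrative stand-in for whatever per-item work the closure performs.
fn run_closure(value: Value) -> Value {
    value
}

// Restore input order after the parallel bridge scrambles it.
fn apply_order(mut vec: Vec<(usize, Value)>) -> impl Iterator<Item = Value> + Send + 'static {
    vec.sort_by_key(|(index, _)| *index);
    vec.into_iter().map(|(_, value)| value)
}

// Sketch of the par_each shape above: values are processed eagerly on the
// rayon pool, re-ordered, and handed back as a stream that owns a clone of
// the engine's signals.
fn par_map_range(engine_state: &EngineState, val: nu_protocol::Range, span: Span) -> PipelineData {
    let vec: Vec<(usize, Value)> = val
        .into_range_iter(span, Signals::empty()) // drained entirely inside the pool
        .enumerate()
        .par_bridge()
        .map(|(index, value)| (index, run_closure(value)))
        .collect();

    apply_order(vec).into_pipeline_data(span, engine_state.signals().clone())
}
```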

View File

@ -117,7 +117,7 @@ only unwrap the outer list, and leave the variable's contents untouched."#
.into_pipeline_data()
.into_iter()
.chain(input)
.into_pipeline_data_with_metadata(call.head, engine_state.ctrlc.clone(), metadata))
.into_pipeline_data_with_metadata(call.head, engine_state.signals().clone(), metadata))
}
}

View File

@ -106,7 +106,7 @@ impl Command for Range {
Ok(PipelineData::Value(Value::nothing(head), None))
} else {
let iter = v.into_iter().skip(from).take(to - from + 1);
Ok(iter.into_pipeline_data(head, engine_state.ctrlc.clone()))
Ok(iter.into_pipeline_data(head, engine_state.signals().clone()))
}
} else {
let from = start as usize;
@ -116,7 +116,7 @@ impl Command for Range {
Ok(PipelineData::Value(Value::nothing(head), None))
} else {
let iter = input.into_iter().skip(from).take(to - from + 1);
Ok(iter.into_pipeline_data(head, engine_state.ctrlc.clone()))
Ok(iter.into_pipeline_data(head, engine_state.signals().clone()))
}
}
.map(|x| x.set_metadata(metadata))

Some files were not shown because too many files have changed in this diff.