mirror of
https://github.com/nushell/nushell.git
synced 2025-08-09 09:45:50 +02:00
Add and use new `Signals` struct (#13314)
# Description

This PR introduces a new `Signals` struct to replace our ad-hoc passing around of `ctrlc: Option<Arc<AtomicBool>>`. Doing so has a few benefits:

- We can better enforce when/where resetting or triggering an interrupt is allowed.
- Consolidates `nu_utils::ctrl_c::was_pressed` and other ad-hoc re-implementations into a single place: `Signals::check`.
- This allows us to add other types of signals later if we want. E.g., exiting or suspension.
- Similarly, we can more easily change the underlying implementation if we need to in the future.
- Places that used to have a `ctrlc` of `None` now use `Signals::empty()`, so we can double check these usages for correctness in the future.
This commit is contained in:
@ -78,7 +78,7 @@ impl Command for BytesAdd {
|
||||
end,
|
||||
cell_paths,
|
||||
};
|
||||
operate(add, arg, input, call.head, engine_state.ctrlc.clone())
|
||||
operate(add, arg, input, call.head, engine_state.signals())
|
||||
}
|
||||
|
||||
fn examples(&self) -> Vec<Example> {
|
||||
|
@ -83,7 +83,7 @@ impl Command for BytesAt {
|
||||
cell_paths,
|
||||
};
|
||||
|
||||
operate(action, args, input, call.head, engine_state.ctrlc.clone())
|
||||
operate(action, args, input, call.head, engine_state.signals())
|
||||
}
|
||||
|
||||
fn examples(&self) -> Vec<Example> {
|
||||
|
@ -60,7 +60,12 @@ impl Command for BytesCollect {
|
||||
)
|
||||
.flatten();
|
||||
|
||||
let output = ByteStream::from_result_iter(iter, span, None, ByteStreamType::Binary);
|
||||
let output = ByteStream::from_result_iter(
|
||||
iter,
|
||||
span,
|
||||
engine_state.signals().clone(),
|
||||
ByteStreamType::Binary,
|
||||
);
|
||||
|
||||
Ok(PipelineData::ByteStream(output, metadata))
|
||||
}
|
||||
|
@ -102,7 +102,7 @@ impl Command for BytesEndsWith {
|
||||
pattern,
|
||||
cell_paths,
|
||||
};
|
||||
operate(ends_with, arg, input, head, engine_state.ctrlc.clone())
|
||||
operate(ends_with, arg, input, head, engine_state.signals())
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -71,7 +71,7 @@ impl Command for BytesIndexOf {
|
||||
all: call.has_flag(engine_state, stack, "all")?,
|
||||
cell_paths,
|
||||
};
|
||||
operate(index_of, arg, input, call.head, engine_state.ctrlc.clone())
|
||||
operate(index_of, arg, input, call.head, engine_state.signals())
|
||||
}
|
||||
|
||||
fn examples(&self) -> Vec<Example> {
|
||||
|
@ -46,7 +46,7 @@ impl Command for BytesLen {
|
||||
) -> Result<PipelineData, ShellError> {
|
||||
let cell_paths: Vec<CellPath> = call.rest(engine_state, stack, 1)?;
|
||||
let arg = CellPathOnlyArgs::from(cell_paths);
|
||||
operate(length, arg, input, call.head, engine_state.ctrlc.clone())
|
||||
operate(length, arg, input, call.head, engine_state.signals())
|
||||
}
|
||||
|
||||
fn examples(&self) -> Vec<Example> {
|
||||
|
@ -73,7 +73,7 @@ impl Command for BytesRemove {
|
||||
all: call.has_flag(engine_state, stack, "all")?,
|
||||
};
|
||||
|
||||
operate(remove, arg, input, call.head, engine_state.ctrlc.clone())
|
||||
operate(remove, arg, input, call.head, engine_state.signals())
|
||||
}
|
||||
|
||||
fn examples(&self) -> Vec<Example> {
|
||||
|
@ -73,7 +73,7 @@ impl Command for BytesReplace {
|
||||
all: call.has_flag(engine_state, stack, "all")?,
|
||||
};
|
||||
|
||||
operate(replace, arg, input, call.head, engine_state.ctrlc.clone())
|
||||
operate(replace, arg, input, call.head, engine_state.signals())
|
||||
}
|
||||
|
||||
fn examples(&self) -> Vec<Example> {
|
||||
|
@ -42,7 +42,7 @@ impl Command for BytesReverse {
|
||||
) -> Result<PipelineData, ShellError> {
|
||||
let cell_paths: Vec<CellPath> = call.rest(engine_state, stack, 0)?;
|
||||
let arg = CellPathOnlyArgs::from(cell_paths);
|
||||
operate(reverse, arg, input, call.head, engine_state.ctrlc.clone())
|
||||
operate(reverse, arg, input, call.head, engine_state.signals())
|
||||
}
|
||||
|
||||
fn examples(&self) -> Vec<Example> {
|
||||
|
@ -79,7 +79,7 @@ impl Command for BytesStartsWith {
|
||||
pattern,
|
||||
cell_paths,
|
||||
};
|
||||
operate(starts_with, arg, input, head, engine_state.ctrlc.clone())
|
||||
operate(starts_with, arg, input, head, engine_state.signals())
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -165,7 +165,7 @@ fn fill(
|
||||
cell_paths,
|
||||
};
|
||||
|
||||
operate(action, arg, input, call.head, engine_state.ctrlc.clone())
|
||||
operate(action, arg, input, call.head, engine_state.signals())
|
||||
}
|
||||
|
||||
fn action(input: &Value, args: &Arguments, span: Span) -> Value {
|
||||
|
@ -138,7 +138,7 @@ fn into_binary(
|
||||
cell_paths,
|
||||
compact: call.has_flag(engine_state, stack, "compact")?,
|
||||
};
|
||||
operate(action, args, input, head, engine_state.ctrlc.clone())
|
||||
operate(action, args, input, head, engine_state.signals())
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -107,7 +107,7 @@ fn into_bool(
|
||||
) -> Result<PipelineData, ShellError> {
|
||||
let cell_paths: Vec<CellPath> = call.rest(engine_state, stack, 0)?;
|
||||
let args = CellPathOnlyArgs::from(cell_paths);
|
||||
operate(action, args, input, call.head, engine_state.ctrlc.clone())
|
||||
operate(action, args, input, call.head, engine_state.signals())
|
||||
}
|
||||
|
||||
fn string_to_boolean(s: &str, span: Span) -> Result<bool, ShellError> {
|
||||
|
@ -141,7 +141,7 @@ impl Command for SubCommand {
|
||||
zone_options,
|
||||
cell_paths,
|
||||
};
|
||||
operate(action, args, input, call.head, engine_state.ctrlc.clone())
|
||||
operate(action, args, input, call.head, engine_state.signals())
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -166,7 +166,7 @@ fn into_duration(
|
||||
ret
|
||||
}
|
||||
},
|
||||
engine_state.ctrlc.clone(),
|
||||
engine_state.signals(),
|
||||
)
|
||||
}
|
||||
|
||||
|
@ -68,7 +68,7 @@ impl Command for SubCommand {
|
||||
) -> Result<PipelineData, ShellError> {
|
||||
let cell_paths: Vec<CellPath> = call.rest(engine_state, stack, 0)?;
|
||||
let args = CellPathOnlyArgs::from(cell_paths);
|
||||
operate(action, args, input, call.head, engine_state.ctrlc.clone())
|
||||
operate(action, args, input, call.head, engine_state.signals())
|
||||
}
|
||||
|
||||
fn examples(&self) -> Vec<Example> {
|
||||
|
@ -49,7 +49,7 @@ impl Command for SubCommand {
|
||||
) -> Result<PipelineData, ShellError> {
|
||||
let cell_paths: Vec<CellPath> = call.rest(engine_state, stack, 0)?;
|
||||
let args = CellPathOnlyArgs::from(cell_paths);
|
||||
operate(action, args, input, call.head, engine_state.ctrlc.clone())
|
||||
operate(action, args, input, call.head, engine_state.signals())
|
||||
}
|
||||
|
||||
fn examples(&self) -> Vec<Example> {
|
||||
|
@ -87,7 +87,7 @@ fn glob_helper(
|
||||
Ok(Value::glob(stream.into_string()?, false, head).into_pipeline_data())
|
||||
} else {
|
||||
let args = Arguments { cell_paths };
|
||||
operate(action, args, input, head, engine_state.ctrlc.clone())
|
||||
operate(action, args, input, head, engine_state.signals())
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -158,7 +158,7 @@ impl Command for SubCommand {
|
||||
signed,
|
||||
cell_paths,
|
||||
};
|
||||
operate(action, args, input, call.head, engine_state.ctrlc.clone())
|
||||
operate(action, args, input, call.head, engine_state.signals())
|
||||
}
|
||||
|
||||
fn examples(&self) -> Vec<Example> {
|
||||
|
@ -125,7 +125,7 @@ fn into_record(
|
||||
),
|
||||
},
|
||||
Value::Range { val, .. } => Value::record(
|
||||
val.into_range_iter(span, engine_state.ctrlc.clone())
|
||||
val.into_range_iter(span, engine_state.signals().clone())
|
||||
.enumerate()
|
||||
.map(|(idx, val)| (format!("{idx}"), val))
|
||||
.collect(),
|
||||
|
@ -180,7 +180,7 @@ fn string_helper(
|
||||
cell_paths,
|
||||
config,
|
||||
};
|
||||
operate(action, args, input, head, engine_state.ctrlc.clone())
|
||||
operate(action, args, input, head, engine_state.signals())
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -57,14 +57,12 @@ impl Command for IntoValue {
|
||||
call: &Call,
|
||||
input: PipelineData,
|
||||
) -> Result<PipelineData, ShellError> {
|
||||
let engine_state = engine_state.clone();
|
||||
let metadata = input.metadata();
|
||||
let ctrlc = engine_state.ctrlc.clone();
|
||||
let span = call.head;
|
||||
let display_as_filesizes = call.has_flag(&engine_state, stack, "prefer-filesizes")?;
|
||||
let display_as_filesizes = call.has_flag(engine_state, stack, "prefer-filesizes")?;
|
||||
|
||||
// the columns to update
|
||||
let columns: Option<Value> = call.get_flag(&engine_state, stack, "columns")?;
|
||||
let columns: Option<Value> = call.get_flag(engine_state, stack, "columns")?;
|
||||
let columns: Option<HashSet<String>> = match columns {
|
||||
Some(val) => Some(
|
||||
val.into_list()?
|
||||
@ -81,7 +79,7 @@ impl Command for IntoValue {
|
||||
display_as_filesizes,
|
||||
span,
|
||||
}
|
||||
.into_pipeline_data(span, ctrlc)
|
||||
.into_pipeline_data(span, engine_state.signals().clone())
|
||||
.set_metadata(metadata))
|
||||
}
|
||||
}
|
||||
|
@ -2,13 +2,8 @@ use crate::database::values::sqlite::{open_sqlite_db, values_to_sql};
|
||||
use nu_engine::command_prelude::*;
|
||||
|
||||
use itertools::Itertools;
|
||||
use std::{
|
||||
path::Path,
|
||||
sync::{
|
||||
atomic::{AtomicBool, Ordering},
|
||||
Arc,
|
||||
},
|
||||
};
|
||||
use nu_protocol::Signals;
|
||||
use std::path::Path;
|
||||
|
||||
pub const DEFAULT_TABLE_NAME: &str = "main";
|
||||
|
||||
@ -188,23 +183,18 @@ fn operate(
|
||||
let file_name: Spanned<String> = call.req(engine_state, stack, 0)?;
|
||||
let table_name: Option<Spanned<String>> = call.get_flag(engine_state, stack, "table-name")?;
|
||||
let table = Table::new(&file_name, table_name)?;
|
||||
let ctrl_c = engine_state.ctrlc.clone();
|
||||
|
||||
match action(input, table, span, ctrl_c) {
|
||||
Ok(val) => Ok(val.into_pipeline_data()),
|
||||
Err(e) => Err(e),
|
||||
}
|
||||
Ok(action(input, table, span, engine_state.signals())?.into_pipeline_data())
|
||||
}
|
||||
|
||||
fn action(
|
||||
input: PipelineData,
|
||||
table: Table,
|
||||
span: Span,
|
||||
ctrl_c: Option<Arc<AtomicBool>>,
|
||||
signals: &Signals,
|
||||
) -> Result<Value, ShellError> {
|
||||
match input {
|
||||
PipelineData::ListStream(stream, _) => {
|
||||
insert_in_transaction(stream.into_iter(), span, table, ctrl_c)
|
||||
insert_in_transaction(stream.into_iter(), span, table, signals)
|
||||
}
|
||||
PipelineData::Value(
|
||||
Value::List {
|
||||
@ -212,9 +202,9 @@ fn action(
|
||||
internal_span,
|
||||
},
|
||||
_,
|
||||
) => insert_in_transaction(vals.into_iter(), internal_span, table, ctrl_c),
|
||||
) => insert_in_transaction(vals.into_iter(), internal_span, table, signals),
|
||||
PipelineData::Value(val, _) => {
|
||||
insert_in_transaction(std::iter::once(val), span, table, ctrl_c)
|
||||
insert_in_transaction(std::iter::once(val), span, table, signals)
|
||||
}
|
||||
_ => Err(ShellError::OnlySupportsThisInputType {
|
||||
exp_input_type: "list".into(),
|
||||
@ -229,7 +219,7 @@ fn insert_in_transaction(
|
||||
stream: impl Iterator<Item = Value>,
|
||||
span: Span,
|
||||
mut table: Table,
|
||||
ctrl_c: Option<Arc<AtomicBool>>,
|
||||
signals: &Signals,
|
||||
) -> Result<Value, ShellError> {
|
||||
let mut stream = stream.peekable();
|
||||
let first_val = match stream.peek() {
|
||||
@ -251,17 +241,15 @@ fn insert_in_transaction(
|
||||
let tx = table.try_init(&first_val)?;
|
||||
|
||||
for stream_value in stream {
|
||||
if let Some(ref ctrlc) = ctrl_c {
|
||||
if ctrlc.load(Ordering::Relaxed) {
|
||||
tx.rollback().map_err(|e| ShellError::GenericError {
|
||||
error: "Failed to rollback SQLite transaction".into(),
|
||||
msg: e.to_string(),
|
||||
span: None,
|
||||
help: None,
|
||||
inner: Vec::new(),
|
||||
})?;
|
||||
return Err(ShellError::InterruptedByUser { span: None });
|
||||
}
|
||||
if let Err(err) = signals.check(span) {
|
||||
tx.rollback().map_err(|e| ShellError::GenericError {
|
||||
error: "Failed to rollback SQLite transaction".into(),
|
||||
msg: e.to_string(),
|
||||
span: None,
|
||||
help: None,
|
||||
inner: Vec::new(),
|
||||
})?;
|
||||
return Err(err);
|
||||
}
|
||||
|
||||
let val = stream_value.as_record()?;
|
||||
|
@ -2,7 +2,7 @@ use super::definitions::{
|
||||
db_column::DbColumn, db_constraint::DbConstraint, db_foreignkey::DbForeignKey,
|
||||
db_index::DbIndex, db_table::DbTable,
|
||||
};
|
||||
use nu_protocol::{CustomValue, PipelineData, Record, ShellError, Span, Spanned, Value};
|
||||
use nu_protocol::{CustomValue, PipelineData, Record, ShellError, Signals, Span, Spanned, Value};
|
||||
use rusqlite::{
|
||||
types::ValueRef, Connection, DatabaseName, Error as SqliteError, OpenFlags, Row, Statement,
|
||||
ToSql,
|
||||
@ -12,7 +12,6 @@ use std::{
|
||||
fs::File,
|
||||
io::Read,
|
||||
path::{Path, PathBuf},
|
||||
sync::{atomic::AtomicBool, Arc},
|
||||
};
|
||||
|
||||
const SQLITE_MAGIC_BYTES: &[u8] = "SQLite format 3\0".as_bytes();
|
||||
@ -24,25 +23,21 @@ pub struct SQLiteDatabase {
|
||||
// 1) YAGNI, 2) it's not obvious how cloning a connection could work, 3) state
|
||||
// management gets tricky quick. Revisit this approach if we find a compelling use case.
|
||||
pub path: PathBuf,
|
||||
#[serde(skip)]
|
||||
#[serde(skip, default = "Signals::empty")]
|
||||
// this understandably can't be serialized. think that's OK, I'm not aware of a
|
||||
// reason why a CustomValue would be serialized outside of a plugin
|
||||
ctrlc: Option<Arc<AtomicBool>>,
|
||||
signals: Signals,
|
||||
}
|
||||
|
||||
impl SQLiteDatabase {
|
||||
pub fn new(path: &Path, ctrlc: Option<Arc<AtomicBool>>) -> Self {
|
||||
pub fn new(path: &Path, signals: Signals) -> Self {
|
||||
Self {
|
||||
path: PathBuf::from(path),
|
||||
ctrlc,
|
||||
signals,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn try_from_path(
|
||||
path: &Path,
|
||||
span: Span,
|
||||
ctrlc: Option<Arc<AtomicBool>>,
|
||||
) -> Result<Self, ShellError> {
|
||||
pub fn try_from_path(path: &Path, span: Span, signals: Signals) -> Result<Self, ShellError> {
|
||||
let mut file = File::open(path).map_err(|e| ShellError::ReadingFile {
|
||||
msg: e.to_string(),
|
||||
span,
|
||||
@ -56,7 +51,7 @@ impl SQLiteDatabase {
|
||||
})
|
||||
.and_then(|_| {
|
||||
if buf == SQLITE_MAGIC_BYTES {
|
||||
Ok(SQLiteDatabase::new(path, ctrlc))
|
||||
Ok(SQLiteDatabase::new(path, signals))
|
||||
} else {
|
||||
Err(ShellError::ReadingFile {
|
||||
msg: "Not a SQLite file".into(),
|
||||
@ -72,7 +67,7 @@ impl SQLiteDatabase {
|
||||
Value::Custom { val, .. } => match val.as_any().downcast_ref::<Self>() {
|
||||
Some(db) => Ok(Self {
|
||||
path: db.path.clone(),
|
||||
ctrlc: db.ctrlc.clone(),
|
||||
signals: db.signals.clone(),
|
||||
}),
|
||||
None => Err(ShellError::CantConvert {
|
||||
to_type: "database".into(),
|
||||
@ -107,16 +102,8 @@ impl SQLiteDatabase {
|
||||
call_span: Span,
|
||||
) -> Result<Value, ShellError> {
|
||||
let conn = open_sqlite_db(&self.path, call_span)?;
|
||||
|
||||
let stream = run_sql_query(conn, sql, params, self.ctrlc.clone()).map_err(|e| {
|
||||
ShellError::GenericError {
|
||||
error: "Failed to query SQLite database".into(),
|
||||
msg: e.to_string(),
|
||||
span: Some(sql.span),
|
||||
help: None,
|
||||
inner: vec![],
|
||||
}
|
||||
})?;
|
||||
let stream = run_sql_query(conn, sql, params, &self.signals)
|
||||
.map_err(|e| e.into_shell_error(sql.span, "Failed to query SQLite database"))?;
|
||||
|
||||
Ok(stream)
|
||||
}
|
||||
@ -352,12 +339,7 @@ impl SQLiteDatabase {
|
||||
|
||||
impl CustomValue for SQLiteDatabase {
|
||||
fn clone_value(&self, span: Span) -> Value {
|
||||
let cloned = SQLiteDatabase {
|
||||
path: self.path.clone(),
|
||||
ctrlc: self.ctrlc.clone(),
|
||||
};
|
||||
|
||||
Value::custom(Box::new(cloned), span)
|
||||
Value::custom(Box::new(self.clone()), span)
|
||||
}
|
||||
|
||||
fn type_name(&self) -> String {
|
||||
@ -366,13 +348,8 @@ impl CustomValue for SQLiteDatabase {
|
||||
|
||||
fn to_base_value(&self, span: Span) -> Result<Value, ShellError> {
|
||||
let db = open_sqlite_db(&self.path, span)?;
|
||||
read_entire_sqlite_db(db, span, self.ctrlc.clone()).map_err(|e| ShellError::GenericError {
|
||||
error: "Failed to read from SQLite database".into(),
|
||||
msg: e.to_string(),
|
||||
span: Some(span),
|
||||
help: None,
|
||||
inner: vec![],
|
||||
})
|
||||
read_entire_sqlite_db(db, span, &self.signals)
|
||||
.map_err(|e| e.into_shell_error(span, "Failed to read from SQLite database"))
|
||||
}
|
||||
|
||||
fn as_any(&self) -> &dyn std::any::Any {
|
||||
@ -396,20 +373,12 @@ impl CustomValue for SQLiteDatabase {
|
||||
fn follow_path_string(
|
||||
&self,
|
||||
_self_span: Span,
|
||||
_column_name: String,
|
||||
column_name: String,
|
||||
path_span: Span,
|
||||
) -> Result<Value, ShellError> {
|
||||
let db = open_sqlite_db(&self.path, path_span)?;
|
||||
|
||||
read_single_table(db, _column_name, path_span, self.ctrlc.clone()).map_err(|e| {
|
||||
ShellError::GenericError {
|
||||
error: "Failed to read from SQLite database".into(),
|
||||
msg: e.to_string(),
|
||||
span: Some(path_span),
|
||||
help: None,
|
||||
inner: vec![],
|
||||
}
|
||||
})
|
||||
read_single_table(db, column_name, path_span, &self.signals)
|
||||
.map_err(|e| e.into_shell_error(path_span, "Failed to read from SQLite database"))
|
||||
}
|
||||
|
||||
fn typetag_name(&self) -> &'static str {
|
||||
@ -426,12 +395,12 @@ pub fn open_sqlite_db(path: &Path, call_span: Span) -> Result<Connection, ShellE
|
||||
open_connection_in_memory_custom()
|
||||
} else {
|
||||
let path = path.to_string_lossy().to_string();
|
||||
Connection::open(path).map_err(|e| ShellError::GenericError {
|
||||
Connection::open(path).map_err(|err| ShellError::GenericError {
|
||||
error: "Failed to open SQLite database".into(),
|
||||
msg: e.to_string(),
|
||||
msg: err.to_string(),
|
||||
span: Some(call_span),
|
||||
help: None,
|
||||
inner: vec![],
|
||||
inner: Vec::new(),
|
||||
})
|
||||
}
|
||||
}
|
||||
@ -440,11 +409,10 @@ fn run_sql_query(
|
||||
conn: Connection,
|
||||
sql: &Spanned<String>,
|
||||
params: NuSqlParams,
|
||||
ctrlc: Option<Arc<AtomicBool>>,
|
||||
) -> Result<Value, SqliteError> {
|
||||
signals: &Signals,
|
||||
) -> Result<Value, SqliteOrShellError> {
|
||||
let stmt = conn.prepare(&sql.item)?;
|
||||
|
||||
prepared_statement_to_nu_list(stmt, params, sql.span, ctrlc)
|
||||
prepared_statement_to_nu_list(stmt, params, sql.span, signals)
|
||||
}
|
||||
|
||||
// This is taken from to text local_into_string but tweaks it a bit so that certain formatting does not happen
|
||||
@ -534,23 +502,56 @@ pub fn nu_value_to_params(value: Value) -> Result<NuSqlParams, ShellError> {
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
enum SqliteOrShellError {
|
||||
SqliteError(SqliteError),
|
||||
ShellError(ShellError),
|
||||
}
|
||||
|
||||
impl From<SqliteError> for SqliteOrShellError {
|
||||
fn from(error: SqliteError) -> Self {
|
||||
Self::SqliteError(error)
|
||||
}
|
||||
}
|
||||
|
||||
impl From<ShellError> for SqliteOrShellError {
|
||||
fn from(error: ShellError) -> Self {
|
||||
Self::ShellError(error)
|
||||
}
|
||||
}
|
||||
|
||||
impl SqliteOrShellError {
|
||||
fn into_shell_error(self, span: Span, msg: &str) -> ShellError {
|
||||
match self {
|
||||
Self::SqliteError(err) => ShellError::GenericError {
|
||||
error: msg.into(),
|
||||
msg: err.to_string(),
|
||||
span: Some(span),
|
||||
help: None,
|
||||
inner: Vec::new(),
|
||||
},
|
||||
Self::ShellError(err) => err,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn read_single_table(
|
||||
conn: Connection,
|
||||
table_name: String,
|
||||
call_span: Span,
|
||||
ctrlc: Option<Arc<AtomicBool>>,
|
||||
) -> Result<Value, SqliteError> {
|
||||
signals: &Signals,
|
||||
) -> Result<Value, SqliteOrShellError> {
|
||||
// TODO: Should use params here?
|
||||
let stmt = conn.prepare(&format!("SELECT * FROM [{table_name}]"))?;
|
||||
prepared_statement_to_nu_list(stmt, NuSqlParams::default(), call_span, ctrlc)
|
||||
prepared_statement_to_nu_list(stmt, NuSqlParams::default(), call_span, signals)
|
||||
}
|
||||
|
||||
fn prepared_statement_to_nu_list(
|
||||
mut stmt: Statement,
|
||||
params: NuSqlParams,
|
||||
call_span: Span,
|
||||
ctrlc: Option<Arc<AtomicBool>>,
|
||||
) -> Result<Value, SqliteError> {
|
||||
signals: &Signals,
|
||||
) -> Result<Value, SqliteOrShellError> {
|
||||
let column_names = stmt
|
||||
.column_names()
|
||||
.into_iter()
|
||||
@ -576,11 +577,7 @@ fn prepared_statement_to_nu_list(
|
||||
let mut row_values = vec![];
|
||||
|
||||
for row_result in row_results {
|
||||
if nu_utils::ctrl_c::was_pressed(&ctrlc) {
|
||||
// return whatever we have so far, let the caller decide whether to use it
|
||||
return Ok(Value::list(row_values, call_span));
|
||||
}
|
||||
|
||||
signals.check(call_span)?;
|
||||
if let Ok(row_value) = row_result {
|
||||
row_values.push(row_value);
|
||||
}
|
||||
@ -606,11 +603,7 @@ fn prepared_statement_to_nu_list(
|
||||
let mut row_values = vec![];
|
||||
|
||||
for row_result in row_results {
|
||||
if nu_utils::ctrl_c::was_pressed(&ctrlc) {
|
||||
// return whatever we have so far, let the caller decide whether to use it
|
||||
return Ok(Value::list(row_values, call_span));
|
||||
}
|
||||
|
||||
signals.check(call_span)?;
|
||||
if let Ok(row_value) = row_result {
|
||||
row_values.push(row_value);
|
||||
}
|
||||
@ -626,8 +619,8 @@ fn prepared_statement_to_nu_list(
|
||||
fn read_entire_sqlite_db(
|
||||
conn: Connection,
|
||||
call_span: Span,
|
||||
ctrlc: Option<Arc<AtomicBool>>,
|
||||
) -> Result<Value, SqliteError> {
|
||||
signals: &Signals,
|
||||
) -> Result<Value, SqliteOrShellError> {
|
||||
let mut tables = Record::new();
|
||||
|
||||
let mut get_table_names =
|
||||
@ -638,12 +631,8 @@ fn read_entire_sqlite_db(
|
||||
let table_name: String = row?;
|
||||
// TODO: Should use params here?
|
||||
let table_stmt = conn.prepare(&format!("select * from [{table_name}]"))?;
|
||||
let rows = prepared_statement_to_nu_list(
|
||||
table_stmt,
|
||||
NuSqlParams::default(),
|
||||
call_span,
|
||||
ctrlc.clone(),
|
||||
)?;
|
||||
let rows =
|
||||
prepared_statement_to_nu_list(table_stmt, NuSqlParams::default(), call_span, signals)?;
|
||||
tables.push(table_name, rows);
|
||||
}
|
||||
|
||||
@ -710,7 +699,7 @@ mod test {
|
||||
#[test]
|
||||
fn can_read_empty_db() {
|
||||
let db = open_connection_in_memory().unwrap();
|
||||
let converted_db = read_entire_sqlite_db(db, Span::test_data(), None).unwrap();
|
||||
let converted_db = read_entire_sqlite_db(db, Span::test_data(), &Signals::empty()).unwrap();
|
||||
|
||||
let expected = Value::test_record(Record::new());
|
||||
|
||||
@ -730,7 +719,7 @@ mod test {
|
||||
[],
|
||||
)
|
||||
.unwrap();
|
||||
let converted_db = read_entire_sqlite_db(db, Span::test_data(), None).unwrap();
|
||||
let converted_db = read_entire_sqlite_db(db, Span::test_data(), &Signals::empty()).unwrap();
|
||||
|
||||
let expected = Value::test_record(record! {
|
||||
"person" => Value::test_list(vec![]),
|
||||
@ -759,7 +748,7 @@ mod test {
|
||||
db.execute("INSERT INTO item (id, name) VALUES (456, 'foo bar')", [])
|
||||
.unwrap();
|
||||
|
||||
let converted_db = read_entire_sqlite_db(db, span, None).unwrap();
|
||||
let converted_db = read_entire_sqlite_db(db, span, &Signals::empty()).unwrap();
|
||||
|
||||
let expected = Value::test_record(record! {
|
||||
"item" => Value::test_list(
|
||||
|
@ -50,7 +50,7 @@ impl Command for SubCommand {
|
||||
if matches!(input, PipelineData::Empty) {
|
||||
return Err(ShellError::PipelineEmpty { dst_span: head });
|
||||
}
|
||||
input.map(move |value| helper(value, head), engine_state.ctrlc.clone())
|
||||
input.map(move |value| helper(value, head), engine_state.signals())
|
||||
}
|
||||
|
||||
fn examples(&self) -> Vec<Example> {
|
||||
|
@ -40,7 +40,7 @@ impl Command for SubCommand {
|
||||
head,
|
||||
)
|
||||
})
|
||||
.into_pipeline_data(head, engine_state.ctrlc.clone()))
|
||||
.into_pipeline_data(head, engine_state.signals().clone()))
|
||||
}
|
||||
|
||||
fn examples(&self) -> Vec<Example> {
|
||||
|
@ -40,7 +40,7 @@ impl Command for SubCommand {
|
||||
if matches!(input, PipelineData::Empty) {
|
||||
return Err(ShellError::PipelineEmpty { dst_span: head });
|
||||
}
|
||||
input.map(move |value| helper(value, head), engine_state.ctrlc.clone())
|
||||
input.map(move |value| helper(value, head), engine_state.signals())
|
||||
}
|
||||
|
||||
fn examples(&self) -> Vec<Example> {
|
||||
|
@ -40,7 +40,7 @@ impl Command for SubCommand {
|
||||
if matches!(input, PipelineData::Empty) {
|
||||
return Err(ShellError::PipelineEmpty { dst_span: head });
|
||||
}
|
||||
input.map(move |value| helper(value, head), engine_state.ctrlc.clone())
|
||||
input.map(move |value| helper(value, head), engine_state.signals())
|
||||
}
|
||||
|
||||
fn examples(&self) -> Vec<Example> {
|
||||
|
@ -55,7 +55,7 @@ impl Command for SubCommand {
|
||||
}
|
||||
input.map(
|
||||
move |value| helper(value, head, &timezone),
|
||||
engine_state.ctrlc.clone(),
|
||||
engine_state.signals(),
|
||||
)
|
||||
}
|
||||
|
||||
|
@ -46,7 +46,7 @@ impl Command for Debug {
|
||||
Value::string(x.to_expanded_string(", ", &config), head)
|
||||
}
|
||||
},
|
||||
engine_state.ctrlc.clone(),
|
||||
engine_state.signals(),
|
||||
)
|
||||
}
|
||||
|
||||
|
@ -48,7 +48,7 @@ impl Command for MetadataSet {
|
||||
let ds_fp: Option<String> = call.get_flag(engine_state, stack, "datasource-filepath")?;
|
||||
let ds_ls = call.has_flag(engine_state, stack, "datasource-ls")?;
|
||||
let content_type: Option<String> = call.get_flag(engine_state, stack, "content-type")?;
|
||||
|
||||
let signals = engine_state.signals().clone();
|
||||
let metadata = input
|
||||
.metadata()
|
||||
.clone()
|
||||
@ -58,19 +58,15 @@ impl Command for MetadataSet {
|
||||
match (ds_fp, ds_ls) {
|
||||
(Some(path), false) => Ok(input.into_pipeline_data_with_metadata(
|
||||
head,
|
||||
engine_state.ctrlc.clone(),
|
||||
signals,
|
||||
metadata.with_data_source(DataSource::FilePath(path.into())),
|
||||
)),
|
||||
(None, true) => Ok(input.into_pipeline_data_with_metadata(
|
||||
head,
|
||||
engine_state.ctrlc.clone(),
|
||||
signals,
|
||||
metadata.with_data_source(DataSource::Ls),
|
||||
)),
|
||||
_ => Ok(input.into_pipeline_data_with_metadata(
|
||||
head,
|
||||
engine_state.ctrlc.clone(),
|
||||
metadata,
|
||||
)),
|
||||
_ => Ok(input.into_pipeline_data_with_metadata(head, signals, metadata)),
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -3,10 +3,9 @@ use crate::{DirBuilder, DirInfo, FileInfo};
|
||||
#[allow(deprecated)]
|
||||
use nu_engine::{command_prelude::*, current_dir};
|
||||
use nu_glob::Pattern;
|
||||
use nu_protocol::NuGlob;
|
||||
use nu_protocol::{NuGlob, Signals};
|
||||
use serde::Deserialize;
|
||||
use std::path::Path;
|
||||
use std::sync::{atomic::AtomicBool, Arc};
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct Du;
|
||||
@ -120,8 +119,8 @@ impl Command for Du {
|
||||
min_size,
|
||||
};
|
||||
Ok(
|
||||
du_for_one_pattern(args, ¤t_dir, tag, engine_state.ctrlc.clone())?
|
||||
.into_pipeline_data(tag, engine_state.ctrlc.clone()),
|
||||
du_for_one_pattern(args, ¤t_dir, tag, engine_state.signals())?
|
||||
.into_pipeline_data(tag, engine_state.signals().clone()),
|
||||
)
|
||||
}
|
||||
Some(paths) => {
|
||||
@ -139,7 +138,7 @@ impl Command for Du {
|
||||
args,
|
||||
¤t_dir,
|
||||
tag,
|
||||
engine_state.ctrlc.clone(),
|
||||
engine_state.signals(),
|
||||
)?)
|
||||
}
|
||||
|
||||
@ -147,7 +146,7 @@ impl Command for Du {
|
||||
Ok(result_iters
|
||||
.into_iter()
|
||||
.flatten()
|
||||
.into_pipeline_data(tag, engine_state.ctrlc.clone()))
|
||||
.into_pipeline_data(tag, engine_state.signals().clone()))
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -164,8 +163,8 @@ impl Command for Du {
|
||||
fn du_for_one_pattern(
|
||||
args: DuArgs,
|
||||
current_dir: &Path,
|
||||
call_span: Span,
|
||||
ctrl_c: Option<Arc<AtomicBool>>,
|
||||
span: Span,
|
||||
signals: &Signals,
|
||||
) -> Result<impl Iterator<Item = Value> + Send, ShellError> {
|
||||
let exclude = args.exclude.map_or(Ok(None), move |x| {
|
||||
Pattern::new(x.item.as_ref())
|
||||
@ -178,7 +177,7 @@ fn du_for_one_pattern(
|
||||
|
||||
let include_files = args.all;
|
||||
let mut paths = match args.path {
|
||||
Some(p) => nu_engine::glob_from(&p, current_dir, call_span, None),
|
||||
Some(p) => nu_engine::glob_from(&p, current_dir, span, None),
|
||||
// The * pattern should never fail.
|
||||
None => nu_engine::glob_from(
|
||||
&Spanned {
|
||||
@ -186,7 +185,7 @@ fn du_for_one_pattern(
|
||||
span: Span::unknown(),
|
||||
},
|
||||
current_dir,
|
||||
call_span,
|
||||
span,
|
||||
None,
|
||||
),
|
||||
}
|
||||
@ -205,7 +204,7 @@ fn du_for_one_pattern(
|
||||
let min_size = args.min_size.map(|f| f.item as u64);
|
||||
|
||||
let params = DirBuilder {
|
||||
tag: call_span,
|
||||
tag: span,
|
||||
min: min_size,
|
||||
deref,
|
||||
exclude,
|
||||
@ -217,13 +216,13 @@ fn du_for_one_pattern(
|
||||
match p {
|
||||
Ok(a) => {
|
||||
if a.is_dir() {
|
||||
output.push(DirInfo::new(a, ¶ms, max_depth, ctrl_c.clone()).into());
|
||||
} else if let Ok(v) = FileInfo::new(a, deref, call_span) {
|
||||
output.push(DirInfo::new(a, ¶ms, max_depth, span, signals)?.into());
|
||||
} else if let Ok(v) = FileInfo::new(a, deref, span) {
|
||||
output.push(v.into());
|
||||
}
|
||||
}
|
||||
Err(e) => {
|
||||
output.push(Value::error(e, call_span));
|
||||
output.push(Value::error(e, span));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -1,5 +1,5 @@
|
||||
use nu_engine::command_prelude::*;
|
||||
use std::sync::{atomic::AtomicBool, Arc};
|
||||
use nu_protocol::Signals;
|
||||
use wax::{Glob as WaxGlob, WalkBehavior, WalkEntry};
|
||||
|
||||
#[derive(Clone)]
|
||||
@ -125,7 +125,6 @@ impl Command for Glob {
|
||||
call: &Call,
|
||||
_input: PipelineData,
|
||||
) -> Result<PipelineData, ShellError> {
|
||||
let ctrlc = engine_state.ctrlc.clone();
|
||||
let span = call.head;
|
||||
let glob_pattern: Spanned<String> = call.req(engine_state, stack, 0)?;
|
||||
let depth = call.get_flag(engine_state, stack, "depth")?;
|
||||
@ -216,7 +215,14 @@ impl Command for Glob {
|
||||
inner: vec![],
|
||||
})?
|
||||
.flatten();
|
||||
glob_to_value(ctrlc, glob_results, no_dirs, no_files, no_symlinks, span)
|
||||
glob_to_value(
|
||||
engine_state.signals(),
|
||||
glob_results,
|
||||
no_dirs,
|
||||
no_files,
|
||||
no_symlinks,
|
||||
span,
|
||||
)
|
||||
} else {
|
||||
let glob_results = glob
|
||||
.walk_with_behavior(
|
||||
@ -227,12 +233,19 @@ impl Command for Glob {
|
||||
},
|
||||
)
|
||||
.flatten();
|
||||
glob_to_value(ctrlc, glob_results, no_dirs, no_files, no_symlinks, span)
|
||||
glob_to_value(
|
||||
engine_state.signals(),
|
||||
glob_results,
|
||||
no_dirs,
|
||||
no_files,
|
||||
no_symlinks,
|
||||
span,
|
||||
)
|
||||
}?;
|
||||
|
||||
Ok(result
|
||||
.into_iter()
|
||||
.into_pipeline_data(span, engine_state.ctrlc.clone()))
|
||||
.into_pipeline_data(span, engine_state.signals().clone()))
|
||||
}
|
||||
}
|
||||
|
||||
@ -252,7 +265,7 @@ fn convert_patterns(columns: &[Value]) -> Result<Vec<String>, ShellError> {
|
||||
}
|
||||
|
||||
fn glob_to_value<'a>(
|
||||
ctrlc: Option<Arc<AtomicBool>>,
|
||||
signals: &Signals,
|
||||
glob_results: impl Iterator<Item = WalkEntry<'a>>,
|
||||
no_dirs: bool,
|
||||
no_files: bool,
|
||||
@ -261,10 +274,7 @@ fn glob_to_value<'a>(
|
||||
) -> Result<Vec<Value>, ShellError> {
|
||||
let mut result: Vec<Value> = Vec::new();
|
||||
for entry in glob_results {
|
||||
if nu_utils::ctrl_c::was_pressed(&ctrlc) {
|
||||
result.clear();
|
||||
return Err(ShellError::InterruptedByUser { span: None });
|
||||
}
|
||||
signals.check(span)?;
|
||||
let file_type = entry.file_type();
|
||||
|
||||
if !(no_dirs && file_type.is_dir()
|
||||
|
@ -6,14 +6,13 @@ use nu_engine::glob_from;
|
||||
use nu_engine::{command_prelude::*, env::current_dir};
|
||||
use nu_glob::MatchOptions;
|
||||
use nu_path::expand_to_real_path;
|
||||
use nu_protocol::{DataSource, NuGlob, PipelineMetadata};
|
||||
use nu_protocol::{DataSource, NuGlob, PipelineMetadata, Signals};
|
||||
use pathdiff::diff_paths;
|
||||
|
||||
#[cfg(unix)]
|
||||
use std::os::unix::fs::PermissionsExt;
|
||||
use std::{
|
||||
path::PathBuf,
|
||||
sync::Arc,
|
||||
time::{SystemTime, UNIX_EPOCH},
|
||||
};
|
||||
|
||||
@ -93,7 +92,6 @@ impl Command for Ls {
|
||||
let du = call.has_flag(engine_state, stack, "du")?;
|
||||
let directory = call.has_flag(engine_state, stack, "directory")?;
|
||||
let use_mime_type = call.has_flag(engine_state, stack, "mime-type")?;
|
||||
let ctrl_c = engine_state.ctrlc.clone();
|
||||
let call_span = call.head;
|
||||
#[allow(deprecated)]
|
||||
let cwd = current_dir(engine_state, stack)?;
|
||||
@ -116,10 +114,10 @@ impl Command for Ls {
|
||||
Some(pattern_arg)
|
||||
};
|
||||
match input_pattern_arg {
|
||||
None => Ok(ls_for_one_pattern(None, args, ctrl_c.clone(), cwd)?
|
||||
None => Ok(ls_for_one_pattern(None, args, engine_state.signals(), cwd)?
|
||||
.into_pipeline_data_with_metadata(
|
||||
call_span,
|
||||
ctrl_c,
|
||||
engine_state.signals().clone(),
|
||||
PipelineMetadata {
|
||||
data_source: DataSource::Ls,
|
||||
content_type: None,
|
||||
@ -131,7 +129,7 @@ impl Command for Ls {
|
||||
result_iters.push(ls_for_one_pattern(
|
||||
Some(pat),
|
||||
args,
|
||||
ctrl_c.clone(),
|
||||
engine_state.signals(),
|
||||
cwd.clone(),
|
||||
)?)
|
||||
}
|
||||
@ -143,7 +141,7 @@ impl Command for Ls {
|
||||
.flatten()
|
||||
.into_pipeline_data_with_metadata(
|
||||
call_span,
|
||||
ctrl_c,
|
||||
engine_state.signals().clone(),
|
||||
PipelineMetadata {
|
||||
data_source: DataSource::Ls,
|
||||
content_type: None,
|
||||
@ -215,7 +213,7 @@ impl Command for Ls {
|
||||
fn ls_for_one_pattern(
|
||||
pattern_arg: Option<Spanned<NuGlob>>,
|
||||
args: Args,
|
||||
ctrl_c: Option<Arc<AtomicBool>>,
|
||||
signals: &Signals,
|
||||
cwd: PathBuf,
|
||||
) -> Result<Box<dyn Iterator<Item = Value> + Send>, ShellError> {
|
||||
let Args {
|
||||
@ -342,7 +340,7 @@ fn ls_for_one_pattern(
|
||||
|
||||
let mut hidden_dirs = vec![];
|
||||
|
||||
let one_ctrl_c = ctrl_c.clone();
|
||||
let signals = signals.clone();
|
||||
Ok(Box::new(paths_peek.filter_map(move |x| match x {
|
||||
Ok(path) => {
|
||||
let metadata = match std::fs::symlink_metadata(&path) {
|
||||
@ -412,7 +410,7 @@ fn ls_for_one_pattern(
|
||||
call_span,
|
||||
long,
|
||||
du,
|
||||
one_ctrl_c.clone(),
|
||||
&signals,
|
||||
use_mime_type,
|
||||
);
|
||||
match entry {
|
||||
@ -474,7 +472,6 @@ fn path_contains_hidden_folder(path: &Path, folders: &[PathBuf]) -> bool {
|
||||
#[cfg(unix)]
|
||||
use std::os::unix::fs::FileTypeExt;
|
||||
use std::path::Path;
|
||||
use std::sync::atomic::AtomicBool;
|
||||
|
||||
pub fn get_file_type(md: &std::fs::Metadata, display_name: &str, use_mime_type: bool) -> String {
|
||||
let ft = md.file_type();
|
||||
@ -523,7 +520,7 @@ pub(crate) fn dir_entry_dict(
|
||||
span: Span,
|
||||
long: bool,
|
||||
du: bool,
|
||||
ctrl_c: Option<Arc<AtomicBool>>,
|
||||
signals: &Signals,
|
||||
use_mime_type: bool,
|
||||
) -> Result<Value, ShellError> {
|
||||
#[cfg(windows)]
|
||||
@ -618,7 +615,7 @@ pub(crate) fn dir_entry_dict(
|
||||
if md.is_dir() {
|
||||
if du {
|
||||
let params = DirBuilder::new(Span::new(0, 2), None, false, None, false);
|
||||
let dir_size = DirInfo::new(filename, ¶ms, None, ctrl_c).get_size();
|
||||
let dir_size = DirInfo::new(filename, ¶ms, None, span, signals)?.get_size();
|
||||
|
||||
Value::filesize(dir_size as i64, span)
|
||||
} else {
|
||||
|
@ -51,7 +51,6 @@ impl Command for Open {
|
||||
) -> Result<PipelineData, ShellError> {
|
||||
let raw = call.has_flag(engine_state, stack, "raw")?;
|
||||
let call_span = call.head;
|
||||
let ctrlc = engine_state.ctrlc.clone();
|
||||
#[allow(deprecated)]
|
||||
let cwd = current_dir(engine_state, stack)?;
|
||||
let mut paths = get_rest_for_glob_pattern(engine_state, stack, call, 0)?;
|
||||
@ -122,8 +121,12 @@ impl Command for Open {
|
||||
} else {
|
||||
#[cfg(feature = "sqlite")]
|
||||
if !raw {
|
||||
let res = SQLiteDatabase::try_from_path(path, arg_span, ctrlc.clone())
|
||||
.map(|db| db.into_value(call.head).into_pipeline_data());
|
||||
let res = SQLiteDatabase::try_from_path(
|
||||
path,
|
||||
arg_span,
|
||||
engine_state.signals().clone(),
|
||||
)
|
||||
.map(|db| db.into_value(call.head).into_pipeline_data());
|
||||
|
||||
if res.is_ok() {
|
||||
return res;
|
||||
@ -144,7 +147,7 @@ impl Command for Open {
|
||||
};
|
||||
|
||||
let stream = PipelineData::ByteStream(
|
||||
ByteStream::file(file, call_span, ctrlc.clone()),
|
||||
ByteStream::file(file, call_span, engine_state.signals().clone()),
|
||||
Some(PipelineMetadata {
|
||||
data_source: DataSource::FilePath(path.to_path_buf()),
|
||||
content_type: None,
|
||||
@ -203,7 +206,7 @@ impl Command for Open {
|
||||
Ok(output
|
||||
.into_iter()
|
||||
.flatten()
|
||||
.into_pipeline_data(call_span, ctrlc))
|
||||
.into_pipeline_data(call_span, engine_state.signals().clone()))
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -451,12 +451,7 @@ fn rm(
|
||||
});
|
||||
|
||||
for result in iter {
|
||||
if nu_utils::ctrl_c::was_pressed(&engine_state.ctrlc) {
|
||||
return Err(ShellError::InterruptedByUser {
|
||||
span: Some(call.head),
|
||||
});
|
||||
}
|
||||
|
||||
engine_state.signals().check(call.head)?;
|
||||
match result {
|
||||
Ok(None) => {}
|
||||
Ok(Some(msg)) => eprintln!("{msg}"),
|
||||
|
@ -5,15 +5,14 @@ use nu_engine::{command_prelude::*, current_dir};
|
||||
use nu_path::expand_path_with;
|
||||
use nu_protocol::{
|
||||
ast::{Expr, Expression},
|
||||
byte_stream::copy_with_interrupt,
|
||||
byte_stream::copy_with_signals,
|
||||
process::ChildPipe,
|
||||
ByteStreamSource, DataSource, OutDest, PipelineMetadata,
|
||||
ByteStreamSource, DataSource, OutDest, PipelineMetadata, Signals,
|
||||
};
|
||||
use std::{
|
||||
fs::File,
|
||||
io::{self, BufRead, BufReader, Read, Write},
|
||||
path::{Path, PathBuf},
|
||||
sync::{atomic::AtomicBool, Arc},
|
||||
thread,
|
||||
};
|
||||
|
||||
@ -120,30 +119,30 @@ impl Command for Save {
|
||||
)?;
|
||||
|
||||
let size = stream.known_size();
|
||||
let ctrlc = engine_state.ctrlc.clone();
|
||||
let signals = engine_state.signals();
|
||||
|
||||
match stream.into_source() {
|
||||
ByteStreamSource::Read(read) => {
|
||||
stream_to_file(read, size, ctrlc, file, span, progress)?;
|
||||
stream_to_file(read, size, signals, file, span, progress)?;
|
||||
}
|
||||
ByteStreamSource::File(source) => {
|
||||
stream_to_file(source, size, ctrlc, file, span, progress)?;
|
||||
stream_to_file(source, size, signals, file, span, progress)?;
|
||||
}
|
||||
ByteStreamSource::Child(mut child) => {
|
||||
fn write_or_consume_stderr(
|
||||
stderr: ChildPipe,
|
||||
file: Option<File>,
|
||||
span: Span,
|
||||
ctrlc: Option<Arc<AtomicBool>>,
|
||||
signals: &Signals,
|
||||
progress: bool,
|
||||
) -> Result<(), ShellError> {
|
||||
if let Some(file) = file {
|
||||
match stderr {
|
||||
ChildPipe::Pipe(pipe) => {
|
||||
stream_to_file(pipe, None, ctrlc, file, span, progress)
|
||||
stream_to_file(pipe, None, signals, file, span, progress)
|
||||
}
|
||||
ChildPipe::Tee(tee) => {
|
||||
stream_to_file(tee, None, ctrlc, file, span, progress)
|
||||
stream_to_file(tee, None, signals, file, span, progress)
|
||||
}
|
||||
}?
|
||||
} else {
|
||||
@ -163,14 +162,14 @@ impl Command for Save {
|
||||
// delegate a thread to redirect stderr to result.
|
||||
let handler = stderr
|
||||
.map(|stderr| {
|
||||
let ctrlc = ctrlc.clone();
|
||||
let signals = signals.clone();
|
||||
thread::Builder::new().name("stderr saver".into()).spawn(
|
||||
move || {
|
||||
write_or_consume_stderr(
|
||||
stderr,
|
||||
stderr_file,
|
||||
span,
|
||||
ctrlc,
|
||||
&signals,
|
||||
progress,
|
||||
)
|
||||
},
|
||||
@ -181,10 +180,10 @@ impl Command for Save {
|
||||
|
||||
let res = match stdout {
|
||||
ChildPipe::Pipe(pipe) => {
|
||||
stream_to_file(pipe, None, ctrlc, file, span, progress)
|
||||
stream_to_file(pipe, None, signals, file, span, progress)
|
||||
}
|
||||
ChildPipe::Tee(tee) => {
|
||||
stream_to_file(tee, None, ctrlc, file, span, progress)
|
||||
stream_to_file(tee, None, signals, file, span, progress)
|
||||
}
|
||||
};
|
||||
if let Some(h) = handler {
|
||||
@ -202,7 +201,7 @@ impl Command for Save {
|
||||
stderr,
|
||||
stderr_file,
|
||||
span,
|
||||
ctrlc,
|
||||
signals,
|
||||
progress,
|
||||
)?;
|
||||
}
|
||||
@ -510,7 +509,7 @@ fn get_files(
|
||||
fn stream_to_file(
|
||||
source: impl Read,
|
||||
known_size: Option<u64>,
|
||||
ctrlc: Option<Arc<AtomicBool>>,
|
||||
signals: &Signals,
|
||||
mut file: File,
|
||||
span: Span,
|
||||
progress: bool,
|
||||
@ -526,9 +525,9 @@ fn stream_to_file(
|
||||
let mut reader = BufReader::new(source);
|
||||
|
||||
let res = loop {
|
||||
if nu_utils::ctrl_c::was_pressed(&ctrlc) {
|
||||
if let Err(err) = signals.check(span) {
|
||||
bar.abandoned_msg("# Cancelled #".to_owned());
|
||||
return Ok(());
|
||||
return Err(err);
|
||||
}
|
||||
|
||||
match reader.fill_buf() {
|
||||
@ -555,7 +554,7 @@ fn stream_to_file(
|
||||
Ok(())
|
||||
}
|
||||
} else {
|
||||
copy_with_interrupt(source, file, span, ctrlc.as_deref())?;
|
||||
copy_with_signals(source, file, span, signals)?;
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
@ -143,7 +143,6 @@ impl Command for Watch {
|
||||
None => RecursiveMode::Recursive,
|
||||
};
|
||||
|
||||
let ctrlc_ref = &engine_state.ctrlc.clone();
|
||||
let (tx, rx) = channel();
|
||||
|
||||
let mut debouncer = match new_debouncer(debounce_duration, None, tx) {
|
||||
@ -256,7 +255,7 @@ impl Command for Watch {
|
||||
}
|
||||
Err(RecvTimeoutError::Timeout) => {}
|
||||
}
|
||||
if nu_utils::ctrl_c::was_pressed(ctrlc_ref) {
|
||||
if engine_state.signals().interrupted() {
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
@ -116,7 +116,7 @@ only unwrap the outer list, and leave the variable's contents untouched."#
|
||||
Ok(input
|
||||
.into_iter()
|
||||
.chain(other.into_pipeline_data())
|
||||
.into_pipeline_data_with_metadata(call.head, engine_state.ctrlc.clone(), metadata))
|
||||
.into_pipeline_data_with_metadata(call.head, engine_state.signals().clone(), metadata))
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -140,7 +140,7 @@ pub fn compact(
|
||||
_ => true,
|
||||
}
|
||||
},
|
||||
engine_state.ctrlc.clone(),
|
||||
engine_state.signals(),
|
||||
)
|
||||
.map(|m| m.set_metadata(metadata))
|
||||
}
|
||||
|
@ -80,8 +80,6 @@ fn default(
|
||||
let value: Value = call.req(engine_state, stack, 0)?;
|
||||
let column: Option<Spanned<String>> = call.opt(engine_state, stack, 1)?;
|
||||
|
||||
let ctrlc = engine_state.ctrlc.clone();
|
||||
|
||||
if let Some(column) = column {
|
||||
input
|
||||
.map(
|
||||
@ -109,7 +107,7 @@ fn default(
|
||||
}
|
||||
_ => item,
|
||||
},
|
||||
ctrlc,
|
||||
engine_state.signals(),
|
||||
)
|
||||
.map(|x| x.set_metadata(metadata))
|
||||
} else if input.is_nothing() {
|
||||
@ -121,7 +119,7 @@ fn default(
|
||||
Value::Nothing { .. } => value.clone(),
|
||||
x => x,
|
||||
},
|
||||
ctrlc,
|
||||
engine_state.signals(),
|
||||
)
|
||||
.map(|x| x.set_metadata(metadata))
|
||||
}
|
||||
|
@ -102,7 +102,11 @@ fn drop_cols(
|
||||
Err(e) => Value::error(e, head),
|
||||
}
|
||||
}))
|
||||
.into_pipeline_data_with_metadata(head, engine_state.ctrlc.clone(), metadata))
|
||||
.into_pipeline_data_with_metadata(
|
||||
head,
|
||||
engine_state.signals().clone(),
|
||||
metadata,
|
||||
))
|
||||
} else {
|
||||
Ok(PipelineData::Empty)
|
||||
}
|
||||
|
@ -156,7 +156,7 @@ impl Command for DropNth {
|
||||
.take(start)
|
||||
.into_pipeline_data_with_metadata(
|
||||
head,
|
||||
engine_state.ctrlc.clone(),
|
||||
engine_state.signals().clone(),
|
||||
metadata,
|
||||
))
|
||||
}
|
||||
@ -177,7 +177,7 @@ impl Command for DropNth {
|
||||
rows,
|
||||
current: 0,
|
||||
}
|
||||
.into_pipeline_data_with_metadata(head, engine_state.ctrlc.clone(), metadata))
|
||||
.into_pipeline_data_with_metadata(head, engine_state.signals().clone(), metadata))
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -140,7 +140,7 @@ with 'transpose' first."#
|
||||
}
|
||||
}
|
||||
})
|
||||
.into_pipeline_data(head, engine_state.ctrlc.clone()))
|
||||
.into_pipeline_data(head, engine_state.signals().clone()))
|
||||
}
|
||||
PipelineData::ByteStream(stream, ..) => {
|
||||
if let Some(chunks) = stream.chunks() {
|
||||
@ -171,7 +171,7 @@ with 'transpose' first."#
|
||||
}
|
||||
}
|
||||
})
|
||||
.into_pipeline_data(head, engine_state.ctrlc.clone()))
|
||||
.into_pipeline_data(head, engine_state.signals().clone()))
|
||||
} else {
|
||||
Ok(PipelineData::Empty)
|
||||
}
|
||||
@ -185,7 +185,7 @@ with 'transpose' first."#
|
||||
.and_then(|x| {
|
||||
x.filter(
|
||||
move |x| if !keep_empty { !x.is_nothing() } else { true },
|
||||
engine_state.ctrlc.clone(),
|
||||
engine_state.signals(),
|
||||
)
|
||||
})
|
||||
.map(|data| data.set_metadata(metadata))
|
||||
|
@ -52,7 +52,6 @@ impl Command for Enumerate {
|
||||
) -> Result<PipelineData, ShellError> {
|
||||
let head = call.head;
|
||||
let metadata = input.metadata();
|
||||
let ctrlc = engine_state.ctrlc.clone();
|
||||
|
||||
Ok(input
|
||||
.into_iter()
|
||||
@ -66,7 +65,7 @@ impl Command for Enumerate {
|
||||
head,
|
||||
)
|
||||
})
|
||||
.into_pipeline_data_with_metadata(head, ctrlc, metadata))
|
||||
.into_pipeline_data_with_metadata(head, engine_state.signals().clone(), metadata))
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -78,7 +78,7 @@ impl Command for Every {
|
||||
None
|
||||
}
|
||||
})
|
||||
.into_pipeline_data_with_metadata(call.head, engine_state.ctrlc.clone(), metadata))
|
||||
.into_pipeline_data_with_metadata(call.head, engine_state.signals().clone(), metadata))
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -72,7 +72,7 @@ a variable. On the other hand, the "row condition" syntax is not supported."#
|
||||
}
|
||||
}
|
||||
})
|
||||
.into_pipeline_data(head, engine_state.ctrlc.clone()))
|
||||
.into_pipeline_data(head, engine_state.signals().clone()))
|
||||
}
|
||||
PipelineData::ByteStream(stream, ..) => {
|
||||
if let Some(chunks) = stream.chunks() {
|
||||
@ -97,7 +97,7 @@ a variable. On the other hand, the "row condition" syntax is not supported."#
|
||||
}
|
||||
}
|
||||
})
|
||||
.into_pipeline_data(head, engine_state.ctrlc.clone()))
|
||||
.into_pipeline_data(head, engine_state.signals().clone()))
|
||||
} else {
|
||||
Ok(PipelineData::Empty)
|
||||
}
|
||||
@ -117,7 +117,7 @@ a variable. On the other hand, the "row condition" syntax is not supported."#
|
||||
Some(Value::error(err, span))
|
||||
}
|
||||
}
|
||||
.into_pipeline_data(head, engine_state.ctrlc.clone()))
|
||||
.into_pipeline_data(head, engine_state.signals().clone()))
|
||||
}
|
||||
}
|
||||
.map(|data| data.set_metadata(metadata))
|
||||
|
@ -213,7 +213,6 @@ fn find_with_regex(
|
||||
input: PipelineData,
|
||||
) -> Result<PipelineData, ShellError> {
|
||||
let span = call.head;
|
||||
let ctrlc = engine_state.ctrlc.clone();
|
||||
let config = engine_state.get_config().clone();
|
||||
|
||||
let insensitive = call.has_flag(engine_state, stack, "ignore-case")?;
|
||||
@ -246,7 +245,7 @@ fn find_with_regex(
|
||||
Value::List { vals, .. } => values_match_find(vals, &re, &config, invert),
|
||||
_ => false,
|
||||
},
|
||||
ctrlc,
|
||||
engine_state.signals(),
|
||||
)
|
||||
}
|
||||
|
||||
@ -349,18 +348,16 @@ fn find_with_rest_and_highlight(
|
||||
input: PipelineData,
|
||||
) -> Result<PipelineData, ShellError> {
|
||||
let span = call.head;
|
||||
let ctrlc = engine_state.ctrlc.clone();
|
||||
let engine_state = engine_state.clone();
|
||||
let config = engine_state.get_config().clone();
|
||||
let filter_config = engine_state.get_config().clone();
|
||||
let invert = call.has_flag(&engine_state, stack, "invert")?;
|
||||
let terms = call.rest::<Value>(&engine_state, stack, 0)?;
|
||||
let invert = call.has_flag(engine_state, stack, "invert")?;
|
||||
let terms = call.rest::<Value>(engine_state, stack, 0)?;
|
||||
let lower_terms = terms
|
||||
.iter()
|
||||
.map(|v| Value::string(v.to_expanded_string("", &config).to_lowercase(), span))
|
||||
.collect::<Vec<Value>>();
|
||||
|
||||
let style_computer = StyleComputer::from_config(&engine_state, stack);
|
||||
let style_computer = StyleComputer::from_config(engine_state, stack);
|
||||
// Currently, search results all use the same style.
|
||||
// Also note that this sample string is passed into user-written code (the closure that may or may not be
|
||||
// defined for "string").
|
||||
@ -369,7 +366,7 @@ fn find_with_rest_and_highlight(
|
||||
style_computer.compute("search_result", &Value::string("search result", span));
|
||||
|
||||
let cols_to_search_in_map: Vec<_> = call
|
||||
.get_flag(&engine_state, stack, "columns")?
|
||||
.get_flag(engine_state, stack, "columns")?
|
||||
.unwrap_or_default();
|
||||
|
||||
let cols_to_search_in_filter = cols_to_search_in_map.clone();
|
||||
@ -401,7 +398,7 @@ fn find_with_rest_and_highlight(
|
||||
_ => x,
|
||||
}
|
||||
},
|
||||
ctrlc.clone(),
|
||||
engine_state.signals(),
|
||||
)?
|
||||
.filter(
|
||||
move |value| {
|
||||
@ -414,7 +411,7 @@ fn find_with_rest_and_highlight(
|
||||
invert,
|
||||
)
|
||||
},
|
||||
ctrlc,
|
||||
engine_state.signals(),
|
||||
),
|
||||
PipelineData::ListStream(stream, metadata) => {
|
||||
let stream = stream.modify(|iter| {
|
||||
|
@ -1,4 +1,5 @@
|
||||
use nu_engine::command_prelude::*;
|
||||
use nu_protocol::Signals;
|
||||
use std::io::Read;
|
||||
|
||||
#[derive(Clone)]
|
||||
@ -133,8 +134,7 @@ fn first_helper(
|
||||
}
|
||||
}
|
||||
Value::Range { val, .. } => {
|
||||
let ctrlc = engine_state.ctrlc.clone();
|
||||
let mut iter = val.into_range_iter(span, ctrlc.clone());
|
||||
let mut iter = val.into_range_iter(span, Signals::empty());
|
||||
if return_single_element {
|
||||
if let Some(v) = iter.next() {
|
||||
Ok(v.into_pipeline_data())
|
||||
@ -142,9 +142,11 @@ fn first_helper(
|
||||
Err(ShellError::AccessEmptyContent { span: head })
|
||||
}
|
||||
} else {
|
||||
Ok(iter
|
||||
.take(rows)
|
||||
.into_pipeline_data_with_metadata(span, ctrlc, metadata))
|
||||
Ok(iter.take(rows).into_pipeline_data_with_metadata(
|
||||
span,
|
||||
engine_state.signals().clone(),
|
||||
metadata,
|
||||
))
|
||||
}
|
||||
}
|
||||
// Propagate errors by explicitly matching them before the final case.
|
||||
@ -189,7 +191,7 @@ fn first_helper(
|
||||
ByteStream::read(
|
||||
reader.take(rows as u64),
|
||||
head,
|
||||
None,
|
||||
Signals::empty(),
|
||||
ByteStreamType::Binary,
|
||||
),
|
||||
metadata,
|
||||
|
@ -127,7 +127,7 @@ fn flatten(
|
||||
input
|
||||
.flat_map(
|
||||
move |item| flat_value(&columns, item, flatten_all),
|
||||
engine_state.ctrlc.clone(),
|
||||
engine_state.signals(),
|
||||
)
|
||||
.map(|x| x.set_metadata(metadata))
|
||||
}
|
||||
|
@ -62,7 +62,6 @@ If multiple cell paths are given, this will produce a list of values."#
|
||||
let mut rest: Vec<CellPath> = call.rest(engine_state, stack, 1)?;
|
||||
let ignore_errors = call.has_flag(engine_state, stack, "ignore-errors")?;
|
||||
let sensitive = call.has_flag(engine_state, stack, "sensitive")?;
|
||||
let ctrlc = engine_state.ctrlc.clone();
|
||||
let metadata = input.metadata();
|
||||
|
||||
if ignore_errors {
|
||||
@ -89,7 +88,9 @@ If multiple cell paths are given, this will produce a list of values."#
|
||||
output.push(val?);
|
||||
}
|
||||
|
||||
Ok(output.into_iter().into_pipeline_data(span, ctrlc))
|
||||
Ok(output
|
||||
.into_iter()
|
||||
.into_pipeline_data(span, engine_state.signals().clone()))
|
||||
}
|
||||
.map(|x| x.set_metadata(metadata))
|
||||
}
|
||||
|
@ -55,18 +55,19 @@ impl Command for Group {
|
||||
) -> Result<PipelineData, ShellError> {
|
||||
let head = call.head;
|
||||
let group_size: Spanned<usize> = call.req(engine_state, stack, 0)?;
|
||||
let ctrlc = engine_state.ctrlc.clone();
|
||||
let metadata = input.metadata();
|
||||
|
||||
//FIXME: add in support for external redirection when engine-q supports it generally
|
||||
|
||||
let each_group_iterator = EachGroupIterator {
|
||||
group_size: group_size.item,
|
||||
input: Box::new(input.into_iter()),
|
||||
span: head,
|
||||
};
|
||||
|
||||
Ok(each_group_iterator.into_pipeline_data_with_metadata(head, ctrlc, metadata))
|
||||
Ok(each_group_iterator.into_pipeline_data_with_metadata(
|
||||
head,
|
||||
engine_state.signals().clone(),
|
||||
metadata,
|
||||
))
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -222,7 +222,11 @@ fn insert(
|
||||
Ok(pre_elems
|
||||
.into_iter()
|
||||
.chain(stream)
|
||||
.into_pipeline_data_with_metadata(head, engine_state.ctrlc.clone(), metadata))
|
||||
.into_pipeline_data_with_metadata(
|
||||
head,
|
||||
engine_state.signals().clone(),
|
||||
metadata,
|
||||
))
|
||||
} else if let Value::Closure { val, .. } = replacement {
|
||||
let mut closure = ClosureEval::new(engine_state, stack, *val);
|
||||
let stream = stream.map(move |mut value| {
|
||||
|
@ -147,7 +147,7 @@ interleave
|
||||
// Now that threads are writing to the channel, we just return it as a stream
|
||||
Ok(rx
|
||||
.into_iter()
|
||||
.into_pipeline_data(head, engine_state.ctrlc.clone()))
|
||||
.into_pipeline_data(head, engine_state.signals().clone()))
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -67,7 +67,7 @@ impl Command for Items {
|
||||
}
|
||||
}
|
||||
})
|
||||
.into_pipeline_data(head, engine_state.ctrlc.clone()))
|
||||
.into_pipeline_data(head, engine_state.signals().clone()))
|
||||
}
|
||||
Value::Error { error, .. } => Err(*error),
|
||||
other => Err(ShellError::OnlySupportsThisInputType {
|
||||
|
@ -99,14 +99,10 @@ impl Command for Last {
|
||||
let mut buf = VecDeque::new();
|
||||
|
||||
for row in iterator {
|
||||
if nu_utils::ctrl_c::was_pressed(&engine_state.ctrlc) {
|
||||
return Err(ShellError::InterruptedByUser { span: Some(head) });
|
||||
}
|
||||
|
||||
engine_state.signals().check(head)?;
|
||||
if buf.len() == rows {
|
||||
buf.pop_front();
|
||||
}
|
||||
|
||||
buf.push_back(row);
|
||||
}
|
||||
|
||||
|
@ -26,7 +26,6 @@ impl Command for Lines {
|
||||
input: PipelineData,
|
||||
) -> Result<PipelineData, ShellError> {
|
||||
let head = call.head;
|
||||
let ctrlc = engine_state.ctrlc.clone();
|
||||
let skip_empty = call.has_flag(engine_state, stack, "skip-empty")?;
|
||||
|
||||
let span = input.span().unwrap_or(call.head);
|
||||
@ -91,7 +90,7 @@ impl Command for Lines {
|
||||
Ok(line) => Value::string(line, head),
|
||||
Err(err) => Value::error(err, head),
|
||||
})
|
||||
.into_pipeline_data(head, ctrlc))
|
||||
.into_pipeline_data(head, engine_state.signals().clone()))
|
||||
} else {
|
||||
Ok(PipelineData::empty())
|
||||
}
|
||||
|
@ -88,7 +88,6 @@ repeating this process with row 1, and so on."#
|
||||
let head = call.head;
|
||||
let merge_value: Value = call.req(engine_state, stack, 0)?;
|
||||
let metadata = input.metadata();
|
||||
let ctrlc = engine_state.ctrlc.clone();
|
||||
|
||||
match (&input, merge_value) {
|
||||
// table (list of records)
|
||||
@ -110,7 +109,11 @@ repeating this process with row 1, and so on."#
|
||||
(Err(error), _) => Value::error(error, head),
|
||||
});
|
||||
|
||||
Ok(res.into_pipeline_data_with_metadata(head, ctrlc, metadata))
|
||||
Ok(res.into_pipeline_data_with_metadata(
|
||||
head,
|
||||
engine_state.signals().clone(),
|
||||
metadata,
|
||||
))
|
||||
}
|
||||
// record
|
||||
(
|
||||
|
@ -144,7 +144,6 @@ impl Command for Move {
|
||||
};
|
||||
|
||||
let metadata = input.metadata();
|
||||
let ctrlc = engine_state.ctrlc.clone();
|
||||
|
||||
match input {
|
||||
PipelineData::Value(Value::List { .. }, ..) | PipelineData::ListStream { .. } => {
|
||||
@ -158,7 +157,11 @@ impl Command for Move {
|
||||
Err(error) => Value::error(error, head),
|
||||
});
|
||||
|
||||
Ok(res.into_pipeline_data_with_metadata(head, ctrlc, metadata))
|
||||
Ok(res.into_pipeline_data_with_metadata(
|
||||
head,
|
||||
engine_state.signals().clone(),
|
||||
metadata,
|
||||
))
|
||||
}
|
||||
PipelineData::Value(Value::Record { val, .. }, ..) => {
|
||||
Ok(move_record_columns(&val, &columns, &before_or_after, head)?
|
||||
|
@ -1,6 +1,6 @@
|
||||
use super::utils::chain_error_with_input;
|
||||
use nu_engine::{command_prelude::*, ClosureEvalOnce};
|
||||
use nu_protocol::engine::Closure;
|
||||
use nu_protocol::{engine::Closure, Signals};
|
||||
use rayon::prelude::*;
|
||||
|
||||
#[derive(Clone)]
|
||||
@ -158,12 +158,11 @@ impl Command for ParEach {
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
apply_order(vec).into_pipeline_data(span, engine_state.ctrlc.clone())
|
||||
apply_order(vec).into_pipeline_data(span, engine_state.signals().clone())
|
||||
})),
|
||||
Value::Range { val, .. } => Ok(create_pool(max_threads)?.install(|| {
|
||||
let ctrlc = engine_state.ctrlc.clone();
|
||||
let vec = val
|
||||
.into_range_iter(span, ctrlc.clone())
|
||||
.into_range_iter(span, Signals::empty())
|
||||
.enumerate()
|
||||
.par_bridge()
|
||||
.map(move |(index, value)| {
|
||||
@ -184,7 +183,7 @@ impl Command for ParEach {
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
apply_order(vec).into_pipeline_data(span, ctrlc)
|
||||
apply_order(vec).into_pipeline_data(span, engine_state.signals().clone())
|
||||
})),
|
||||
// This match allows non-iterables to be accepted,
|
||||
// which is currently considered undesirable (Nov 2022).
|
||||
@ -212,7 +211,7 @@ impl Command for ParEach {
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
apply_order(vec).into_pipeline_data(head, engine_state.ctrlc.clone())
|
||||
apply_order(vec).into_pipeline_data(head, engine_state.signals().clone())
|
||||
})),
|
||||
PipelineData::ByteStream(stream, ..) => {
|
||||
if let Some(chunks) = stream.chunks() {
|
||||
@ -236,14 +235,14 @@ impl Command for ParEach {
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
apply_order(vec).into_pipeline_data(head, engine_state.ctrlc.clone())
|
||||
apply_order(vec).into_pipeline_data(head, engine_state.signals().clone())
|
||||
}))
|
||||
} else {
|
||||
Ok(PipelineData::empty())
|
||||
}
|
||||
}
|
||||
}
|
||||
.and_then(|x| x.filter(|v| !v.is_nothing(), engine_state.ctrlc.clone()))
|
||||
.and_then(|x| x.filter(|v| !v.is_nothing(), engine_state.signals()))
|
||||
.map(|data| data.set_metadata(metadata))
|
||||
}
|
||||
}
|
||||
|
@ -117,7 +117,7 @@ only unwrap the outer list, and leave the variable's contents untouched."#
|
||||
.into_pipeline_data()
|
||||
.into_iter()
|
||||
.chain(input)
|
||||
.into_pipeline_data_with_metadata(call.head, engine_state.ctrlc.clone(), metadata))
|
||||
.into_pipeline_data_with_metadata(call.head, engine_state.signals().clone(), metadata))
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -106,7 +106,7 @@ impl Command for Range {
|
||||
Ok(PipelineData::Value(Value::nothing(head), None))
|
||||
} else {
|
||||
let iter = v.into_iter().skip(from).take(to - from + 1);
|
||||
Ok(iter.into_pipeline_data(head, engine_state.ctrlc.clone()))
|
||||
Ok(iter.into_pipeline_data(head, engine_state.signals().clone()))
|
||||
}
|
||||
} else {
|
||||
let from = start as usize;
|
||||
@ -116,7 +116,7 @@ impl Command for Range {
|
||||
Ok(PipelineData::Value(Value::nothing(head), None))
|
||||
} else {
|
||||
let iter = input.into_iter().skip(from).take(to - from + 1);
|
||||
Ok(iter.into_pipeline_data(head, engine_state.ctrlc.clone()))
|
||||
Ok(iter.into_pipeline_data(head, engine_state.signals().clone()))
|
||||
}
|
||||
}
|
||||
.map(|x| x.set_metadata(metadata))
|
||||
|
@ -107,10 +107,7 @@ impl Command for Reduce {
|
||||
let mut closure = ClosureEval::new(engine_state, stack, closure);
|
||||
|
||||
for value in iter {
|
||||
if nu_utils::ctrl_c::was_pressed(&engine_state.ctrlc) {
|
||||
break;
|
||||
}
|
||||
|
||||
engine_state.signals().check(head)?;
|
||||
acc = closure
|
||||
.add_arg(value)
|
||||
.add_arg(acc)
|
||||
|
@ -221,7 +221,7 @@ fn rename(
|
||||
),
|
||||
}
|
||||
},
|
||||
engine_state.ctrlc.clone(),
|
||||
engine_state.signals(),
|
||||
)
|
||||
.map(|data| data.set_metadata(metadata))
|
||||
}
|
||||
|
@ -63,7 +63,7 @@ impl Command for Reverse {
|
||||
let metadata = input.metadata();
|
||||
let values = input.into_iter_strict(head)?.collect::<Vec<_>>();
|
||||
let iter = values.into_iter().rev();
|
||||
Ok(iter.into_pipeline_data_with_metadata(head, engine_state.ctrlc.clone(), metadata))
|
||||
Ok(iter.into_pipeline_data_with_metadata(head, engine_state.signals().clone(), metadata))
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -215,7 +215,11 @@ fn select(
|
||||
rows: unique_rows.into_iter().peekable(),
|
||||
current: 0,
|
||||
}
|
||||
.into_pipeline_data_with_metadata(call_span, engine_state.ctrlc.clone(), metadata)
|
||||
.into_pipeline_data_with_metadata(
|
||||
call_span,
|
||||
engine_state.signals().clone(),
|
||||
metadata,
|
||||
)
|
||||
} else {
|
||||
input
|
||||
};
|
||||
@ -255,7 +259,7 @@ fn select(
|
||||
|
||||
Ok(output.into_iter().into_pipeline_data_with_metadata(
|
||||
call_span,
|
||||
engine_state.ctrlc.clone(),
|
||||
engine_state.signals().clone(),
|
||||
metadata,
|
||||
))
|
||||
}
|
||||
@ -304,7 +308,7 @@ fn select(
|
||||
|
||||
Ok(values.into_pipeline_data_with_metadata(
|
||||
call_span,
|
||||
engine_state.ctrlc.clone(),
|
||||
engine_state.signals().clone(),
|
||||
metadata,
|
||||
))
|
||||
}
|
||||
|
@ -33,7 +33,11 @@ impl Command for Shuffle {
|
||||
let mut values = input.into_iter_strict(call.head)?.collect::<Vec<_>>();
|
||||
values.shuffle(&mut thread_rng());
|
||||
let iter = values.into_iter();
|
||||
Ok(iter.into_pipeline_data_with_metadata(call.head, engine_state.ctrlc.clone(), metadata))
|
||||
Ok(iter.into_pipeline_data_with_metadata(
|
||||
call.head,
|
||||
engine_state.signals().clone(),
|
||||
metadata,
|
||||
))
|
||||
}
|
||||
|
||||
fn examples(&self) -> Vec<Example> {
|
||||
|
@ -1,4 +1,5 @@
|
||||
use nu_engine::command_prelude::*;
|
||||
use nu_protocol::Signals;
|
||||
use std::io::{self, Read};
|
||||
|
||||
#[derive(Clone)]
|
||||
@ -90,8 +91,6 @@ impl Command for Skip {
|
||||
}
|
||||
None => 1,
|
||||
};
|
||||
|
||||
let ctrlc = engine_state.ctrlc.clone();
|
||||
let input_span = input.span().unwrap_or(call.head);
|
||||
match input {
|
||||
PipelineData::ByteStream(stream, metadata) => {
|
||||
@ -102,7 +101,12 @@ impl Command for Skip {
|
||||
io::copy(&mut (&mut reader).take(n as u64), &mut io::sink())
|
||||
.err_span(span)?;
|
||||
Ok(PipelineData::ByteStream(
|
||||
ByteStream::read(reader, call.head, None, ByteStreamType::Binary),
|
||||
ByteStream::read(
|
||||
reader,
|
||||
call.head,
|
||||
Signals::empty(),
|
||||
ByteStreamType::Binary,
|
||||
),
|
||||
metadata,
|
||||
))
|
||||
} else {
|
||||
@ -124,7 +128,11 @@ impl Command for Skip {
|
||||
_ => Ok(input
|
||||
.into_iter_strict(call.head)?
|
||||
.skip(n)
|
||||
.into_pipeline_data_with_metadata(input_span, ctrlc, metadata)),
|
||||
.into_pipeline_data_with_metadata(
|
||||
input_span,
|
||||
engine_state.signals().clone(),
|
||||
metadata,
|
||||
)),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -89,7 +89,7 @@ impl Command for SkipUntil {
|
||||
.map(|cond| cond.is_false())
|
||||
.unwrap_or(false)
|
||||
})
|
||||
.into_pipeline_data_with_metadata(head, engine_state.ctrlc.clone(), metadata))
|
||||
.into_pipeline_data_with_metadata(head, engine_state.signals().clone(), metadata))
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -94,7 +94,7 @@ impl Command for SkipWhile {
|
||||
.map(|cond| cond.is_true())
|
||||
.unwrap_or(false)
|
||||
})
|
||||
.into_pipeline_data_with_metadata(head, engine_state.ctrlc.clone(), metadata))
|
||||
.into_pipeline_data_with_metadata(head, engine_state.signals().clone(), metadata))
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -173,7 +173,7 @@ impl Command for Sort {
|
||||
let iter = vec.into_iter();
|
||||
Ok(iter.into_pipeline_data_with_metadata(
|
||||
head,
|
||||
engine_state.ctrlc.clone(),
|
||||
engine_state.signals().clone(),
|
||||
metadata,
|
||||
))
|
||||
}
|
||||
|
@ -100,7 +100,7 @@ impl Command for SortBy {
|
||||
}
|
||||
|
||||
let iter = vec.into_iter();
|
||||
Ok(iter.into_pipeline_data_with_metadata(head, engine_state.ctrlc.clone(), metadata))
|
||||
Ok(iter.into_pipeline_data_with_metadata(head, engine_state.signals().clone(), metadata))
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -1,4 +1,5 @@
|
||||
use nu_engine::command_prelude::*;
|
||||
use nu_protocol::Signals;
|
||||
use std::io::Read;
|
||||
|
||||
#[derive(Clone)]
|
||||
@ -46,7 +47,6 @@ impl Command for Take {
|
||||
let head = call.head;
|
||||
let rows_desired: usize = call.req(engine_state, stack, 0)?;
|
||||
|
||||
let ctrlc = engine_state.ctrlc.clone();
|
||||
let metadata = input.metadata();
|
||||
|
||||
match input {
|
||||
@ -56,15 +56,23 @@ impl Command for Take {
|
||||
Value::List { vals, .. } => Ok(vals
|
||||
.into_iter()
|
||||
.take(rows_desired)
|
||||
.into_pipeline_data_with_metadata(head, ctrlc, metadata)),
|
||||
.into_pipeline_data_with_metadata(
|
||||
head,
|
||||
engine_state.signals().clone(),
|
||||
metadata,
|
||||
)),
|
||||
Value::Binary { val, .. } => {
|
||||
let slice: Vec<u8> = val.into_iter().take(rows_desired).collect();
|
||||
Ok(PipelineData::Value(Value::binary(slice, span), metadata))
|
||||
}
|
||||
Value::Range { val, .. } => Ok(val
|
||||
.into_range_iter(span, ctrlc.clone())
|
||||
.into_range_iter(span, Signals::empty())
|
||||
.take(rows_desired)
|
||||
.into_pipeline_data_with_metadata(head, ctrlc, metadata)),
|
||||
.into_pipeline_data_with_metadata(
|
||||
head,
|
||||
engine_state.signals().clone(),
|
||||
metadata,
|
||||
)),
|
||||
// Propagate errors by explicitly matching them before the final case.
|
||||
Value::Error { error, .. } => Err(*error),
|
||||
other => Err(ShellError::OnlySupportsThisInputType {
|
||||
@ -87,7 +95,7 @@ impl Command for Take {
|
||||
ByteStream::read(
|
||||
reader.take(rows_desired as u64),
|
||||
head,
|
||||
None,
|
||||
Signals::empty(),
|
||||
ByteStreamType::Binary,
|
||||
),
|
||||
metadata,
|
||||
|
@ -85,7 +85,7 @@ impl Command for TakeUntil {
|
||||
.map(|cond| cond.is_false())
|
||||
.unwrap_or(false)
|
||||
})
|
||||
.into_pipeline_data_with_metadata(head, engine_state.ctrlc.clone(), metadata))
|
||||
.into_pipeline_data_with_metadata(head, engine_state.signals().clone(), metadata))
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -85,7 +85,7 @@ impl Command for TakeWhile {
|
||||
.map(|cond| cond.is_true())
|
||||
.unwrap_or(false)
|
||||
})
|
||||
.into_pipeline_data_with_metadata(head, engine_state.ctrlc.clone(), metadata))
|
||||
.into_pipeline_data_with_metadata(head, engine_state.signals().clone(), metadata))
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -1,15 +1,11 @@
|
||||
use nu_engine::{command_prelude::*, get_eval_block_with_early_return};
|
||||
use nu_protocol::{
|
||||
byte_stream::copy_with_interrupt, engine::Closure, process::ChildPipe, ByteStream,
|
||||
ByteStreamSource, OutDest, PipelineMetadata,
|
||||
byte_stream::copy_with_signals, engine::Closure, process::ChildPipe, ByteStream,
|
||||
ByteStreamSource, OutDest, PipelineMetadata, Signals,
|
||||
};
|
||||
use std::{
|
||||
io::{self, Read, Write},
|
||||
sync::{
|
||||
atomic::AtomicBool,
|
||||
mpsc::{self, Sender},
|
||||
Arc,
|
||||
},
|
||||
sync::mpsc::{self, Sender},
|
||||
thread::{self, JoinHandle},
|
||||
};
|
||||
|
||||
@ -103,12 +99,11 @@ use it in your pipeline."#
|
||||
|
||||
if let PipelineData::ByteStream(stream, metadata) = input {
|
||||
let span = stream.span();
|
||||
let ctrlc = engine_state.ctrlc.clone();
|
||||
let type_ = stream.type_();
|
||||
|
||||
let info = StreamInfo {
|
||||
span,
|
||||
ctrlc: ctrlc.clone(),
|
||||
signals: engine_state.signals().clone(),
|
||||
type_,
|
||||
metadata: metadata.clone(),
|
||||
};
|
||||
@ -123,7 +118,7 @@ use it in your pipeline."#
|
||||
let tee = IoTee::new(read, tee_thread);
|
||||
|
||||
Ok(PipelineData::ByteStream(
|
||||
ByteStream::read(tee, span, ctrlc, type_),
|
||||
ByteStream::read(tee, span, engine_state.signals().clone(), type_),
|
||||
metadata,
|
||||
))
|
||||
}
|
||||
@ -136,7 +131,7 @@ use it in your pipeline."#
|
||||
let tee = IoTee::new(file, tee_thread);
|
||||
|
||||
Ok(PipelineData::ByteStream(
|
||||
ByteStream::read(tee, span, ctrlc, type_),
|
||||
ByteStream::read(tee, span, engine_state.signals().clone(), type_),
|
||||
metadata,
|
||||
))
|
||||
}
|
||||
@ -234,19 +229,19 @@ use it in your pipeline."#
|
||||
}
|
||||
|
||||
let span = input.span().unwrap_or(head);
|
||||
let ctrlc = engine_state.ctrlc.clone();
|
||||
let metadata = input.metadata();
|
||||
let metadata_clone = metadata.clone();
|
||||
let signals = engine_state.signals().clone();
|
||||
|
||||
Ok(tee(input.into_iter(), move |rx| {
|
||||
let input = rx.into_pipeline_data_with_metadata(span, ctrlc, metadata_clone);
|
||||
let input = rx.into_pipeline_data_with_metadata(span, signals, metadata_clone);
|
||||
eval_block(input)
|
||||
})
|
||||
.err_span(call.head)?
|
||||
.map(move |result| result.unwrap_or_else(|err| Value::error(err, closure_span)))
|
||||
.into_pipeline_data_with_metadata(
|
||||
span,
|
||||
engine_state.ctrlc.clone(),
|
||||
engine_state.signals().clone(),
|
||||
metadata,
|
||||
))
|
||||
}
|
||||
@ -386,8 +381,13 @@ fn spawn_tee(
|
||||
let thread = thread::Builder::new()
|
||||
.name("tee".into())
|
||||
.spawn(move || {
|
||||
// We don't use ctrlc here because we assume it already has it on the other side
|
||||
let stream = ByteStream::from_iter(receiver.into_iter(), info.span, None, info.type_);
|
||||
// We use Signals::empty() here because we assume there already is a Signals on the other side
|
||||
let stream = ByteStream::from_iter(
|
||||
receiver.into_iter(),
|
||||
info.span,
|
||||
Signals::empty(),
|
||||
info.type_,
|
||||
);
|
||||
eval_block(PipelineData::ByteStream(stream, info.metadata))
|
||||
})
|
||||
.err_span(info.span)?;
|
||||
@ -398,13 +398,13 @@ fn spawn_tee(
|
||||
#[derive(Clone)]
|
||||
struct StreamInfo {
|
||||
span: Span,
|
||||
ctrlc: Option<Arc<AtomicBool>>,
|
||||
signals: Signals,
|
||||
type_: ByteStreamType,
|
||||
metadata: Option<PipelineMetadata>,
|
||||
}
|
||||
|
||||
fn copy(src: impl Read, dest: impl Write, info: &StreamInfo) -> Result<(), ShellError> {
|
||||
copy_with_interrupt(src, dest, info.span, info.ctrlc.as_deref())?;
|
||||
copy_with_signals(src, dest, info.span, &info.signals)?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
@ -421,11 +421,11 @@ fn copy_on_thread(
|
||||
info: &StreamInfo,
|
||||
) -> Result<JoinHandle<Result<(), ShellError>>, ShellError> {
|
||||
let span = info.span;
|
||||
let ctrlc = info.ctrlc.clone();
|
||||
let signals = info.signals.clone();
|
||||
thread::Builder::new()
|
||||
.name("stderr copier".into())
|
||||
.spawn(move || {
|
||||
copy_with_interrupt(src, dest, span, ctrlc.as_deref())?;
|
||||
copy_with_signals(src, dest, span, &signals)?;
|
||||
Ok(())
|
||||
})
|
||||
.map_err(|e| e.into_spanned(span).into())
|
||||
|
@ -173,7 +173,6 @@ pub fn transpose(
|
||||
});
|
||||
}
|
||||
|
||||
let ctrlc = engine_state.ctrlc.clone();
|
||||
let metadata = input.metadata();
|
||||
let input: Vec<_> = input.into_iter().collect();
|
||||
|
||||
@ -284,7 +283,11 @@ pub fn transpose(
|
||||
metadata,
|
||||
))
|
||||
} else {
|
||||
Ok(result_data.into_pipeline_data_with_metadata(name, ctrlc, metadata))
|
||||
Ok(result_data.into_pipeline_data_with_metadata(
|
||||
name,
|
||||
engine_state.signals().clone(),
|
||||
metadata,
|
||||
))
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -241,18 +241,18 @@ pub fn uniq(
|
||||
item_mapper: Box<dyn Fn(ItemMapperState) -> ValueCounter>,
|
||||
metadata: Option<PipelineMetadata>,
|
||||
) -> Result<PipelineData, ShellError> {
|
||||
let ctrlc = engine_state.ctrlc.clone();
|
||||
let head = call.head;
|
||||
let flag_show_count = call.has_flag(engine_state, stack, "count")?;
|
||||
let flag_show_repeated = call.has_flag(engine_state, stack, "repeated")?;
|
||||
let flag_ignore_case = call.has_flag(engine_state, stack, "ignore-case")?;
|
||||
let flag_only_uniques = call.has_flag(engine_state, stack, "unique")?;
|
||||
|
||||
let signals = engine_state.signals().clone();
|
||||
let uniq_values = input
|
||||
.into_iter()
|
||||
.enumerate()
|
||||
.map_while(|(index, item)| {
|
||||
if nu_utils::ctrl_c::was_pressed(&ctrlc) {
|
||||
if signals.interrupted() {
|
||||
return None;
|
||||
}
|
||||
Some(item_mapper(ItemMapperState {
|
||||
|
@ -187,7 +187,11 @@ fn update(
|
||||
Ok(pre_elems
|
||||
.into_iter()
|
||||
.chain(stream)
|
||||
.into_pipeline_data_with_metadata(head, engine_state.ctrlc.clone(), metadata))
|
||||
.into_pipeline_data_with_metadata(
|
||||
head,
|
||||
engine_state.signals().clone(),
|
||||
metadata,
|
||||
))
|
||||
} else if let Value::Closure { val, .. } = replacement {
|
||||
let mut closure = ClosureEval::new(engine_state, stack, *val);
|
||||
let stream = stream.map(move |mut value| {
|
||||
|
@ -247,7 +247,11 @@ fn upsert(
|
||||
Ok(pre_elems
|
||||
.into_iter()
|
||||
.chain(stream)
|
||||
.into_pipeline_data_with_metadata(head, engine_state.ctrlc.clone(), metadata))
|
||||
.into_pipeline_data_with_metadata(
|
||||
head,
|
||||
engine_state.signals().clone(),
|
||||
metadata,
|
||||
))
|
||||
} else if let Value::Closure { val, .. } = replacement {
|
||||
let mut closure = ClosureEval::new(engine_state, stack, *val);
|
||||
let stream = stream.map(move |mut value| {
|
||||
|
@ -32,10 +32,7 @@ pub fn boolean_fold(
|
||||
let mut closure = ClosureEval::new(engine_state, stack, closure);
|
||||
|
||||
for value in input {
|
||||
if nu_utils::ctrl_c::was_pressed(&engine_state.ctrlc) {
|
||||
break;
|
||||
}
|
||||
|
||||
engine_state.signals().check(head)?;
|
||||
let pred = closure.run_with_value(value)?.into_value(head)?.is_true();
|
||||
|
||||
if pred == accumulator {
|
||||
|
@ -134,7 +134,7 @@ fn values(
|
||||
head: Span,
|
||||
input: PipelineData,
|
||||
) -> Result<PipelineData, ShellError> {
|
||||
let ctrlc = engine_state.ctrlc.clone();
|
||||
let signals = engine_state.signals().clone();
|
||||
let metadata = input.metadata();
|
||||
match input {
|
||||
PipelineData::Empty => Ok(PipelineData::Empty),
|
||||
@ -144,7 +144,7 @@ fn values(
|
||||
Value::List { vals, .. } => match get_values(&vals, head, span) {
|
||||
Ok(cols) => Ok(cols
|
||||
.into_iter()
|
||||
.into_pipeline_data_with_metadata(head, ctrlc, metadata)),
|
||||
.into_pipeline_data_with_metadata(head, signals, metadata)),
|
||||
Err(err) => Err(err),
|
||||
},
|
||||
Value::Custom { val, .. } => {
|
||||
@ -152,7 +152,7 @@ fn values(
|
||||
match get_values(&[input_as_base_value], head, span) {
|
||||
Ok(cols) => Ok(cols
|
||||
.into_iter()
|
||||
.into_pipeline_data_with_metadata(head, ctrlc, metadata)),
|
||||
.into_pipeline_data_with_metadata(head, signals, metadata)),
|
||||
Err(err) => Err(err),
|
||||
}
|
||||
}
|
||||
@ -160,7 +160,7 @@ fn values(
|
||||
.values()
|
||||
.cloned()
|
||||
.collect::<Vec<_>>()
|
||||
.into_pipeline_data_with_metadata(head, ctrlc, metadata)),
|
||||
.into_pipeline_data_with_metadata(head, signals, metadata)),
|
||||
// Propagate errors
|
||||
Value::Error { error, .. } => Err(*error),
|
||||
other => Err(ShellError::OnlySupportsThisInputType {
|
||||
@ -176,7 +176,7 @@ fn values(
|
||||
match get_values(&vals, head, head) {
|
||||
Ok(cols) => Ok(cols
|
||||
.into_iter()
|
||||
.into_pipeline_data_with_metadata(head, ctrlc, metadata)),
|
||||
.into_pipeline_data_with_metadata(head, signals, metadata)),
|
||||
Err(err) => Err(err),
|
||||
}
|
||||
}
|
||||
|
@ -70,7 +70,7 @@ not supported."#
|
||||
Err(err) => Some(Value::error(err, head)),
|
||||
}
|
||||
})
|
||||
.into_pipeline_data_with_metadata(head, engine_state.ctrlc.clone(), metadata))
|
||||
.into_pipeline_data_with_metadata(head, engine_state.signals().clone(), metadata))
|
||||
}
|
||||
|
||||
fn examples(&self) -> Vec<Example> {
|
||||
|
@ -113,7 +113,6 @@ impl Command for Window {
|
||||
) -> Result<PipelineData, ShellError> {
|
||||
let head = call.head;
|
||||
let group_size: Spanned<usize> = call.req(engine_state, stack, 0)?;
|
||||
let ctrlc = engine_state.ctrlc.clone();
|
||||
let metadata = input.metadata();
|
||||
let stride: Option<usize> = call.get_flag(engine_state, stack, "stride")?;
|
||||
let remainder = call.has_flag(engine_state, stack, "remainder")?;
|
||||
@ -131,7 +130,11 @@ impl Command for Window {
|
||||
remainder,
|
||||
};
|
||||
|
||||
Ok(each_group_iterator.into_pipeline_data_with_metadata(head, ctrlc, metadata))
|
||||
Ok(each_group_iterator.into_pipeline_data_with_metadata(
|
||||
head,
|
||||
engine_state.signals().clone(),
|
||||
metadata,
|
||||
))
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -42,7 +42,7 @@ impl Command for Wrap {
|
||||
| PipelineData::ListStream { .. } => Ok(input
|
||||
.into_iter()
|
||||
.map(move |x| Value::record(record! { name.clone() => x }, span))
|
||||
.into_pipeline_data_with_metadata(span, engine_state.ctrlc.clone(), metadata)),
|
||||
.into_pipeline_data_with_metadata(span, engine_state.signals().clone(), metadata)),
|
||||
PipelineData::ByteStream(stream, ..) => Ok(Value::record(
|
||||
record! { name => stream.into_value()? },
|
||||
span,
|
||||
|
@ -112,7 +112,7 @@ impl Command for Zip {
|
||||
.into_iter()
|
||||
.zip(other)
|
||||
.map(move |(x, y)| Value::list(vec![x, y], head))
|
||||
.into_pipeline_data_with_metadata(head, engine_state.ctrlc.clone(), metadata))
|
||||
.into_pipeline_data_with_metadata(head, engine_state.signals().clone(), metadata))
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -1,5 +1,5 @@
|
||||
use csv::{ReaderBuilder, Trim};
|
||||
use nu_protocol::{ByteStream, ListStream, PipelineData, ShellError, Span, Value};
|
||||
use nu_protocol::{ByteStream, ListStream, PipelineData, ShellError, Signals, Span, Value};
|
||||
|
||||
fn from_csv_error(err: csv::Error, span: Span) -> ShellError {
|
||||
ShellError::DelimiterError {
|
||||
@ -25,7 +25,7 @@ fn from_delimited_stream(
|
||||
let input_reader = if let Some(stream) = input.reader() {
|
||||
stream
|
||||
} else {
|
||||
return Ok(ListStream::new(std::iter::empty(), span, None));
|
||||
return Ok(ListStream::new(std::iter::empty(), span, Signals::empty()));
|
||||
};
|
||||
|
||||
let mut reader = ReaderBuilder::new()
|
||||
@ -83,7 +83,7 @@ fn from_delimited_stream(
|
||||
Value::record(columns.zip(values).collect(), span)
|
||||
});
|
||||
|
||||
Ok(ListStream::new(iter, span, None))
|
||||
Ok(ListStream::new(iter, span, Signals::empty()))
|
||||
}
|
||||
|
||||
pub(super) struct DelimitedReaderConfig {
|
||||
@ -106,7 +106,7 @@ pub(super) fn from_delimited_data(
|
||||
PipelineData::Empty => Ok(PipelineData::Empty),
|
||||
PipelineData::Value(value, metadata) => {
|
||||
let string = value.into_string()?;
|
||||
let byte_stream = ByteStream::read_string(string, name, None);
|
||||
let byte_stream = ByteStream::read_string(string, name, Signals::empty());
|
||||
Ok(PipelineData::ListStream(
|
||||
from_delimited_stream(config, byte_stream, name)?,
|
||||
metadata,
|
||||
|
@ -1,10 +1,7 @@
|
||||
use std::{
|
||||
io::{BufRead, Cursor},
|
||||
sync::{atomic::AtomicBool, Arc},
|
||||
};
|
||||
use std::io::{BufRead, Cursor};
|
||||
|
||||
use nu_engine::command_prelude::*;
|
||||
use nu_protocol::{ListStream, PipelineMetadata};
|
||||
use nu_protocol::{ListStream, PipelineMetadata, Signals};
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct FromJson;
|
||||
@ -80,7 +77,12 @@ impl Command for FromJson {
|
||||
match input {
|
||||
PipelineData::Value(Value::String { val, .. }, metadata) => {
|
||||
Ok(PipelineData::ListStream(
|
||||
read_json_lines(Cursor::new(val), span, strict, engine_state.ctrlc.clone()),
|
||||
read_json_lines(
|
||||
Cursor::new(val),
|
||||
span,
|
||||
strict,
|
||||
engine_state.signals().clone(),
|
||||
),
|
||||
update_metadata(metadata),
|
||||
))
|
||||
}
|
||||
@ -89,7 +91,7 @@ impl Command for FromJson {
|
||||
{
|
||||
if let Some(reader) = stream.reader() {
|
||||
Ok(PipelineData::ListStream(
|
||||
read_json_lines(reader, span, strict, None),
|
||||
read_json_lines(reader, span, strict, Signals::empty()),
|
||||
update_metadata(metadata),
|
||||
))
|
||||
} else {
|
||||
@ -127,7 +129,7 @@ fn read_json_lines(
|
||||
input: impl BufRead + Send + 'static,
|
||||
span: Span,
|
||||
strict: bool,
|
||||
interrupt: Option<Arc<AtomicBool>>,
|
||||
signals: Signals,
|
||||
) -> ListStream {
|
||||
let iter = input
|
||||
.lines()
|
||||
@ -142,7 +144,7 @@ fn read_json_lines(
|
||||
})
|
||||
.map(move |result| result.unwrap_or_else(|err| Value::error(err, span)));
|
||||
|
||||
ListStream::new(iter, span, interrupt)
|
||||
ListStream::new(iter, span, signals)
|
||||
}
|
||||
|
||||
fn convert_nujson_to_value(value: nu_json::Value, span: Span) -> Value {
|
||||
|
@ -5,12 +5,12 @@ use std::{
|
||||
error::Error,
|
||||
io::{self, Cursor, ErrorKind},
|
||||
string::FromUtf8Error,
|
||||
sync::{atomic::AtomicBool, Arc},
|
||||
};
|
||||
|
||||
use byteorder::{BigEndian, ReadBytesExt};
|
||||
use chrono::{TimeZone, Utc};
|
||||
use nu_engine::command_prelude::*;
|
||||
use nu_protocol::Signals;
|
||||
use rmp::decode::{self as mp, ValueReadError};
|
||||
|
||||
/// Max recursion depth
|
||||
@ -111,7 +111,7 @@ MessagePack: https://msgpack.org/
|
||||
let opts = Opts {
|
||||
span: call.head,
|
||||
objects,
|
||||
ctrlc: engine_state.ctrlc.clone(),
|
||||
signals: engine_state.signals().clone(),
|
||||
};
|
||||
match input {
|
||||
// Deserialize from a byte buffer
|
||||
@ -227,7 +227,7 @@ impl From<ReadError> for ShellError {
|
||||
pub(crate) struct Opts {
|
||||
pub span: Span,
|
||||
pub objects: bool,
|
||||
pub ctrlc: Option<Arc<AtomicBool>>,
|
||||
pub signals: Signals,
|
||||
}
|
||||
|
||||
/// Read single or multiple values into PipelineData
|
||||
@ -238,7 +238,7 @@ pub(crate) fn read_msgpack(
|
||||
let Opts {
|
||||
span,
|
||||
objects,
|
||||
ctrlc,
|
||||
signals,
|
||||
} = opts;
|
||||
if objects {
|
||||
// Make an iterator that reads multiple values from the reader
|
||||
@ -262,7 +262,7 @@ pub(crate) fn read_msgpack(
|
||||
None
|
||||
}
|
||||
})
|
||||
.into_pipeline_data(span, ctrlc))
|
||||
.into_pipeline_data(span, signals))
|
||||
} else {
|
||||
// Read a single value and then make sure it's EOF
|
||||
let result = read_value(&mut input, span, 0)?;
|
||||
|
@ -41,7 +41,7 @@ impl Command for FromMsgpackz {
|
||||
let opts = Opts {
|
||||
span,
|
||||
objects,
|
||||
ctrlc: engine_state.ctrlc.clone(),
|
||||
signals: engine_state.signals().clone(),
|
||||
};
|
||||
match input {
|
||||
// Deserialize from a byte buffer
|
||||
|
@ -1,7 +1,7 @@
|
||||
use csv::WriterBuilder;
|
||||
use nu_cmd_base::formats::to::delimited::merge_descriptors;
|
||||
use nu_protocol::{
|
||||
ByteStream, ByteStreamType, Config, PipelineData, ShellError, Span, Spanned, Value,
|
||||
ByteStream, ByteStreamType, Config, PipelineData, ShellError, Signals, Span, Spanned, Value,
|
||||
};
|
||||
use std::{iter, sync::Arc};
|
||||
|
||||
@ -128,37 +128,42 @@ pub fn to_delimited_data(
|
||||
// If we're configured to generate a header, we generate it first, then set this false
|
||||
let mut is_header = !noheaders;
|
||||
|
||||
let stream = ByteStream::from_fn(head, None, ByteStreamType::String, move |buffer| {
|
||||
let mut wtr = WriterBuilder::new()
|
||||
.delimiter(separator)
|
||||
.from_writer(buffer);
|
||||
let stream = ByteStream::from_fn(
|
||||
head,
|
||||
Signals::empty(),
|
||||
ByteStreamType::String,
|
||||
move |buffer| {
|
||||
let mut wtr = WriterBuilder::new()
|
||||
.delimiter(separator)
|
||||
.from_writer(buffer);
|
||||
|
||||
if is_header {
|
||||
// Unless we are configured not to write a header, we write the header row now, once,
|
||||
// before everything else.
|
||||
wtr.write_record(&columns)
|
||||
.map_err(|err| make_csv_error(err, format_name, head))?;
|
||||
is_header = false;
|
||||
Ok(true)
|
||||
} else if let Some(row) = iter.next() {
|
||||
// Write each column of a normal row, in order
|
||||
let record = row.into_record()?;
|
||||
for column in &columns {
|
||||
let field = record
|
||||
.get(column)
|
||||
.map(|v| to_string_tagged_value(v, &config, format_name))
|
||||
.unwrap_or(Ok(String::new()))?;
|
||||
wtr.write_field(field)
|
||||
if is_header {
|
||||
// Unless we are configured not to write a header, we write the header row now, once,
|
||||
// before everything else.
|
||||
wtr.write_record(&columns)
|
||||
.map_err(|err| make_csv_error(err, format_name, head))?;
|
||||
is_header = false;
|
||||
Ok(true)
|
||||
} else if let Some(row) = iter.next() {
|
||||
// Write each column of a normal row, in order
|
||||
let record = row.into_record()?;
|
||||
for column in &columns {
|
||||
let field = record
|
||||
.get(column)
|
||||
.map(|v| to_string_tagged_value(v, &config, format_name))
|
||||
.unwrap_or(Ok(String::new()))?;
|
||||
wtr.write_field(field)
|
||||
.map_err(|err| make_csv_error(err, format_name, head))?;
|
||||
}
|
||||
// End the row
|
||||
wtr.write_record(iter::empty::<String>())
|
||||
.map_err(|err| make_csv_error(err, format_name, head))?;
|
||||
Ok(true)
|
||||
} else {
|
||||
Ok(false)
|
||||
}
|
||||
// End the row
|
||||
wtr.write_record(iter::empty::<String>())
|
||||
.map_err(|err| make_csv_error(err, format_name, head))?;
|
||||
Ok(true)
|
||||
} else {
|
||||
Ok(false)
|
||||
}
|
||||
});
|
||||
},
|
||||
);
|
||||
|
||||
Ok(PipelineData::ByteStream(stream, metadata))
|
||||
}
|
||||
|
@ -5,7 +5,7 @@ use std::io;
|
||||
|
||||
use byteorder::{BigEndian, WriteBytesExt};
|
||||
use nu_engine::command_prelude::*;
|
||||
use nu_protocol::{ast::PathMember, Spanned};
|
||||
use nu_protocol::{ast::PathMember, Signals, Spanned};
|
||||
use rmp::encode as mp;
|
||||
|
||||
/// Max recursion depth
|
||||
@ -189,7 +189,7 @@ pub(crate) fn write_value(
|
||||
// Convert range to list
|
||||
write_value(
|
||||
out,
|
||||
&Value::list(val.into_range_iter(span, None).collect(), span),
|
||||
&Value::list(val.into_range_iter(span, Signals::empty()).collect(), span),
|
||||
depth,
|
||||
)?;
|
||||
}
|
||||
|
@ -60,7 +60,7 @@ impl Command for ToText {
|
||||
ByteStream::from_iter(
|
||||
iter,
|
||||
span,
|
||||
engine_state.ctrlc.clone(),
|
||||
engine_state.signals().clone(),
|
||||
ByteStreamType::String,
|
||||
),
|
||||
update_metadata(meta),
|
||||
|
@ -166,7 +166,7 @@ used as the next argument to the closure, otherwise generation stops.
|
||||
|
||||
Ok(iter
|
||||
.flatten()
|
||||
.into_pipeline_data(call.head, engine_state.ctrlc.clone()))
|
||||
.into_pipeline_data(call.head, engine_state.signals().clone()))
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -129,7 +129,7 @@ pub fn run_seq(
|
||||
span,
|
||||
},
|
||||
span,
|
||||
engine_state.ctrlc.clone(),
|
||||
engine_state.signals().clone(),
|
||||
)
|
||||
} else {
|
||||
ListStream::new(
|
||||
@ -141,7 +141,7 @@ pub fn run_seq(
|
||||
span,
|
||||
},
|
||||
span,
|
||||
engine_state.ctrlc.clone(),
|
||||
engine_state.signals().clone(),
|
||||
)
|
||||
};
|
||||
|
||||
|
@ -96,7 +96,7 @@ where
|
||||
}
|
||||
} else {
|
||||
let args = Arguments { binary, cell_paths };
|
||||
operate(action::<D>, args, input, head, engine_state.ctrlc.clone())
|
||||
operate(action::<D>, args, input, head, engine_state.signals())
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -42,10 +42,7 @@ impl Command for SubCommand {
|
||||
input: PipelineData,
|
||||
) -> Result<PipelineData, ShellError> {
|
||||
let head = call.head;
|
||||
input.map(
|
||||
move |value| abs_helper(value, head),
|
||||
engine_state.ctrlc.clone(),
|
||||
)
|
||||
input.map(move |value| abs_helper(value, head), engine_state.signals())
|
||||
}
|
||||
|
||||
fn examples(&self) -> Vec<Example> {
|
||||
|
@ -41,10 +41,7 @@ impl Command for SubCommand {
|
||||
if matches!(input, PipelineData::Empty) {
|
||||
return Err(ShellError::PipelineEmpty { dst_span: head });
|
||||
}
|
||||
input.map(
|
||||
move |value| operate(value, head),
|
||||
engine_state.ctrlc.clone(),
|
||||
)
|
||||
input.map(move |value| operate(value, head), engine_state.signals())
|
||||
}
|
||||
|
||||
fn examples(&self) -> Vec<Example> {
|
||||
|
@ -41,10 +41,7 @@ impl Command for SubCommand {
|
||||
if matches!(input, PipelineData::Empty) {
|
||||
return Err(ShellError::PipelineEmpty { dst_span: head });
|
||||
}
|
||||
input.map(
|
||||
move |value| operate(value, head),
|
||||
engine_state.ctrlc.clone(),
|
||||
)
|
||||
input.map(move |value| operate(value, head), engine_state.signals())
|
||||
}
|
||||
|
||||
fn examples(&self) -> Vec<Example> {
|
||||
|
@ -59,7 +59,7 @@ impl Command for SubCommand {
|
||||
let base = base.item;
|
||||
input.map(
|
||||
move |value| operate(value, head, base),
|
||||
engine_state.ctrlc.clone(),
|
||||
engine_state.signals(),
|
||||
)
|
||||
}
|
||||
|
||||
|
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user