Add and use new Signals struct (#13314)

# Description

This PR introduces a new `Signals` struct to replace our ad-hoc passing
around of `ctrlc: Option<Arc<AtomicBool>>`. Doing so has a few benefits:
- We can better enforce when/where resetting or triggering an interrupt
  is allowed.
- It consolidates `nu_utils::ctrl_c::was_pressed` and other ad-hoc
  re-implementations into a single place: `Signals::check`.
- It allows us to add other types of signals later if we want (e.g.,
  exit or suspend).
- Similarly, we can more easily change the underlying implementation if
  we need to in the future.
- Places that used to have a `ctrlc` of `None` now use
  `Signals::empty()`, so we can double-check these usages for
  correctness in the future.
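As an aid to reading the diff below, here is a minimal sketch of what such a `Signals` struct could look like. It assumes an `AtomicBool`-backed representation and `nu_protocol`'s `Span` and `ShellError` types; the field name, the memory `Ordering`, and the `ShellError::Interrupted` variant are illustrative assumptions, not the exact code in this commit:

```rust
use std::sync::{
    atomic::{AtomicBool, Ordering},
    Arc,
};

use nu_protocol::{ShellError, Span}; // the real struct lives alongside these types

/// Sketch only: a `None` interior means "never interrupted", which is
/// what `Signals::empty()` hands to call sites that used to pass
/// `ctrlc: None`.
#[derive(Debug, Clone)]
pub struct Signals {
    signals: Option<Arc<AtomicBool>>, // assumed representation
}

impl Signals {
    /// Replaces the old `ctrlc: None`.
    pub const fn empty() -> Self {
        Signals { signals: None }
    }

    pub fn new(signals: Arc<AtomicBool>) -> Self {
        Signals {
            signals: Some(signals),
        }
    }

    /// Consolidates the old `nu_utils::ctrl_c::was_pressed` checks:
    /// error out if an interrupt has been triggered.
    pub fn check(&self, span: Span) -> Result<(), ShellError> {
        if self.interrupted() {
            // assumed error variant
            Err(ShellError::Interrupted { span })
        } else {
            Ok(())
        }
    }

    pub fn interrupted(&self) -> bool {
        self.signals
            .as_ref()
            .is_some_and(|b| b.load(Ordering::Relaxed))
    }
}
```

The point of the indirection is the second and fourth bullets above: call sites only ever see `empty()`, `check`, and friends, so both the set of signals handled and the representation behind them can change without touching hundreds of files again.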
Author: Ian Manske
Committed: 2024-07-07 22:29:01 +00:00 (committed by GitHub)
Parent: c6b6b1b7a8
Commit: 399a7c8836
246 changed files with 1332 additions and 1234 deletions
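Nearly all of the hunks below apply one of two mechanical substitutions: call sites that had a live interrupt flag swap `engine_state.ctrlc.clone()` for `engine_state.signals().clone()`, and call sites that passed `None` now pass `Signals::empty()`. On the consumer side the pattern looks roughly like this sketch, which reuses the hypothetical `check` method from the sketch above (the function and its names are illustrative, not taken from the diff):

```rust
use nu_protocol::{ShellError, Signals, Span, Value};

/// Hypothetical long-running consumer: rather than loading an
/// `Option<Arc<AtomicBool>>` by hand on each iteration, it asks the
/// `Signals` value whether an interrupt has been triggered.
fn collect_values(
    iter: impl Iterator<Item = Value>,
    signals: &Signals,
    span: Span,
) -> Result<Vec<Value>, ShellError> {
    let mut out = Vec::new();
    for value in iter {
        // Was: `nu_utils::ctrl_c::was_pressed(&ctrlc)` or an ad-hoc load.
        signals.check(span)?;
        out.push(value);
    }
    Ok(out)
}
```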

```diff
@@ -1,5 +1,5 @@
 use csv::{ReaderBuilder, Trim};
-use nu_protocol::{ByteStream, ListStream, PipelineData, ShellError, Span, Value};
+use nu_protocol::{ByteStream, ListStream, PipelineData, ShellError, Signals, Span, Value};
 
 fn from_csv_error(err: csv::Error, span: Span) -> ShellError {
     ShellError::DelimiterError {
@@ -25,7 +25,7 @@ fn from_delimited_stream(
     let input_reader = if let Some(stream) = input.reader() {
         stream
     } else {
-        return Ok(ListStream::new(std::iter::empty(), span, None));
+        return Ok(ListStream::new(std::iter::empty(), span, Signals::empty()));
     };
 
     let mut reader = ReaderBuilder::new()
@@ -83,7 +83,7 @@ fn from_delimited_stream(
             Value::record(columns.zip(values).collect(), span)
         });
 
-    Ok(ListStream::new(iter, span, None))
+    Ok(ListStream::new(iter, span, Signals::empty()))
 }
 
 pub(super) struct DelimitedReaderConfig {
@@ -106,7 +106,7 @@ pub(super) fn from_delimited_data(
         PipelineData::Empty => Ok(PipelineData::Empty),
        PipelineData::Value(value, metadata) => {
            let string = value.into_string()?;
-            let byte_stream = ByteStream::read_string(string, name, None);
+            let byte_stream = ByteStream::read_string(string, name, Signals::empty());
            Ok(PipelineData::ListStream(
                from_delimited_stream(config, byte_stream, name)?,
                metadata,
```

```diff
@@ -1,10 +1,7 @@
-use std::{
-    io::{BufRead, Cursor},
-    sync::{atomic::AtomicBool, Arc},
-};
+use std::io::{BufRead, Cursor};
 
 use nu_engine::command_prelude::*;
-use nu_protocol::{ListStream, PipelineMetadata};
+use nu_protocol::{ListStream, PipelineMetadata, Signals};
 
 #[derive(Clone)]
 pub struct FromJson;
@@ -80,7 +77,12 @@ impl Command for FromJson {
         match input {
             PipelineData::Value(Value::String { val, .. }, metadata) => {
                 Ok(PipelineData::ListStream(
-                    read_json_lines(Cursor::new(val), span, strict, engine_state.ctrlc.clone()),
+                    read_json_lines(
+                        Cursor::new(val),
+                        span,
+                        strict,
+                        engine_state.signals().clone(),
+                    ),
                     update_metadata(metadata),
                 ))
             }
@@ -89,7 +91,7 @@ impl Command for FromJson {
             {
                 if let Some(reader) = stream.reader() {
                     Ok(PipelineData::ListStream(
-                        read_json_lines(reader, span, strict, None),
+                        read_json_lines(reader, span, strict, Signals::empty()),
                         update_metadata(metadata),
                     ))
                 } else {
@@ -127,7 +129,7 @@ fn read_json_lines(
     input: impl BufRead + Send + 'static,
     span: Span,
     strict: bool,
-    interrupt: Option<Arc<AtomicBool>>,
+    signals: Signals,
 ) -> ListStream {
     let iter = input
         .lines()
@@ -142,7 +144,7 @@
         })
         .map(move |result| result.unwrap_or_else(|err| Value::error(err, span)));
 
-    ListStream::new(iter, span, interrupt)
+    ListStream::new(iter, span, signals)
 }
 
 fn convert_nujson_to_value(value: nu_json::Value, span: Span) -> Value {
```

```diff
@@ -5,12 +5,12 @@ use std::{
     error::Error,
     io::{self, Cursor, ErrorKind},
     string::FromUtf8Error,
-    sync::{atomic::AtomicBool, Arc},
 };
 
 use byteorder::{BigEndian, ReadBytesExt};
 use chrono::{TimeZone, Utc};
 use nu_engine::command_prelude::*;
+use nu_protocol::Signals;
 use rmp::decode::{self as mp, ValueReadError};
 
 /// Max recursion depth
@@ -111,7 +111,7 @@ MessagePack: https://msgpack.org/
         let opts = Opts {
             span: call.head,
             objects,
-            ctrlc: engine_state.ctrlc.clone(),
+            signals: engine_state.signals().clone(),
         };
         match input {
             // Deserialize from a byte buffer
@@ -227,7 +227,7 @@ impl From<ReadError> for ShellError {
 pub(crate) struct Opts {
     pub span: Span,
     pub objects: bool,
-    pub ctrlc: Option<Arc<AtomicBool>>,
+    pub signals: Signals,
 }
 
 /// Read single or multiple values into PipelineData
@@ -238,7 +238,7 @@ pub(crate) fn read_msgpack(
     let Opts {
         span,
         objects,
-        ctrlc,
+        signals,
     } = opts;
     if objects {
         // Make an iterator that reads multiple values from the reader
@@ -262,7 +262,7 @@ pub(crate) fn read_msgpack(
                 None
             }
         })
-        .into_pipeline_data(span, ctrlc))
+        .into_pipeline_data(span, signals))
     } else {
         // Read a single value and then make sure it's EOF
         let result = read_value(&mut input, span, 0)?;
```

```diff
@@ -41,7 +41,7 @@ impl Command for FromMsgpackz {
         let opts = Opts {
             span,
             objects,
-            ctrlc: engine_state.ctrlc.clone(),
+            signals: engine_state.signals().clone(),
         };
         match input {
             // Deserialize from a byte buffer
```

```diff
@@ -1,7 +1,7 @@
 use csv::WriterBuilder;
 use nu_cmd_base::formats::to::delimited::merge_descriptors;
 use nu_protocol::{
-    ByteStream, ByteStreamType, Config, PipelineData, ShellError, Span, Spanned, Value,
+    ByteStream, ByteStreamType, Config, PipelineData, ShellError, Signals, Span, Spanned, Value,
 };
 
 use std::{iter, sync::Arc};
@@ -128,37 +128,42 @@ pub fn to_delimited_data(
     // If we're configured to generate a header, we generate it first, then set this false
     let mut is_header = !noheaders;
 
-    let stream = ByteStream::from_fn(head, None, ByteStreamType::String, move |buffer| {
-        let mut wtr = WriterBuilder::new()
-            .delimiter(separator)
-            .from_writer(buffer);
-
-        if is_header {
-            // Unless we are configured not to write a header, we write the header row now, once,
-            // before everything else.
-            wtr.write_record(&columns)
-                .map_err(|err| make_csv_error(err, format_name, head))?;
-            is_header = false;
-            Ok(true)
-        } else if let Some(row) = iter.next() {
-            // Write each column of a normal row, in order
-            let record = row.into_record()?;
-            for column in &columns {
-                let field = record
-                    .get(column)
-                    .map(|v| to_string_tagged_value(v, &config, format_name))
-                    .unwrap_or(Ok(String::new()))?;
-                wtr.write_field(field)
-                    .map_err(|err| make_csv_error(err, format_name, head))?;
-            }
-            // End the row
-            wtr.write_record(iter::empty::<String>())
-                .map_err(|err| make_csv_error(err, format_name, head))?;
-            Ok(true)
-        } else {
-            Ok(false)
-        }
-    });
+    let stream = ByteStream::from_fn(
+        head,
+        Signals::empty(),
+        ByteStreamType::String,
+        move |buffer| {
+            let mut wtr = WriterBuilder::new()
+                .delimiter(separator)
+                .from_writer(buffer);
+
+            if is_header {
+                // Unless we are configured not to write a header, we write the header row now, once,
+                // before everything else.
+                wtr.write_record(&columns)
+                    .map_err(|err| make_csv_error(err, format_name, head))?;
+                is_header = false;
+                Ok(true)
+            } else if let Some(row) = iter.next() {
+                // Write each column of a normal row, in order
+                let record = row.into_record()?;
+                for column in &columns {
+                    let field = record
+                        .get(column)
+                        .map(|v| to_string_tagged_value(v, &config, format_name))
+                        .unwrap_or(Ok(String::new()))?;
+                    wtr.write_field(field)
+                        .map_err(|err| make_csv_error(err, format_name, head))?;
+                }
+                // End the row
+                wtr.write_record(iter::empty::<String>())
+                    .map_err(|err| make_csv_error(err, format_name, head))?;
+                Ok(true)
+            } else {
+                Ok(false)
+            }
+        },
+    );
 
     Ok(PipelineData::ByteStream(stream, metadata))
 }
```

```diff
@@ -5,7 +5,7 @@ use std::io;
 
 use byteorder::{BigEndian, WriteBytesExt};
 use nu_engine::command_prelude::*;
-use nu_protocol::{ast::PathMember, Spanned};
+use nu_protocol::{ast::PathMember, Signals, Spanned};
 use rmp::encode as mp;
 
 /// Max recursion depth
@@ -189,7 +189,7 @@ pub(crate) fn write_value(
             // Convert range to list
             write_value(
                 out,
-                &Value::list(val.into_range_iter(span, None).collect(), span),
+                &Value::list(val.into_range_iter(span, Signals::empty()).collect(), span),
                 depth,
             )?;
         }
```

```diff
@@ -60,7 +60,7 @@ impl Command for ToText {
                 ByteStream::from_iter(
                     iter,
                     span,
-                    engine_state.ctrlc.clone(),
+                    engine_state.signals().clone(),
                     ByteStreamType::String,
                 ),
                 update_metadata(meta),
```