Replace ExternalStream with new ByteStream type (#12774)

# Description
This PR introduces a `ByteStream` type which is a `Read`-able stream of
bytes. Internally, it has an enum over three different byte stream
sources:
```rust
pub enum ByteStreamSource {
    Read(Box<dyn Read + Send + 'static>),
    File(File),
    Child(ChildProcess),
}
```
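
Because every source variant can be driven through `Read`, code downstream of a `ByteStream` can work against a single reader instead of an iterator of owned chunks. A minimal standalone sketch of that style (the `count_lines` helper is hypothetical, not part of the PR), using the same `fill_buf`/`consume` pattern the new `save` implementation uses:

```rust
use std::io::{BufRead, BufReader, Read};

// Hypothetical helper: count newline bytes from any `Read` source without
// allocating an owned buffer per chunk.
fn count_lines(source: impl Read) -> std::io::Result<usize> {
    let mut reader = BufReader::new(source);
    let mut count = 0;
    loop {
        let buf = reader.fill_buf()?; // borrow the reader's internal buffer
        if buf.is_empty() {
            break; // EOF
        }
        count += buf.iter().filter(|&&b| b == b'\n').count();
        let len = buf.len();
        reader.consume(len); // mark those bytes as consumed
    }
    Ok(count)
}

fn main() -> std::io::Result<()> {
    // Any `Read` works here: a file, a child's stdout, or an in-memory cursor.
    assert_eq!(count_lines(std::io::Cursor::new(b"a\nb\nc\n"))?, 3);
    Ok(())
}
```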

This is in contrast to the current `RawStream` type, which is an
`Iterator<Item = Vec<u8>>` and therefore has to allocate a new buffer for each chunk it reads.

Currently, `PipelineData::ExternalStream` serves a weird dual role: it is
either external command output or a wrapper around `RawStream`.
`ByteStream` makes this distinction clearer (via `ByteStreamSource`)
and replaces `PipelineData::ExternalStream` in this PR:
```rust
pub enum PipelineData {
    Empty,
    Value(Value, Option<PipelineMetadata>),
    ListStream(ListStream, Option<PipelineMetadata>),
    ByteStream(ByteStream, Option<PipelineMetadata>),
}
```

The PR is relatively large, but a decent amount of it consists of repetitive
changes.
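
Most of those repetitive changes have the same shape: a command that previously destructured `ExternalStream { stdout, stderr, .. }` (handling the `stdout: None` and `stdout: Some(stream)` cases separately) now matches a single `PipelineData::ByteStream` and either takes its reader or collects it. A toy model of that dispatch, using simplified stand-in types rather than the real `nu_protocol` definitions:

```rust
use std::io::{Cursor, Read};

// Simplified stand-ins for the real nu_protocol types, only to show the shape
// of the match that the converted commands now perform.
enum ByteStreamSource {
    Read(Box<dyn Read + Send + 'static>),
}

struct ByteStream {
    source: ByteStreamSource,
}

enum PipelineData {
    Empty,
    Value(String),
    ByteStream(ByteStream),
}

fn collect_string(input: PipelineData) -> std::io::Result<String> {
    match input {
        PipelineData::Empty => Ok(String::new()),
        PipelineData::Value(s) => Ok(s),
        // One arm instead of the old `stdout: None` / `stdout: Some(stream)` pair.
        PipelineData::ByteStream(stream) => {
            let ByteStreamSource::Read(mut reader) = stream.source;
            let mut out = String::new();
            reader.read_to_string(&mut out)?;
            Ok(out)
        }
    }
}

fn main() -> std::io::Result<()> {
    let stream = ByteStream {
        source: ByteStreamSource::Read(Box::new(Cursor::new(b"hello".to_vec()))),
    };
    assert_eq!(collect_string(PipelineData::ByteStream(stream))?, "hello");
    assert_eq!(collect_string(PipelineData::Value("hi".into()))?, "hi");
    assert_eq!(collect_string(PipelineData::Empty)?, "");
    Ok(())
}
```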

This PR fixes #7017, fixes #10763, and fixes #12369.

This PR also improves performance when piping external commands. Nushell
should, in most cases, have competitive pipeline throughput compared to,
e.g., bash.
| Command | Before (MB/s) | After (MB/s) | Bash (MB/s) |
| ------- | ------------: | -----------: | ----------: |
| `throughput \| rg 'x'` | 3059 | 3744 | 3739 |
| `throughput \| nu --testbin relay o> /dev/null` | 3508 | 8087 | 8136 |

# User-Facing Changes
- This is a breaking change for the plugin communication protocol,
because `ExternalStreamInfo` was replaced with `ByteStreamInfo`.
Plugins now only have to deal with a single input stream, as opposed to
the previous three streams: stdout, stderr, and exit code.
- The output of `describe` has been changed for external/byte streams.
- Temporary breaking change: `bytes starts-with` no longer works with
byte streams. This is to keep the PR smaller, and `bytes ends-with`
already does not work on byte streams.
- If a process core dumps, then instead of having a `Value::Error` in
the `exit_code` column of the output returned from `complete`, it is now
a `Value::Int` containing the negation of the signal number.
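
For context, the convention here is plain signal-number negation on Unix. A minimal standalone sketch of that mapping (assumed helper name, not the PR's internal code):

```rust
// Map an exit status to an integer in the style `complete` would report: the
// exit code if the process exited normally, or the negated signal number if it
// was killed by a signal (e.g. SIGSEGV, possibly with a core dump).
#[cfg(unix)]
fn exit_code_for(status: std::process::ExitStatus) -> i64 {
    use std::os::unix::process::ExitStatusExt;
    match (status.code(), status.signal()) {
        (Some(code), _) => code.into(),
        (None, Some(signal)) => -i64::from(signal),
        (None, None) => 0,
    }
}

#[cfg(unix)]
fn main() -> std::io::Result<()> {
    use std::process::Command;
    // The shell kills itself with SIGSEGV (signal 11), so this prints -11.
    let status = Command::new("sh").arg("-c").arg("kill -SEGV $$").status()?;
    println!("{}", exit_code_for(status));
    Ok(())
}

#[cfg(not(unix))]
fn main() {}
```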

# After Submitting
- Update docs and book as necessary
- Release notes (e.g., plugin protocol changes)
- Adapt/convert commands to work with byte streams (high priority:
`str length`, `bytes starts-with`, and maybe `bytes ends-with`; a streaming
sketch of the `bytes starts-with` case follows this list).
- Refactor the `tee` code; Devyn has already done some work on this.
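
As an illustration of that adaptation, here is a hedged streaming sketch of the `bytes starts-with` case over any `Read` source (standalone code, not the command's actual implementation), mirroring the incremental matching that the removed `ExternalStream` arm performed chunk by chunk:

```rust
use std::io::{Cursor, Read};

// Check whether the bytes produced by `source` begin with `pattern`, reading
// only as much of the stream as needed instead of collecting it first.
fn starts_with(mut source: impl Read, pattern: &[u8]) -> std::io::Result<bool> {
    let mut matched = 0;
    let mut buf = [0u8; 8192];
    while matched < pattern.len() {
        let n = source.read(&mut buf)?;
        if n == 0 {
            return Ok(false); // stream ended before the pattern was exhausted
        }
        let take = n.min(pattern.len() - matched);
        if buf[..take] != pattern[matched..matched + take] {
            return Ok(false);
        }
        matched += take;
    }
    Ok(true)
}

fn main() -> std::io::Result<()> {
    assert!(starts_with(Cursor::new(b"hello world".to_vec()), b"hello")?);
    assert!(!starts_with(Cursor::new(b"hello".to_vec()), b"help")?);
    Ok(())
}
```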

---------

Co-authored-by: Devyn Cairns <devyn.cairns@gmail.com>
Author: Ian Manske
Date: 2024-05-16 14:11:18 +00:00
Committed by: GitHub
Parent: 1b8eb23785
Commit: 6fd854ed9f
210 changed files with 3955 additions and 4012 deletions

View File

@ -60,63 +60,13 @@ impl Command for BytesStartsWith {
pattern,
cell_paths,
};
match input {
PipelineData::ExternalStream {
stdout: Some(stream),
span,
..
} => {
let mut i = 0;
for item in stream {
let byte_slice = match &item {
// String and binary data are valid byte patterns
Ok(Value::String { val, .. }) => val.as_bytes(),
Ok(Value::Binary { val, .. }) => val,
// If any Error value is output, echo it back
Ok(v @ Value::Error { .. }) => return Ok(v.clone().into_pipeline_data()),
// Unsupported data
Ok(other) => {
return Ok(Value::error(
ShellError::OnlySupportsThisInputType {
exp_input_type: "string and binary".into(),
wrong_type: other.get_type().to_string(),
dst_span: span,
src_span: other.span(),
},
span,
)
.into_pipeline_data());
}
Err(err) => return Err(err.to_owned()),
};
let max = byte_slice.len().min(arg.pattern.len() - i);
if byte_slice[..max] == arg.pattern[i..i + max] {
i += max;
if i >= arg.pattern.len() {
return Ok(Value::bool(true, span).into_pipeline_data());
}
} else {
return Ok(Value::bool(false, span).into_pipeline_data());
}
}
// We reached the end of the stream and never returned,
// the pattern wasn't exhausted so it probably doesn't match
Ok(Value::bool(false, span).into_pipeline_data())
}
_ => operate(
starts_with,
arg,
input,
call.head,
engine_state.ctrlc.clone(),
),
}
operate(
starts_with,
arg,
input,
call.head,
engine_state.ctrlc.clone(),
)
}
fn examples(&self) -> Vec<Example> {

View File

@ -121,7 +121,7 @@ impl Command for Histogram {
};
let span = call.head;
let data_as_value = input.into_value(span);
let data_as_value = input.into_value(span)?;
let value_span = data_as_value.span();
// `input` is not a list, here we can return an error.
run_histogram(

View File

@ -127,25 +127,15 @@ fn into_binary(
let cell_paths = call.rest(engine_state, stack, 0)?;
let cell_paths = (!cell_paths.is_empty()).then_some(cell_paths);
match input {
PipelineData::ExternalStream { stdout: None, .. } => {
Ok(Value::binary(vec![], head).into_pipeline_data())
}
PipelineData::ExternalStream {
stdout: Some(stream),
..
} => {
// TODO: in the future, we may want this to stream out, converting each to bytes
let output = stream.into_bytes()?;
Ok(Value::binary(output.item, head).into_pipeline_data())
}
_ => {
let args = Arguments {
cell_paths,
compact: call.has_flag(engine_state, stack, "compact")?,
};
operate(action, args, input, call.head, engine_state.ctrlc.clone())
}
if let PipelineData::ByteStream(stream, ..) = input {
// TODO: in the future, we may want this to stream out, converting each to bytes
Ok(Value::binary(stream.into_bytes()?, head).into_pipeline_data())
} else {
let args = Arguments {
cell_paths,
compact: call.has_flag(engine_state, stack, "compact")?,
};
operate(action, args, input, call.head, engine_state.ctrlc.clone())
}
}

View File

@ -101,11 +101,11 @@ fn into_cell_path(call: &Call, input: PipelineData) -> Result<PipelineData, Shel
let list: Vec<_> = stream.into_iter().collect();
Ok(list_to_cell_path(&list, head)?.into_pipeline_data())
}
PipelineData::ExternalStream { span, .. } => Err(ShellError::OnlySupportsThisInputType {
PipelineData::ByteStream(stream, ..) => Err(ShellError::OnlySupportsThisInputType {
exp_input_type: "list, int".into(),
wrong_type: "raw data".into(),
wrong_type: "byte stream".into(),
dst_span: head,
src_span: span,
src_span: stream.span(),
}),
PipelineData::Empty => Err(ShellError::PipelineEmpty { dst_span: head }),
}

View File

@ -82,20 +82,12 @@ fn glob_helper(
let head = call.head;
let cell_paths = call.rest(engine_state, stack, 0)?;
let cell_paths = (!cell_paths.is_empty()).then_some(cell_paths);
let args = Arguments { cell_paths };
match input {
PipelineData::ExternalStream { stdout: None, .. } => {
Ok(Value::glob(String::new(), false, head).into_pipeline_data())
}
PipelineData::ExternalStream {
stdout: Some(stream),
..
} => {
// TODO: in the future, we may want this to stream out, converting each to bytes
let output = stream.into_string()?;
Ok(Value::glob(output.item, false, head).into_pipeline_data())
}
_ => operate(action, args, input, head, engine_state.ctrlc.clone()),
if let PipelineData::ByteStream(stream, ..) = input {
// TODO: in the future, we may want this to stream out, converting each to bytes
Ok(Value::glob(stream.into_string()?, false, head).into_pipeline_data())
} else {
let args = Arguments { cell_paths };
operate(action, args, input, head, engine_state.ctrlc.clone())
}
}

View File

@ -108,7 +108,7 @@ fn into_record(
call: &Call,
input: PipelineData,
) -> Result<PipelineData, ShellError> {
let input = input.into_value(call.head);
let input = input.into_value(call.head)?;
let input_type = input.get_type();
let span = input.span();
let res = match input {

View File

@ -155,26 +155,18 @@ fn string_helper(
}
let cell_paths = call.rest(engine_state, stack, 0)?;
let cell_paths = (!cell_paths.is_empty()).then_some(cell_paths);
let config = engine_state.get_config().clone();
let args = Arguments {
decimals_value,
cell_paths,
config,
};
match input {
PipelineData::ExternalStream { stdout: None, .. } => {
Ok(Value::string(String::new(), head).into_pipeline_data())
}
PipelineData::ExternalStream {
stdout: Some(stream),
..
} => {
// TODO: in the future, we may want this to stream out, converting each to bytes
let output = stream.into_string()?;
Ok(Value::string(output.item, head).into_pipeline_data())
}
_ => operate(action, args, input, head, engine_state.ctrlc.clone()),
if let PipelineData::ByteStream(stream, ..) = input {
// TODO: in the future, we may want this to stream out, converting each to bytes
Ok(Value::string(stream.into_string()?, head).into_pipeline_data())
} else {
let config = engine_state.get_config().clone();
let args = Arguments {
decimals_value,
cell_paths,
config,
};
operate(action, args, input, head, engine_state.ctrlc.clone())
}
}

View File

@ -91,7 +91,7 @@ impl SQLiteDatabase {
}
pub fn try_from_pipeline(input: PipelineData, span: Span) -> Result<Self, ShellError> {
let value = input.into_value(span);
let value = input.into_value(span)?;
Self::try_from_value(value)
}

View File

@ -29,7 +29,7 @@ impl Command for Inspect {
input: PipelineData,
) -> Result<PipelineData, ShellError> {
let input_metadata = input.metadata();
let input_val = input.into_value(call.head);
let input_val = input.into_value(call.head)?;
if input_val.is_nothing() {
return Err(ShellError::PipelineEmpty {
dst_span: call.head,

View File

@ -53,13 +53,12 @@ impl Command for TimeIt {
eval_block(engine_state, stack, block, input)?
} else {
let eval_expression_with_input = get_eval_expression_with_input(engine_state);
eval_expression_with_input(engine_state, stack, command_to_run, input)
.map(|res| res.0)?
eval_expression_with_input(engine_state, stack, command_to_run, input)?.0
}
} else {
PipelineData::empty()
}
.into_value(call.head);
.into_value(call.head)?;
let end_time = Instant::now();

View File

@ -1,8 +1,8 @@
use super::util::get_rest_for_glob_pattern;
#[allow(deprecated)]
use nu_engine::{command_prelude::*, current_dir, get_eval_block};
use nu_protocol::{BufferedReader, DataSource, NuGlob, PipelineMetadata, RawStream};
use std::{io::BufReader, path::Path};
use nu_protocol::{ByteStream, DataSource, NuGlob, PipelineMetadata};
use std::path::Path;
#[cfg(feature = "sqlite")]
use crate::database::SQLiteDatabase;
@ -143,23 +143,13 @@ impl Command for Open {
}
};
let buf_reader = BufReader::new(file);
let file_contents = PipelineData::ExternalStream {
stdout: Some(RawStream::new(
Box::new(BufferedReader::new(buf_reader)),
ctrlc.clone(),
call_span,
None,
)),
stderr: None,
exit_code: None,
span: call_span,
metadata: Some(PipelineMetadata {
let stream = PipelineData::ByteStream(
ByteStream::file(file, call_span, ctrlc.clone()),
Some(PipelineMetadata {
data_source: DataSource::FilePath(path.to_path_buf()),
}),
trim_end_newline: false,
};
);
let exts_opt: Option<Vec<String>> = if raw {
None
} else {
@ -184,9 +174,9 @@ impl Command for Open {
let decl = engine_state.get_decl(converter_id);
let command_output = if let Some(block_id) = decl.get_block_id() {
let block = engine_state.get_block(block_id);
eval_block(engine_state, stack, block, file_contents)
eval_block(engine_state, stack, block, stream)
} else {
decl.run(engine_state, stack, &Call::new(call_span), file_contents)
decl.run(engine_state, stack, &Call::new(call_span), stream)
};
output.push(command_output.map_err(|inner| {
ShellError::GenericError{
@ -198,7 +188,7 @@ impl Command for Open {
}
})?);
}
None => output.push(file_contents),
None => output.push(stream),
}
}
}

View File

@ -5,12 +5,15 @@ use nu_engine::{command_prelude::*, current_dir};
use nu_path::expand_path_with;
use nu_protocol::{
ast::{Expr, Expression},
DataSource, OutDest, PipelineMetadata, RawStream,
byte_stream::copy_with_interrupt,
process::ChildPipe,
ByteStreamSource, DataSource, OutDest, PipelineMetadata,
};
use std::{
fs::File,
io::Write,
io::{self, BufRead, BufReader, Read, Write},
path::{Path, PathBuf},
sync::{atomic::AtomicBool, Arc},
thread,
};
@ -104,12 +107,7 @@ impl Command for Save {
});
match input {
PipelineData::ExternalStream {
stdout,
stderr,
metadata,
..
} => {
PipelineData::ByteStream(stream, metadata) => {
check_saving_to_source_file(metadata.as_ref(), &path, stderr_path.as_ref())?;
let (file, stderr_file) = get_files(
@ -121,40 +119,97 @@ impl Command for Save {
force,
)?;
match (stdout, stderr) {
(Some(stdout), stderr) => {
// delegate a thread to redirect stderr to result.
let handler = stderr
.map(|stderr| match stderr_file {
Some(stderr_file) => thread::Builder::new()
.name("stderr redirector".to_string())
.spawn(move || {
stream_to_file(stderr, stderr_file, span, progress)
}),
None => thread::Builder::new()
.name("stderr redirector".to_string())
.spawn(move || stderr.drain()),
})
.transpose()
.err_span(span)?;
let size = stream.known_size();
let ctrlc = engine_state.ctrlc.clone();
let res = stream_to_file(stdout, file, span, progress);
if let Some(h) = handler {
h.join().map_err(|err| ShellError::ExternalCommand {
label: "Fail to receive external commands stderr message"
.to_string(),
help: format!("{err:?}"),
span,
})??;
}
res?;
match stream.into_source() {
ByteStreamSource::Read(read) => {
stream_to_file(read, size, ctrlc, file, span, progress)?;
}
(None, Some(stderr)) => match stderr_file {
Some(stderr_file) => stream_to_file(stderr, stderr_file, span, progress)?,
None => stderr.drain()?,
},
(None, None) => {}
};
ByteStreamSource::File(source) => {
stream_to_file(source, size, ctrlc, file, span, progress)?;
}
ByteStreamSource::Child(mut child) => {
fn write_or_consume_stderr(
stderr: ChildPipe,
file: Option<File>,
span: Span,
ctrlc: Option<Arc<AtomicBool>>,
progress: bool,
) -> Result<(), ShellError> {
if let Some(file) = file {
match stderr {
ChildPipe::Pipe(pipe) => {
stream_to_file(pipe, None, ctrlc, file, span, progress)
}
ChildPipe::Tee(tee) => {
stream_to_file(tee, None, ctrlc, file, span, progress)
}
}?
} else {
match stderr {
ChildPipe::Pipe(mut pipe) => {
io::copy(&mut pipe, &mut io::sink())
}
ChildPipe::Tee(mut tee) => io::copy(&mut tee, &mut io::sink()),
}
.err_span(span)?;
}
Ok(())
}
match (child.stdout.take(), child.stderr.take()) {
(Some(stdout), stderr) => {
// delegate a thread to redirect stderr to result.
let handler = stderr
.map(|stderr| {
let ctrlc = ctrlc.clone();
thread::Builder::new().name("stderr saver".into()).spawn(
move || {
write_or_consume_stderr(
stderr,
stderr_file,
span,
ctrlc,
progress,
)
},
)
})
.transpose()
.err_span(span)?;
let res = match stdout {
ChildPipe::Pipe(pipe) => {
stream_to_file(pipe, None, ctrlc, file, span, progress)
}
ChildPipe::Tee(tee) => {
stream_to_file(tee, None, ctrlc, file, span, progress)
}
};
if let Some(h) = handler {
h.join().map_err(|err| ShellError::ExternalCommand {
label: "Fail to receive external commands stderr message"
.to_string(),
help: format!("{err:?}"),
span,
})??;
}
res?;
}
(None, Some(stderr)) => {
write_or_consume_stderr(
stderr,
stderr_file,
span,
ctrlc,
progress,
)?;
}
(None, None) => {}
};
}
}
Ok(PipelineData::Empty)
}
@ -302,8 +357,7 @@ fn input_to_bytes(
) -> Result<Vec<u8>, ShellError> {
let ext = if raw {
None
// if is extern stream , in other words , not value
} else if let PipelineData::ExternalStream { .. } = input {
} else if let PipelineData::ByteStream(..) = input {
None
} else if let PipelineData::Value(Value::String { .. }, ..) = input {
None
@ -318,7 +372,7 @@ fn input_to_bytes(
input
};
value_to_bytes(input.into_value(span))
value_to_bytes(input.into_value(span)?)
}
/// Convert given data into content of file of specified extension if
@ -448,84 +502,54 @@ fn get_files(
}
fn stream_to_file(
mut stream: RawStream,
mut source: impl Read,
known_size: Option<u64>,
ctrlc: Option<Arc<AtomicBool>>,
mut file: File,
span: Span,
progress: bool,
) -> Result<(), ShellError> {
// https://github.com/nushell/nushell/pull/9377 contains the reason
// for not using BufWriter<File>
let writer = &mut file;
// https://github.com/nushell/nushell/pull/9377 contains the reason for not using `BufWriter`
if progress {
let mut bytes_processed = 0;
let mut bytes_processed: u64 = 0;
let bytes_processed_p = &mut bytes_processed;
let file_total_size = stream.known_size;
let mut process_failed = false;
let process_failed_p = &mut process_failed;
let mut bar = progress_bar::NuProgressBar::new(known_size);
// Create the progress bar
// It looks a bit messy but I am doing it this way to avoid
// creating the bar when is not needed
let (mut bar_opt, bar_opt_clone) = if progress {
let tmp_bar = progress_bar::NuProgressBar::new(file_total_size);
let tmp_bar_clone = tmp_bar.clone();
// TODO: reduce the number of progress bar updates?
(Some(tmp_bar), Some(tmp_bar_clone))
} else {
(None, None)
};
let mut reader = BufReader::new(source);
stream.try_for_each(move |result| {
let buf = match result {
Ok(v) => match v {
Value::String { val, .. } => val.into_bytes(),
Value::Binary { val, .. } => val,
// Propagate errors by explicitly matching them before the final case.
Value::Error { error, .. } => return Err(*error),
other => {
return Err(ShellError::OnlySupportsThisInputType {
exp_input_type: "string or binary".into(),
wrong_type: other.get_type().to_string(),
dst_span: span,
src_span: other.span(),
});
let res = loop {
if nu_utils::ctrl_c::was_pressed(&ctrlc) {
bar.abandoned_msg("# Cancelled #".to_owned());
return Ok(());
}
match reader.fill_buf() {
Ok(&[]) => break Ok(()),
Ok(buf) => {
file.write_all(buf).err_span(span)?;
let len = buf.len();
reader.consume(len);
bytes_processed += len as u64;
bar.update_bar(bytes_processed);
}
},
Err(err) => {
*process_failed_p = true;
return Err(err);
Err(e) if e.kind() == io::ErrorKind::Interrupted => continue,
Err(e) => break Err(e),
}
};
// If the `progress` flag is set then
if progress {
// Update the total amount of bytes that has been saved and then print the progress bar
*bytes_processed_p += buf.len() as u64;
if let Some(bar) = &mut bar_opt {
bar.update_bar(*bytes_processed_p);
}
}
if let Err(err) = writer.write_all(&buf) {
*process_failed_p = true;
return Err(ShellError::IOError {
msg: err.to_string(),
});
}
Ok(())
})?;
// If the `progress` flag is set then
if progress {
// If the process failed, stop the progress bar with an error message.
if process_failed {
if let Some(bar) = bar_opt_clone {
bar.abandoned_msg("# Error while saving #".to_owned());
}
if let Err(err) = res {
let _ = file.flush();
bar.abandoned_msg("# Error while saving #".to_owned());
Err(err.into_spanned(span).into())
} else {
file.flush().err_span(span)?;
Ok(())
}
} else {
copy_with_interrupt(&mut source, &mut file, span, ctrlc.as_deref())?;
Ok(())
}
file.flush()?;
Ok(())
}

View File

@ -125,13 +125,11 @@ fn getcol(head: Span, input: PipelineData) -> Result<PipelineData, ShellError> {
.into_pipeline_data()
.set_metadata(metadata))
}
PipelineData::ExternalStream { .. } => Err(ShellError::OnlySupportsThisInputType {
PipelineData::ByteStream(stream, ..) => Err(ShellError::OnlySupportsThisInputType {
exp_input_type: "record or table".into(),
wrong_type: "raw data".into(),
wrong_type: "byte stream".into(),
dst_span: head,
src_span: input
.span()
.expect("PipelineData::ExternalStream had no span"),
src_span: stream.span(),
}),
}
}

View File

@ -133,11 +133,11 @@ fn drop_cols(
}
}
PipelineData::Empty => Ok(PipelineData::Empty),
PipelineData::ExternalStream { span, .. } => Err(ShellError::OnlySupportsThisInputType {
PipelineData::ByteStream(stream, ..) => Err(ShellError::OnlySupportsThisInputType {
exp_input_type: "table or record".into(),
wrong_type: "raw data".into(),
wrong_type: "byte stream".into(),
dst_span: head,
src_span: span,
src_span: stream.span(),
}),
}
}

View File

@ -129,7 +129,9 @@ with 'transpose' first."#
}
Some(Value::list(vals, span))
}
Ok(data) => Some(data.into_value(head)),
Ok(data) => Some(data.into_value(head).unwrap_or_else(|err| {
Value::error(chain_error_with_input(err, is_error, span), span)
})),
Err(ShellError::Continue { span }) => Some(Value::nothing(span)),
Err(ShellError::Break { .. }) => None,
Err(error) => {
@ -140,37 +142,39 @@ with 'transpose' first."#
})
.into_pipeline_data(head, engine_state.ctrlc.clone()))
}
PipelineData::ExternalStream { stdout: None, .. } => Ok(PipelineData::empty()),
PipelineData::ExternalStream {
stdout: Some(stream),
..
} => {
let mut closure = ClosureEval::new(engine_state, stack, closure);
Ok(stream
.into_iter()
.map_while(move |value| {
let value = match value {
Ok(value) => value,
Err(ShellError::Continue { span }) => {
return Some(Value::nothing(span))
}
Err(ShellError::Break { .. }) => return None,
Err(err) => return Some(Value::error(err, head)),
};
PipelineData::ByteStream(stream, ..) => {
if let Some(chunks) = stream.chunks() {
let mut closure = ClosureEval::new(engine_state, stack, closure);
Ok(chunks
.map_while(move |value| {
let value = match value {
Ok(value) => value,
Err(ShellError::Continue { span }) => {
return Some(Value::nothing(span))
}
Err(ShellError::Break { .. }) => return None,
Err(err) => return Some(Value::error(err, head)),
};
let span = value.span();
let is_error = value.is_error();
match closure.run_with_value(value) {
Ok(data) => Some(data.into_value(head)),
Err(ShellError::Continue { span }) => Some(Value::nothing(span)),
Err(ShellError::Break { .. }) => None,
Err(error) => {
let error = chain_error_with_input(error, is_error, span);
Some(Value::error(error, span))
let span = value.span();
let is_error = value.is_error();
match closure
.run_with_value(value)
.and_then(|data| data.into_value(head))
{
Ok(value) => Some(value),
Err(ShellError::Continue { span }) => Some(Value::nothing(span)),
Err(ShellError::Break { .. }) => None,
Err(error) => {
let error = chain_error_with_input(error, is_error, span);
Some(Value::error(error, span))
}
}
}
})
.into_pipeline_data(head, engine_state.ctrlc.clone()))
})
.into_pipeline_data(head, engine_state.ctrlc.clone()))
} else {
Ok(PipelineData::Empty)
}
}
// This match allows non-iterables to be accepted,
// which is currently considered undesirable (Nov 2022).

View File

@ -1,4 +1,5 @@
use nu_engine::command_prelude::*;
use std::io::Read;
pub fn empty(
engine_state: &EngineState,
@ -36,29 +37,26 @@ pub fn empty(
} else {
match input {
PipelineData::Empty => Ok(PipelineData::Empty),
PipelineData::ExternalStream { stdout, .. } => match stdout {
Some(s) => {
let bytes = s.into_bytes();
match bytes {
Ok(s) => {
if negate {
Ok(Value::bool(!s.item.is_empty(), head).into_pipeline_data())
} else {
Ok(Value::bool(s.item.is_empty(), head).into_pipeline_data())
}
PipelineData::ByteStream(stream, ..) => {
let span = stream.span();
match stream.reader() {
Some(reader) => {
let is_empty = reader.bytes().next().transpose().err_span(span)?.is_none();
if negate {
Ok(Value::bool(!is_empty, head).into_pipeline_data())
} else {
Ok(Value::bool(is_empty, head).into_pipeline_data())
}
}
None => {
if negate {
Ok(Value::bool(false, head).into_pipeline_data())
} else {
Ok(Value::bool(true, head).into_pipeline_data())
}
Err(err) => Err(err),
}
}
None => {
if negate {
Ok(Value::bool(false, head).into_pipeline_data())
} else {
Ok(Value::bool(true, head).into_pipeline_data())
}
}
},
}
PipelineData::ListStream(s, ..) => {
let empty = s.into_iter().next().is_none();
if negate {

View File

@ -58,33 +58,13 @@ a variable. On the other hand, the "row condition" syntax is not supported."#
| PipelineData::ListStream(..) => {
let mut closure = ClosureEval::new(engine_state, stack, closure);
Ok(input
.into_iter()
.filter_map(move |value| match closure.run_with_value(value.clone()) {
Ok(pred) => pred.into_value(head).is_true().then_some(value),
Err(err) => {
let span = value.span();
let err = chain_error_with_input(err, value.is_error(), span);
Some(Value::error(err, span))
}
})
.into_pipeline_data(head, engine_state.ctrlc.clone()))
}
PipelineData::ExternalStream { stdout: None, .. } => Ok(PipelineData::empty()),
PipelineData::ExternalStream {
stdout: Some(stream),
..
} => {
let mut closure = ClosureEval::new(engine_state, stack, closure);
Ok(stream
.into_iter()
.filter_map(move |value| {
let value = match value {
Ok(value) => value,
Err(err) => return Some(Value::error(err, head)),
};
match closure.run_with_value(value.clone()) {
Ok(pred) => pred.into_value(head).is_true().then_some(value),
match closure
.run_with_value(value.clone())
.and_then(|data| data.into_value(head))
{
Ok(cond) => cond.is_true().then_some(value),
Err(err) => {
let span = value.span();
let err = chain_error_with_input(err, value.is_error(), span);
@ -94,14 +74,43 @@ a variable. On the other hand, the "row condition" syntax is not supported."#
})
.into_pipeline_data(head, engine_state.ctrlc.clone()))
}
PipelineData::ByteStream(stream, ..) => {
if let Some(chunks) = stream.chunks() {
let mut closure = ClosureEval::new(engine_state, stack, closure);
Ok(chunks
.into_iter()
.filter_map(move |value| {
let value = match value {
Ok(value) => value,
Err(err) => return Some(Value::error(err, head)),
};
match closure
.run_with_value(value.clone())
.and_then(|data| data.into_value(head))
{
Ok(cond) => cond.is_true().then_some(value),
Err(err) => {
let span = value.span();
let err = chain_error_with_input(err, value.is_error(), span);
Some(Value::error(err, span))
}
}
})
.into_pipeline_data(head, engine_state.ctrlc.clone()))
} else {
Ok(PipelineData::Empty)
}
}
// This match allows non-iterables to be accepted,
// which is currently considered undesirable (Nov 2022).
PipelineData::Value(value, ..) => {
let result = ClosureEvalOnce::new(engine_state, stack, closure)
.run_with_value(value.clone());
.run_with_value(value.clone())
.and_then(|data| data.into_value(head));
Ok(match result {
Ok(pred) => pred.into_value(head).is_true().then_some(value),
Ok(cond) => cond.is_true().then_some(value),
Err(err) => {
let span = value.span();
let err = chain_error_with_input(err, value.is_error(), span);

View File

@ -447,57 +447,35 @@ fn find_with_rest_and_highlight(
Ok(PipelineData::ListStream(stream, metadata))
}
PipelineData::ExternalStream { stdout: None, .. } => Ok(PipelineData::empty()),
PipelineData::ExternalStream {
stdout: Some(stream),
..
} => {
let mut output: Vec<Value> = vec![];
for filter_val in stream {
match filter_val {
Ok(value) => {
let span = value.span();
match value {
Value::String { val, .. } => {
let split_char = if val.contains("\r\n") { "\r\n" } else { "\n" };
PipelineData::ByteStream(stream, ..) => {
let span = stream.span();
if let Some(lines) = stream.lines() {
let terms = lower_terms
.into_iter()
.map(|term| term.to_expanded_string("", &filter_config).to_lowercase())
.collect::<Vec<_>>();
for line in val.split(split_char) {
for term in lower_terms.iter() {
let term_str = term.to_expanded_string("", &filter_config);
let lower_val = line.to_lowercase();
if lower_val.contains(
&term.to_expanded_string("", &config).to_lowercase(),
) {
output.push(Value::string(
highlight_search_string(
line,
&term_str,
&string_style,
&highlight_style,
)?,
span,
))
}
}
}
}
// Propagate errors by explicitly matching them before the final case.
Value::Error { error, .. } => return Err(*error),
other => {
return Err(ShellError::UnsupportedInput {
msg: "unsupported type from raw stream".into(),
input: format!("input: {:?}", other.get_type()),
msg_span: span,
input_span: other.span(),
});
}
let mut output: Vec<Value> = vec![];
for line in lines {
let line = line?.to_lowercase();
for term in &terms {
if line.contains(term) {
output.push(Value::string(
highlight_search_string(
&line,
term,
&string_style,
&highlight_style,
)?,
span,
))
}
}
// Propagate any errors that were in the stream
Err(e) => return Err(e),
};
}
Ok(Value::list(output, span).into_pipeline_data())
} else {
Ok(PipelineData::Empty)
}
Ok(output.into_pipeline_data(span, ctrlc))
}
}
}

View File

@ -170,11 +170,11 @@ fn first_helper(
))
}
}
PipelineData::ExternalStream { span, .. } => Err(ShellError::OnlySupportsThisInputType {
PipelineData::ByteStream(stream, ..) => Err(ShellError::OnlySupportsThisInputType {
exp_input_type: "list, binary or range".into(),
wrong_type: "raw data".into(),
wrong_type: "byte stream".into(),
dst_span: head,
src_span: span,
src_span: stream.span(),
}),
PipelineData::Empty => Err(ShellError::OnlySupportsThisInputType {
exp_input_type: "list, binary or range".into(),

View File

@ -81,7 +81,7 @@ If multiple cell paths are given, this will produce a list of values."#
let paths = std::iter::once(cell_path).chain(rest);
let input = input.into_value(span);
let input = input.into_value(span)?;
for path in paths {
let val = input.clone().follow_cell_path(&path.members, !sensitive);

View File

@ -207,7 +207,7 @@ fn group_closure(
for value in values {
let key = closure
.run_with_value(value.clone())?
.into_value(span)
.into_value(span)?
.coerce_into_string()?;
groups.entry(key).or_default().push(value);

View File

@ -66,7 +66,7 @@ impl Command for Headers {
let config = engine_state.get_config();
let metadata = input.metadata();
let span = input.span().unwrap_or(call.head);
let value = input.into_value(span);
let value = input.into_value(span)?;
let Value::List { vals: table, .. } = value else {
return Err(ShellError::TypeMismatch {
err_message: "not a table".to_string(),

View File

@ -190,7 +190,7 @@ fn insert(
let value = value.unwrap_or(Value::nothing(head));
let new_value = ClosureEvalOnce::new(engine_state, stack, *val)
.run_with_value(value.clone())?
.into_value(head);
.into_value(head)?;
pre_elems.push(new_value);
if !end_of_stream {
@ -261,8 +261,8 @@ fn insert(
type_name: "empty pipeline".to_string(),
span: head,
}),
PipelineData::ExternalStream { .. } => Err(ShellError::IncompatiblePathAccess {
type_name: "external stream".to_string(),
PipelineData::ByteStream(..) => Err(ShellError::IncompatiblePathAccess {
type_name: "byte stream".to_string(),
span: head,
}),
}
@ -284,7 +284,7 @@ fn insert_value_by_closure(
value.clone()
};
let new_value = closure.run_with_value(value_at_path)?.into_value(span);
let new_value = closure.run_with_value(value_at_path)?.into_value(span)?;
value.insert_data_at_cell_path(cell_path, new_value, span)
}
@ -304,7 +304,7 @@ fn insert_single_value_by_closure(
value.clone()
};
let new_value = closure.run_with_value(value_at_path)?.into_value(span);
let new_value = closure.run_with_value(value_at_path)?.into_value(span)?;
value.insert_data_at_cell_path(cell_path, new_value, span)
}

View File

@ -55,10 +55,11 @@ impl Command for Items {
let result = closure
.add_arg(Value::string(col, span))
.add_arg(val)
.run_with_input(PipelineData::Empty);
.run_with_input(PipelineData::Empty)
.and_then(|data| data.into_value(head));
match result {
Ok(data) => Some(data.into_value(head)),
Ok(value) => Some(value),
Err(ShellError::Break { .. }) => None,
Err(err) => {
let err = chain_error_with_input(err, false, span);
@ -77,20 +78,18 @@ impl Command for Items {
}),
}
}
PipelineData::ListStream(..) => Err(ShellError::OnlySupportsThisInputType {
PipelineData::ListStream(stream, ..) => Err(ShellError::OnlySupportsThisInputType {
exp_input_type: "record".into(),
wrong_type: "stream".into(),
dst_span: head,
src_span: head,
dst_span: call.head,
src_span: stream.span(),
}),
PipelineData::ByteStream(stream, ..) => Err(ShellError::OnlySupportsThisInputType {
exp_input_type: "record".into(),
wrong_type: "byte stream".into(),
dst_span: call.head,
src_span: stream.span(),
}),
PipelineData::ExternalStream { span, .. } => {
Err(ShellError::OnlySupportsThisInputType {
exp_input_type: "record".into(),
wrong_type: "raw data".into(),
dst_span: head,
src_span: span,
})
}
}
.map(|data| data.set_metadata(metadata))
}

View File

@ -75,7 +75,7 @@ impl Command for Join {
let join_type = join_type(engine_state, stack, call)?;
// FIXME: we should handle ListStreams properly instead of collecting
let collected_input = input.into_value(span);
let collected_input = input.into_value(span)?;
match (&collected_input, &table_2, &l_on, &r_on) {
(

View File

@ -160,14 +160,12 @@ impl Command for Last {
}),
}
}
PipelineData::ExternalStream { span, .. } => {
Err(ShellError::OnlySupportsThisInputType {
exp_input_type: "list, binary or range".into(),
wrong_type: "raw data".into(),
dst_span: head,
src_span: span,
})
}
PipelineData::ByteStream(stream, ..) => Err(ShellError::OnlySupportsThisInputType {
exp_input_type: "list, binary or range".into(),
wrong_type: "byte stream".into(),
dst_span: head,
src_span: stream.span(),
}),
PipelineData::Empty => Err(ShellError::OnlySupportsThisInputType {
exp_input_type: "list, binary or range".into(),
wrong_type: "null".into(),

View File

@ -1,6 +1,4 @@
use nu_engine::command_prelude::*;
use nu_protocol::RawStream;
use std::collections::VecDeque;
#[derive(Clone)]
pub struct Lines;
@ -33,23 +31,33 @@ impl Command for Lines {
let span = input.span().unwrap_or(call.head);
match input {
PipelineData::Value(Value::String { val, .. }, ..) => {
let lines = if skip_empty {
val.lines()
.filter_map(|s| {
if s.trim().is_empty() {
None
} else {
Some(Value::string(s, span))
}
})
.collect()
} else {
val.lines().map(|s| Value::string(s, span)).collect()
};
PipelineData::Value(value, ..) => match value {
Value::String { val, .. } => {
let lines = if skip_empty {
val.lines()
.filter_map(|s| {
if s.trim().is_empty() {
None
} else {
Some(Value::string(s, span))
}
})
.collect()
} else {
val.lines().map(|s| Value::string(s, span)).collect()
};
Ok(Value::list(lines, span).into_pipeline_data())
}
Ok(Value::list(lines, span).into_pipeline_data())
}
// Propagate existing errors
Value::Error { error, .. } => Err(*error),
value => Err(ShellError::OnlySupportsThisInputType {
exp_input_type: "string or byte stream".into(),
wrong_type: value.get_type().to_string(),
dst_span: head,
src_span: value.span(),
}),
},
PipelineData::Empty => Ok(PipelineData::Empty),
PipelineData::ListStream(stream, metadata) => {
let stream = stream.modify(|iter| {
@ -76,27 +84,18 @@ impl Command for Lines {
Ok(PipelineData::ListStream(stream, metadata))
}
PipelineData::Value(val, ..) => {
match val {
// Propagate existing errors
Value::Error { error, .. } => Err(*error),
_ => Err(ShellError::OnlySupportsThisInputType {
exp_input_type: "string or raw data".into(),
wrong_type: val.get_type().to_string(),
dst_span: head,
src_span: val.span(),
}),
PipelineData::ByteStream(stream, ..) => {
if let Some(lines) = stream.lines() {
Ok(lines
.map(move |line| match line {
Ok(line) => Value::string(line, head),
Err(err) => Value::error(err, head),
})
.into_pipeline_data(head, ctrlc))
} else {
Ok(PipelineData::empty())
}
}
PipelineData::ExternalStream { stdout: None, .. } => Ok(PipelineData::empty()),
PipelineData::ExternalStream {
stdout: Some(stream),
metadata,
..
} => Ok(RawStreamLinesAdapter::new(stream, head, skip_empty)
.map(move |x| x.unwrap_or_else(|err| Value::error(err, head)))
.into_pipeline_data(head, ctrlc)
.set_metadata(metadata)),
}
}
@ -112,108 +111,6 @@ impl Command for Lines {
}
}
#[derive(Debug)]
struct RawStreamLinesAdapter {
inner: RawStream,
inner_complete: bool,
skip_empty: bool,
span: Span,
incomplete_line: String,
queue: VecDeque<String>,
}
impl Iterator for RawStreamLinesAdapter {
type Item = Result<Value, ShellError>;
fn next(&mut self) -> Option<Self::Item> {
loop {
if let Some(s) = self.queue.pop_front() {
if self.skip_empty && s.trim().is_empty() {
continue;
}
return Some(Ok(Value::string(s, self.span)));
} else {
// inner is complete, feed out remaining state
if self.inner_complete {
return if self.incomplete_line.is_empty() {
None
} else {
Some(Ok(Value::string(
std::mem::take(&mut self.incomplete_line),
self.span,
)))
};
}
// pull more data from inner
if let Some(result) = self.inner.next() {
match result {
Ok(v) => {
let span = v.span();
match v {
// TODO: Value::Binary support required?
Value::String { val, .. } => {
self.span = span;
let mut lines = val.lines();
// handle incomplete line from previous
if !self.incomplete_line.is_empty() {
if let Some(first) = lines.next() {
self.incomplete_line.push_str(first);
self.queue.push_back(std::mem::take(
&mut self.incomplete_line,
));
}
}
// save completed lines
self.queue.extend(lines.map(String::from));
if !val.ends_with('\n') {
// incomplete line, save for next time
// if `val` and `incomplete_line` were empty,
// then pop will return none
if let Some(s) = self.queue.pop_back() {
self.incomplete_line = s;
}
}
}
// Propagate errors by explicitly matching them before the final case.
Value::Error { error, .. } => return Some(Err(*error)),
other => {
return Some(Err(ShellError::OnlySupportsThisInputType {
exp_input_type: "string".into(),
wrong_type: other.get_type().to_string(),
dst_span: self.span,
src_span: other.span(),
}));
}
}
}
Err(err) => return Some(Err(err)),
}
} else {
self.inner_complete = true;
}
}
}
}
}
impl RawStreamLinesAdapter {
pub fn new(inner: RawStream, span: Span, skip_empty: bool) -> Self {
Self {
inner,
span,
skip_empty,
incomplete_line: String::new(),
queue: VecDeque::new(),
inner_complete: false,
}
}
}
#[cfg(test)]
mod test {
use super::*;

View File

@ -143,17 +143,16 @@ impl Command for ParEach {
.map(move |(index, value)| {
let span = value.span();
let is_error = value.is_error();
let result =
let value =
ClosureEvalOnce::new(engine_state, stack, closure.clone())
.run_with_value(value);
let value = match result {
Ok(data) => data.into_value(span),
Err(err) => Value::error(
chain_error_with_input(err, is_error, span),
span,
),
};
.run_with_value(value)
.and_then(|data| data.into_value(span))
.unwrap_or_else(|err| {
Value::error(
chain_error_with_input(err, is_error, span),
span,
)
});
(index, value)
})
@ -170,17 +169,16 @@ impl Command for ParEach {
.map(move |(index, value)| {
let span = value.span();
let is_error = value.is_error();
let result =
let value =
ClosureEvalOnce::new(engine_state, stack, closure.clone())
.run_with_value(value);
let value = match result {
Ok(data) => data.into_value(span),
Err(err) => Value::error(
chain_error_with_input(err, is_error, span),
span,
),
};
.run_with_value(value)
.and_then(|data| data.into_value(span))
.unwrap_or_else(|err| {
Value::error(
chain_error_with_input(err, is_error, span),
span,
)
});
(index, value)
})
@ -203,40 +201,12 @@ impl Command for ParEach {
.map(move |(index, value)| {
let span = value.span();
let is_error = value.is_error();
let result = ClosureEvalOnce::new(engine_state, stack, closure.clone())
.run_with_value(value);
let value = match result {
Ok(data) => data.into_value(head),
Err(err) => {
Value::error(chain_error_with_input(err, is_error, span), span)
}
};
(index, value)
})
.collect::<Vec<_>>();
apply_order(vec).into_pipeline_data(head, engine_state.ctrlc.clone())
})),
PipelineData::ExternalStream { stdout: None, .. } => Ok(PipelineData::empty()),
PipelineData::ExternalStream {
stdout: Some(stream),
..
} => Ok(create_pool(max_threads)?.install(|| {
let vec = stream
.enumerate()
.par_bridge()
.map(move |(index, value)| {
let value = match value {
Ok(value) => value,
Err(err) => return (index, Value::error(err, head)),
};
let value = ClosureEvalOnce::new(engine_state, stack, closure.clone())
.run_with_value(value)
.map(|data| data.into_value(head))
.unwrap_or_else(|err| Value::error(err, head));
.and_then(|data| data.into_value(head))
.unwrap_or_else(|err| {
Value::error(chain_error_with_input(err, is_error, span), span)
});
(index, value)
})
@ -244,6 +214,34 @@ impl Command for ParEach {
apply_order(vec).into_pipeline_data(head, engine_state.ctrlc.clone())
})),
PipelineData::ByteStream(stream, ..) => {
if let Some(chunks) = stream.chunks() {
Ok(create_pool(max_threads)?.install(|| {
let vec = chunks
.enumerate()
.par_bridge()
.map(move |(index, value)| {
let value = match value {
Ok(value) => value,
Err(err) => return (index, Value::error(err, head)),
};
let value =
ClosureEvalOnce::new(engine_state, stack, closure.clone())
.run_with_value(value)
.and_then(|data| data.into_value(head))
.unwrap_or_else(|err| Value::error(err, head));
(index, value)
})
.collect::<Vec<_>>();
apply_order(vec).into_pipeline_data(head, engine_state.ctrlc.clone())
}))
} else {
Ok(PipelineData::empty())
}
}
}
.and_then(|x| x.filter(|v| !v.is_nothing(), engine_state.ctrlc.clone()))
.map(|data| data.set_metadata(metadata))

View File

@ -115,7 +115,7 @@ impl Command for Reduce {
.add_arg(value)
.add_arg(acc)
.run_with_input(PipelineData::Empty)?
.into_value(head);
.into_value(head)?;
}
Ok(acc.with_span(head).into_pipeline_data())

View File

@ -173,7 +173,7 @@ fn reject(
) -> Result<PipelineData, ShellError> {
let mut unique_rows: HashSet<usize> = HashSet::new();
let metadata = input.metadata();
let val = input.into_value(span);
let val = input.into_value(span)?;
let mut val = val;
let mut new_columns = vec![];
let mut new_rows = vec![];

View File

@ -87,15 +87,14 @@ impl Command for Skip {
let ctrlc = engine_state.ctrlc.clone();
let input_span = input.span().unwrap_or(call.head);
match input {
PipelineData::ExternalStream { .. } => Err(ShellError::OnlySupportsThisInputType {
PipelineData::ByteStream(stream, ..) => Err(ShellError::OnlySupportsThisInputType {
exp_input_type: "list, binary or range".into(),
wrong_type: "raw data".into(),
wrong_type: "byte stream".into(),
dst_span: call.head,
src_span: input_span,
src_span: stream.span(),
}),
PipelineData::Value(Value::Binary { val, .. }, metadata) => {
let bytes = val.into_iter().skip(n).collect::<Vec<_>>();
Ok(Value::binary(bytes, input_span).into_pipeline_data_with_metadata(metadata))
}
_ => Ok(input

View File

@ -85,7 +85,8 @@ impl Command for SkipUntil {
.skip_while(move |value| {
closure
.run_with_value(value.clone())
.map(|data| data.into_value(head).is_false())
.and_then(|data| data.into_value(head))
.map(|cond| cond.is_false())
.unwrap_or(false)
})
.into_pipeline_data_with_metadata(head, engine_state.ctrlc.clone(), metadata))

View File

@ -90,7 +90,8 @@ impl Command for SkipWhile {
.skip_while(move |value| {
closure
.run_with_value(value.clone())
.map(|data| data.into_value(head).is_true())
.and_then(|data| data.into_value(head))
.map(|cond| cond.is_true())
.unwrap_or(false)
})
.into_pipeline_data_with_metadata(head, engine_state.ctrlc.clone(), metadata))

View File

@ -78,14 +78,12 @@ impl Command for Take {
stream.modify(|iter| iter.take(rows_desired)),
metadata,
)),
PipelineData::ExternalStream { span, .. } => {
Err(ShellError::OnlySupportsThisInputType {
exp_input_type: "list, binary or range".into(),
wrong_type: "raw data".into(),
dst_span: head,
src_span: span,
})
}
PipelineData::ByteStream(stream, ..) => Err(ShellError::OnlySupportsThisInputType {
exp_input_type: "list, binary or range".into(),
wrong_type: "byte stream".into(),
dst_span: head,
src_span: stream.span(),
}),
PipelineData::Empty => Err(ShellError::OnlySupportsThisInputType {
exp_input_type: "list, binary or range".into(),
wrong_type: "null".into(),

View File

@ -81,7 +81,8 @@ impl Command for TakeUntil {
.take_while(move |value| {
closure
.run_with_value(value.clone())
.map(|data| data.into_value(head).is_false())
.and_then(|data| data.into_value(head))
.map(|cond| cond.is_false())
.unwrap_or(false)
})
.into_pipeline_data_with_metadata(head, engine_state.ctrlc.clone(), metadata))

View File

@ -81,7 +81,8 @@ impl Command for TakeWhile {
.take_while(move |value| {
closure
.run_with_value(value.clone())
.map(|data| data.into_value(head).is_true())
.and_then(|data| data.into_value(head))
.map(|cond| cond.is_true())
.unwrap_or(false)
})
.into_pipeline_data_with_metadata(head, engine_state.ctrlc.clone(), metadata))

View File

@ -1,6 +1,17 @@
use nu_engine::{command_prelude::*, get_eval_block_with_early_return};
use nu_protocol::{engine::Closure, OutDest, RawStream};
use std::{sync::mpsc, thread};
use nu_protocol::{
byte_stream::copy_with_interrupt, engine::Closure, process::ChildPipe, ByteStream,
ByteStreamSource, OutDest,
};
use std::{
io::{self, Read, Write},
sync::{
atomic::AtomicBool,
mpsc::{self, Sender},
Arc,
},
thread::{self, JoinHandle},
};
#[derive(Clone)]
pub struct Tee;
@ -67,138 +78,205 @@ use it in your pipeline."#
let head = call.head;
let use_stderr = call.has_flag(engine_state, stack, "stderr")?;
let Spanned {
item: Closure { block_id, captures },
span: closure_span,
} = call.req(engine_state, stack, 0)?;
let closure: Spanned<Closure> = call.req(engine_state, stack, 0)?;
let closure_span = closure.span;
let closure = closure.item;
let closure_engine_state = engine_state.clone();
let mut closure_stack = stack
.captures_to_stack_preserve_out_dest(captures)
.reset_pipes();
let mut eval_block = {
let closure_engine_state = engine_state.clone();
let mut closure_stack = stack
.captures_to_stack_preserve_out_dest(closure.captures)
.reset_pipes();
let eval_block_with_early_return = get_eval_block_with_early_return(engine_state);
let metadata = input.metadata();
let metadata_clone = metadata.clone();
move |input| {
let result = eval_block_with_early_return(
&closure_engine_state,
&mut closure_stack,
closure_engine_state.get_block(closure.block_id),
input,
);
// Make sure to drain any iterator produced to avoid unexpected behavior
result.and_then(|data| data.drain().map(|_| ()))
}
};
let eval_block_with_early_return = get_eval_block_with_early_return(engine_state);
if let PipelineData::ByteStream(stream, metadata) = input {
let span = stream.span();
let ctrlc = engine_state.ctrlc.clone();
let eval_block = {
let metadata = metadata.clone();
move |stream| eval_block(PipelineData::ByteStream(stream, metadata))
};
match input {
// Handle external streams specially, to make sure they pass through
PipelineData::ExternalStream {
stdout,
stderr,
exit_code,
span,
metadata,
trim_end_newline,
} => {
let known_size = if use_stderr {
stderr.as_ref().and_then(|s| s.known_size)
} else {
stdout.as_ref().and_then(|s| s.known_size)
};
match stream.into_source() {
ByteStreamSource::Read(read) => {
if use_stderr {
return stderr_misuse(span, head);
}
let with_stream = move |rx: mpsc::Receiver<Result<Vec<u8>, ShellError>>| {
let iter = rx.into_iter();
let input_from_channel = PipelineData::ExternalStream {
stdout: Some(RawStream::new(
Box::new(iter),
closure_engine_state.ctrlc.clone(),
span,
known_size,
)),
stderr: None,
exit_code: None,
span,
metadata: metadata_clone,
trim_end_newline,
let tee = IoTee::new(read, span, eval_block)?;
Ok(PipelineData::ByteStream(
ByteStream::read(tee, span, ctrlc),
metadata,
))
}
ByteStreamSource::File(file) => {
if use_stderr {
return stderr_misuse(span, head);
}
let tee = IoTee::new(file, span, eval_block)?;
Ok(PipelineData::ByteStream(
ByteStream::read(tee, span, ctrlc),
metadata,
))
}
ByteStreamSource::Child(mut child) => {
let stderr_thread = if use_stderr {
let stderr_thread = if let Some(stderr) = child.stderr.take() {
match stack.stderr() {
OutDest::Pipe | OutDest::Capture => {
let tee = IoTee::new(stderr, span, eval_block)?;
child.stderr = Some(ChildPipe::Tee(Box::new(tee)));
None
}
OutDest::Null => Some(tee_pipe_on_thread(
stderr,
io::sink(),
span,
ctrlc.as_ref(),
eval_block,
)?),
OutDest::Inherit => Some(tee_pipe_on_thread(
stderr,
io::stderr(),
span,
ctrlc.as_ref(),
eval_block,
)?),
OutDest::File(file) => Some(tee_pipe_on_thread(
stderr,
file.clone(),
span,
ctrlc.as_ref(),
eval_block,
)?),
}
} else {
None
};
if let Some(stdout) = child.stdout.take() {
match stack.stdout() {
OutDest::Pipe | OutDest::Capture => {
child.stdout = Some(stdout);
Ok(())
}
OutDest::Null => {
copy_pipe(stdout, io::sink(), span, ctrlc.as_deref())
}
OutDest::Inherit => {
copy_pipe(stdout, io::stdout(), span, ctrlc.as_deref())
}
OutDest::File(file) => {
copy_pipe(stdout, file.as_ref(), span, ctrlc.as_deref())
}
}?;
}
stderr_thread
} else {
let stderr_thread = if let Some(stderr) = child.stderr.take() {
match stack.stderr() {
OutDest::Pipe | OutDest::Capture => {
child.stderr = Some(stderr);
Ok(None)
}
OutDest::Null => {
copy_pipe_on_thread(stderr, io::sink(), span, ctrlc.as_ref())
.map(Some)
}
OutDest::Inherit => {
copy_pipe_on_thread(stderr, io::stderr(), span, ctrlc.as_ref())
.map(Some)
}
OutDest::File(file) => {
copy_pipe_on_thread(stderr, file.clone(), span, ctrlc.as_ref())
.map(Some)
}
}?
} else {
None
};
if let Some(stdout) = child.stdout.take() {
match stack.stdout() {
OutDest::Pipe | OutDest::Capture => {
let tee = IoTee::new(stdout, span, eval_block)?;
child.stdout = Some(ChildPipe::Tee(Box::new(tee)));
Ok(())
}
OutDest::Null => {
tee_pipe(stdout, io::sink(), span, ctrlc.as_deref(), eval_block)
}
OutDest::Inherit => tee_pipe(
stdout,
io::stdout(),
span,
ctrlc.as_deref(),
eval_block,
),
OutDest::File(file) => tee_pipe(
stdout,
file.as_ref(),
span,
ctrlc.as_deref(),
eval_block,
),
}?;
}
stderr_thread
};
let result = eval_block_with_early_return(
&closure_engine_state,
&mut closure_stack,
closure_engine_state.get_block(block_id),
input_from_channel,
);
// Make sure to drain any iterator produced to avoid unexpected behavior
result.and_then(|data| data.drain())
};
if use_stderr {
let stderr = stderr
.map(|stderr| {
let iter = tee(stderr.stream, with_stream).err_span(head)?;
Ok::<_, ShellError>(RawStream::new(
Box::new(iter.map(flatten_result)),
stderr.ctrlc,
stderr.span,
stderr.known_size,
))
})
.transpose()?;
Ok(PipelineData::ExternalStream {
stdout,
stderr,
exit_code,
span,
metadata,
trim_end_newline,
})
} else {
let stdout = stdout
.map(|stdout| {
let iter = tee(stdout.stream, with_stream).err_span(head)?;
Ok::<_, ShellError>(RawStream::new(
Box::new(iter.map(flatten_result)),
stdout.ctrlc,
stdout.span,
stdout.known_size,
))
})
.transpose()?;
Ok(PipelineData::ExternalStream {
stdout,
stderr,
exit_code,
span,
metadata,
trim_end_newline,
})
if child.stdout.is_some() || child.stderr.is_some() {
Ok(PipelineData::ByteStream(
ByteStream::child(*child, span),
metadata,
))
} else {
if let Some(thread) = stderr_thread {
thread.join().unwrap_or_else(|_| Err(panic_error()))?;
}
child.wait()?;
Ok(PipelineData::Empty)
}
}
}
// --stderr is not allowed if the input is not an external stream
_ if use_stderr => Err(ShellError::UnsupportedInput {
msg: "--stderr can only be used on external streams".into(),
input: "the input to `tee` is not an external stream".into(),
msg_span: head,
input_span: input.span().unwrap_or(head),
}),
// Handle others with the plain iterator
_ => {
let teed = tee(input.into_iter(), move |rx| {
let input_from_channel = rx.into_pipeline_data_with_metadata(
head,
closure_engine_state.ctrlc.clone(),
metadata_clone,
);
let result = eval_block_with_early_return(
&closure_engine_state,
&mut closure_stack,
closure_engine_state.get_block(block_id),
input_from_channel,
);
// Make sure to drain any iterator produced to avoid unexpected behavior
result.and_then(|data| data.drain())
})
.err_span(head)?
.map(move |result| result.unwrap_or_else(|err| Value::error(err, closure_span)))
.into_pipeline_data_with_metadata(
head,
engine_state.ctrlc.clone(),
metadata,
);
Ok(teed)
} else {
if use_stderr {
return stderr_misuse(input.span().unwrap_or(head), head);
}
let span = input.span().unwrap_or(head);
let ctrlc = engine_state.ctrlc.clone();
let metadata = input.metadata();
let metadata_clone = metadata.clone();
Ok(tee(input.into_iter(), move |rx| {
let input = rx.into_pipeline_data_with_metadata(span, ctrlc, metadata_clone);
eval_block(input)
})
.err_span(call.head)?
.map(move |result| result.unwrap_or_else(|err| Value::error(err, closure_span)))
.into_pipeline_data_with_metadata(
span,
engine_state.ctrlc.clone(),
metadata,
))
}
}
@ -213,10 +291,6 @@ fn panic_error() -> ShellError {
}
}
fn flatten_result<T, E>(result: Result<Result<T, E>, E>) -> Result<T, E> {
result.unwrap_or_else(Err)
}
/// Copies the iterator to a channel on another thread. If an error is produced on that thread,
/// it is embedded in the resulting iterator as an `Err` as soon as possible. When the iterator
/// finishes, it waits for the other thread to finish, also handling any error produced at that
@ -233,7 +307,7 @@ where
let mut thread = Some(
thread::Builder::new()
.name("stderr consumer".into())
.name("tee".into())
.spawn(move || with_cloned_stream(rx))?,
);
@ -273,6 +347,134 @@ where
}))
}
fn stderr_misuse<T>(span: Span, head: Span) -> Result<T, ShellError> {
Err(ShellError::UnsupportedInput {
msg: "--stderr can only be used on external commands".into(),
input: "the input to `tee` is not an external commands".into(),
msg_span: head,
input_span: span,
})
}
struct IoTee<R: Read> {
reader: R,
sender: Option<Sender<Vec<u8>>>,
thread: Option<JoinHandle<Result<(), ShellError>>>,
}
impl<R: Read> IoTee<R> {
fn new(
reader: R,
span: Span,
eval_block: impl FnOnce(ByteStream) -> Result<(), ShellError> + Send + 'static,
) -> Result<Self, ShellError> {
let (sender, receiver) = mpsc::channel();
let thread = thread::Builder::new()
.name("tee".into())
.spawn(move || eval_block(ByteStream::from_iter(receiver, span, None)))
.err_span(span)?;
Ok(Self {
reader,
sender: Some(sender),
thread: Some(thread),
})
}
}
impl<R: Read> Read for IoTee<R> {
fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> {
if let Some(thread) = self.thread.take() {
if thread.is_finished() {
if let Err(err) = thread.join().unwrap_or_else(|_| Err(panic_error())) {
return Err(io::Error::new(io::ErrorKind::Other, err));
}
} else {
self.thread = Some(thread)
}
}
let len = self.reader.read(buf)?;
if len == 0 {
self.sender = None;
if let Some(thread) = self.thread.take() {
if let Err(err) = thread.join().unwrap_or_else(|_| Err(panic_error())) {
return Err(io::Error::new(io::ErrorKind::Other, err));
}
}
} else if let Some(sender) = self.sender.as_mut() {
if sender.send(buf[..len].to_vec()).is_err() {
self.sender = None;
}
}
Ok(len)
}
}
fn tee_pipe(
pipe: ChildPipe,
mut dest: impl Write,
span: Span,
ctrlc: Option<&AtomicBool>,
eval_block: impl FnOnce(ByteStream) -> Result<(), ShellError> + Send + 'static,
) -> Result<(), ShellError> {
match pipe {
ChildPipe::Pipe(pipe) => {
let mut tee = IoTee::new(pipe, span, eval_block)?;
copy_with_interrupt(&mut tee, &mut dest, span, ctrlc)?;
}
ChildPipe::Tee(tee) => {
let mut tee = IoTee::new(tee, span, eval_block)?;
copy_with_interrupt(&mut tee, &mut dest, span, ctrlc)?;
}
}
Ok(())
}
fn tee_pipe_on_thread(
pipe: ChildPipe,
dest: impl Write + Send + 'static,
span: Span,
ctrlc: Option<&Arc<AtomicBool>>,
eval_block: impl FnOnce(ByteStream) -> Result<(), ShellError> + Send + 'static,
) -> Result<JoinHandle<Result<(), ShellError>>, ShellError> {
let ctrlc = ctrlc.cloned();
thread::Builder::new()
.name("stderr tee".into())
.spawn(move || tee_pipe(pipe, dest, span, ctrlc.as_deref(), eval_block))
.map_err(|e| e.into_spanned(span).into())
}
fn copy_pipe(
pipe: ChildPipe,
mut dest: impl Write,
span: Span,
ctrlc: Option<&AtomicBool>,
) -> Result<(), ShellError> {
match pipe {
ChildPipe::Pipe(mut pipe) => {
copy_with_interrupt(&mut pipe, &mut dest, span, ctrlc)?;
}
ChildPipe::Tee(mut tee) => {
copy_with_interrupt(&mut tee, &mut dest, span, ctrlc)?;
}
}
Ok(())
}
fn copy_pipe_on_thread(
pipe: ChildPipe,
dest: impl Write + Send + 'static,
span: Span,
ctrlc: Option<&Arc<AtomicBool>>,
) -> Result<JoinHandle<Result<(), ShellError>>, ShellError> {
let ctrlc = ctrlc.cloned();
thread::Builder::new()
.name("stderr copier".into())
.spawn(move || copy_pipe(pipe, dest, span, ctrlc.as_deref()))
.map_err(|e| e.into_spanned(span).into())
}
#[test]
fn tee_copies_values_to_other_thread_and_passes_them_through() {
let (tx, rx) = mpsc::channel();

View File

@ -225,8 +225,8 @@ fn update(
type_name: "empty pipeline".to_string(),
span: head,
}),
PipelineData::ExternalStream { .. } => Err(ShellError::IncompatiblePathAccess {
type_name: "external stream".to_string(),
PipelineData::ByteStream(..) => Err(ShellError::IncompatiblePathAccess {
type_name: "byte stream".to_string(),
span: head,
}),
}
@ -250,7 +250,7 @@ fn update_value_by_closure(
let new_value = closure
.add_arg(arg.clone())
.run_with_input(value_at_path.into_pipeline_data())?
.into_value(span);
.into_value(span)?;
value.update_data_at_cell_path(cell_path, new_value)
}
@ -273,7 +273,7 @@ fn update_single_value_by_closure(
let new_value = closure
.add_arg(arg.clone())
.run_with_input(value_at_path.into_pipeline_data())?
.into_value(span);
.into_value(span)?;
value.update_data_at_cell_path(cell_path, new_value)
}

View File

@ -218,7 +218,7 @@ fn upsert(
if let Value::Closure { val, .. } = replacement {
ClosureEvalOnce::new(engine_state, stack, *val)
.run_with_value(value)?
.into_value(head)
.into_value(head)?
} else {
replacement
}
@ -285,8 +285,8 @@ fn upsert(
type_name: "empty pipeline".to_string(),
span: head,
}),
PipelineData::ExternalStream { .. } => Err(ShellError::IncompatiblePathAccess {
type_name: "external stream".to_string(),
PipelineData::ByteStream(..) => Err(ShellError::IncompatiblePathAccess {
type_name: "byte stream".to_string(),
span: head,
}),
}
@ -311,7 +311,11 @@ fn upsert_value_by_closure(
.map(IntoPipelineData::into_pipeline_data)
.unwrap_or(PipelineData::Empty);
let new_value = closure.add_arg(arg).run_with_input(input)?.into_value(span);
let new_value = closure
.add_arg(arg)
.run_with_input(input)?
.into_value(span)?;
value.upsert_data_at_cell_path(cell_path, new_value)
}
@ -334,7 +338,11 @@ fn upsert_single_value_by_closure(
.map(IntoPipelineData::into_pipeline_data)
.unwrap_or(PipelineData::Empty);
let new_value = closure.add_arg(arg).run_with_input(input)?.into_value(span);
let new_value = closure
.add_arg(arg)
.run_with_input(input)?
.into_value(span)?;
value.upsert_data_at_cell_path(cell_path, new_value)
}

View File

@ -36,7 +36,7 @@ pub fn boolean_fold(
break;
}
let pred = closure.run_with_value(value)?.into_value(head).is_true();
let pred = closure.run_with_value(value)?.into_value(head)?.is_true();
if pred == accumulator {
return Ok(Value::bool(accumulator, head).into_pipeline_data());

View File

@ -180,13 +180,11 @@ fn values(
Err(err) => Err(err),
}
}
PipelineData::ExternalStream { .. } => Err(ShellError::OnlySupportsThisInputType {
PipelineData::ByteStream(stream, ..) => Err(ShellError::OnlySupportsThisInputType {
exp_input_type: "record or table".into(),
wrong_type: "raw data".into(),
wrong_type: "byte stream".into(),
dst_span: head,
src_span: input
.span()
.expect("PipelineData::ExternalStream had no span"),
src_span: stream.span(),
}),
}
}

View File

@ -57,9 +57,14 @@ not supported."#
let metadata = input.metadata();
Ok(input
.into_iter_strict(head)?
.filter_map(move |value| match closure.run_with_value(value.clone()) {
Ok(data) => data.into_value(head).is_true().then_some(value),
Err(err) => Some(Value::error(err, head)),
.filter_map(move |value| {
match closure
.run_with_value(value.clone())
.and_then(|data| data.into_value(head))
{
Ok(cond) => cond.is_true().then_some(value),
Err(err) => Some(Value::error(err, head)),
}
})
.into_pipeline_data_with_metadata(head, engine_state.ctrlc.clone(), metadata))
}

View File

@ -43,8 +43,8 @@ impl Command for Wrap {
.into_iter()
.map(move |x| Value::record(record! { name.clone() => x }, span))
.into_pipeline_data_with_metadata(span, engine_state.ctrlc.clone(), metadata)),
PipelineData::ExternalStream { .. } => Ok(Value::record(
record! { name => input.into_value(span) },
PipelineData::ByteStream(stream, ..) => Ok(Value::record(
record! { name => stream.into_value()? },
span,
)
.into_pipeline_data_with_metadata(metadata)),

View File

@ -59,7 +59,7 @@ impl Command for FromJson {
let (string_input, span, metadata) = input.collect_string_strict(span)?;
if string_input.is_empty() {
return Ok(PipelineData::new_with_metadata(metadata, span));
return Ok(Value::nothing(span).into_pipeline_data());
}
let strict = call.has_flag(engine_state, stack, "strict")?;

View File

@ -2,9 +2,8 @@
// implementation here is unique.
use std::{
collections::VecDeque,
error::Error,
io::{self, Cursor, ErrorKind, Write},
io::{self, Cursor, ErrorKind},
string::FromUtf8Error,
sync::{atomic::AtomicBool, Arc},
};
@ -12,7 +11,6 @@ use std::{
use byteorder::{BigEndian, ReadBytesExt};
use chrono::{TimeZone, Utc};
use nu_engine::command_prelude::*;
use nu_protocol::RawStream;
use rmp::decode::{self as mp, ValueReadError};
/// Max recursion depth
@ -121,12 +119,20 @@ MessagePack: https://msgpack.org/
read_msgpack(Cursor::new(bytes), opts)
}
// Deserialize from a raw stream directly without having to collect it
PipelineData::ExternalStream {
stdout: Some(raw_stream),
..
} => read_msgpack(ReadRawStream::new(raw_stream), opts),
PipelineData::ByteStream(stream, ..) => {
let span = stream.span();
if let Some(reader) = stream.reader() {
read_msgpack(reader, opts)
} else {
Err(ShellError::PipelineMismatch {
exp_input_type: "binary or byte stream".into(),
dst_span: call.head,
src_span: span,
})
}
}
input => Err(ShellError::PipelineMismatch {
exp_input_type: "binary".into(),
exp_input_type: "binary or byte stream".into(),
dst_span: call.head,
src_span: input.span().unwrap_or(call.head),
}),
@ -483,57 +489,6 @@ where
.map_err(|err| ReadError::Io(err, span))
}
/// Adapter to read MessagePack from a `RawStream`
///
/// TODO: contribute this back to `RawStream` in general, with more polish, if it works
pub(crate) struct ReadRawStream {
pub stream: RawStream,
// Use a `VecDeque` for read efficiency
pub leftover: VecDeque<u8>,
}
impl ReadRawStream {
pub(crate) fn new(mut stream: RawStream) -> ReadRawStream {
ReadRawStream {
leftover: std::mem::take(&mut stream.leftover).into(),
stream,
}
}
}
impl io::Read for ReadRawStream {
fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> {
if buf.is_empty() {
Ok(0)
} else if !self.leftover.is_empty() {
// Take as many leftover bytes as possible
self.leftover.read(buf)
} else {
// Try to get data from the RawStream. We have to be careful not to break on a zero-len
// buffer though, since that would mean EOF
loop {
if let Some(result) = self.stream.stream.next() {
let bytes = result.map_err(|err| io::Error::new(ErrorKind::Other, err))?;
if !bytes.is_empty() {
let min_len = bytes.len().min(buf.len());
let (source, leftover_bytes) = bytes.split_at(min_len);
buf[0..min_len].copy_from_slice(source);
// Keep whatever bytes we couldn't use in the leftover vec
self.leftover.write_all(leftover_bytes)?;
return Ok(min_len);
} else {
// Zero-length buf, continue
continue;
}
} else {
// End of input
return Ok(0);
}
}
}
}
}
/// Return an error if this is not the end of file.
///
/// This can help detect if parsing succeeded incorrectly, perhaps due to corruption.
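
The hand-rolled `ReadRawStream` adapter is deleted above because a `ByteStream` can hand out an `impl Read` directly, letting the MessagePack decoder consume input without collecting it first. A minimal sketch of that dispatch pattern, reusing the error shapes from this hunk; `count_bytes` is a hypothetical stand-in for `read_msgpack`:

```rust
use std::io::{self, Read};

use nu_protocol::{PipelineData, ShellError, Span};

// Stand-in for `read_msgpack`: any consumer that accepts an `impl Read`.
fn count_bytes(mut reader: impl Read) -> io::Result<u64> {
    io::copy(&mut reader, &mut io::sink())
}

// Dispatch as the new `from msgpack` does: hand the decoder a reader instead of
// collecting the stream into memory first.
fn consume(input: PipelineData, head: Span) -> Result<u64, ShellError> {
    match input {
        PipelineData::ByteStream(stream, ..) => {
            let span = stream.span();
            if let Some(reader) = stream.reader() {
                count_bytes(reader).map_err(|err| ShellError::GenericError {
                    error: "I/O error while reading byte stream".into(),
                    msg: err.to_string(),
                    span: Some(span),
                    help: None,
                    inner: vec![],
                })
            } else {
                // No readable source available for this stream.
                Err(ShellError::PipelineMismatch {
                    exp_input_type: "binary or byte stream".into(),
                    dst_span: head,
                    src_span: span,
                })
            }
        }
        input => Err(ShellError::PipelineMismatch {
            exp_input_type: "binary or byte stream".into(),
            dst_span: head,
            src_span: input.span().unwrap_or(head),
        }),
    }
}
```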

View File

@ -2,7 +2,7 @@ use std::io::Cursor;
use nu_engine::command_prelude::*;
use super::msgpack::{read_msgpack, Opts, ReadRawStream};
use super::msgpack::{read_msgpack, Opts};
const BUFFER_SIZE: usize = 65536;
@ -50,15 +50,21 @@ impl Command for FromMsgpackz {
read_msgpack(reader, opts)
}
// Deserialize from a raw stream directly without having to collect it
PipelineData::ExternalStream {
stdout: Some(raw_stream),
..
} => {
let reader = brotli::Decompressor::new(ReadRawStream::new(raw_stream), BUFFER_SIZE);
read_msgpack(reader, opts)
PipelineData::ByteStream(stream, ..) => {
let span = stream.span();
if let Some(reader) = stream.reader() {
let reader = brotli::Decompressor::new(reader, BUFFER_SIZE);
read_msgpack(reader, opts)
} else {
Err(ShellError::PipelineMismatch {
exp_input_type: "binary or byte stream".into(),
dst_span: call.head,
src_span: span,
})
}
}
_ => Err(ShellError::PipelineMismatch {
exp_input_type: "binary".into(),
exp_input_type: "binary or byte stream".into(),
dst_span: call.head,
src_span: span,
}),

View File

@ -81,28 +81,32 @@ fn convert_columns(columns: &[Value]) -> Result<Vec<String>, ShellError> {
}
fn collect_binary(input: PipelineData, span: Span) -> Result<Vec<u8>, ShellError> {
let mut bytes = vec![];
let mut values = input.into_iter();
if let PipelineData::ByteStream(stream, ..) = input {
stream.into_bytes()
} else {
let mut bytes = vec![];
let mut values = input.into_iter();
loop {
match values.next() {
Some(Value::Binary { val: b, .. }) => {
bytes.extend_from_slice(&b);
loop {
match values.next() {
Some(Value::Binary { val: b, .. }) => {
bytes.extend_from_slice(&b);
}
Some(Value::Error { error, .. }) => return Err(*error),
Some(x) => {
return Err(ShellError::UnsupportedInput {
msg: "Expected binary from pipeline".to_string(),
input: "value originates from here".into(),
msg_span: span,
input_span: x.span(),
})
}
None => break,
}
Some(Value::Error { error, .. }) => return Err(*error),
Some(x) => {
return Err(ShellError::UnsupportedInput {
msg: "Expected binary from pipeline".to_string(),
input: "value originates from here".into(),
msg_span: span,
input_span: x.span(),
})
}
None => break,
}
}
Ok(bytes)
Ok(bytes)
}
}
fn from_ods(

View File

@ -82,27 +82,31 @@ fn convert_columns(columns: &[Value]) -> Result<Vec<String>, ShellError> {
}
fn collect_binary(input: PipelineData, span: Span) -> Result<Vec<u8>, ShellError> {
let mut bytes = vec![];
let mut values = input.into_iter();
if let PipelineData::ByteStream(stream, ..) = input {
stream.into_bytes()
} else {
let mut bytes = vec![];
let mut values = input.into_iter();
loop {
match values.next() {
Some(Value::Binary { val: b, .. }) => {
bytes.extend_from_slice(&b);
loop {
match values.next() {
Some(Value::Binary { val: b, .. }) => {
bytes.extend_from_slice(&b);
}
Some(x) => {
return Err(ShellError::UnsupportedInput {
msg: "Expected binary from pipeline".to_string(),
input: "value originates from here".into(),
msg_span: span,
input_span: x.span(),
})
}
None => break,
}
Some(x) => {
return Err(ShellError::UnsupportedInput {
msg: "Expected binary from pipeline".to_string(),
input: "value originates from here".into(),
msg_span: span,
input_span: x.span(),
})
}
None => break,
}
}
Ok(bytes)
Ok(bytes)
}
}
fn from_xlsx(

View File

@ -150,7 +150,7 @@ pub fn to_delimited_data(
span: Span,
config: &Config,
) -> Result<PipelineData, ShellError> {
let value = input.into_value(span);
let value = input.into_value(span)?;
let output = match from_value_to_delimited_string(&value, sep, config, span) {
Ok(mut x) => {
if noheaders {

View File

@ -46,7 +46,7 @@ impl Command for ToJson {
let span = call.head;
// allow ranges to expand and turn into array
let input = input.try_expand_range()?;
let value = input.into_value(span);
let value = input.into_value(span)?;
let json_value = value_to_json_value(&value)?;
let json_result = if raw {

View File

@ -75,7 +75,7 @@ MessagePack: https://msgpack.org/
input: PipelineData,
) -> Result<PipelineData, ShellError> {
let value_span = input.span().unwrap_or(call.head);
let value = input.into_value(value_span);
let value = input.into_value(value_span)?;
let mut out = vec![];
write_value(&mut out, &value, 0)?;

View File

@ -70,7 +70,7 @@ impl Command for ToMsgpackz {
.transpose()?;
let value_span = input.span().unwrap_or(call.head);
let value = input.into_value(value_span);
let value = input.into_value(value_span)?;
let mut out_buf = vec![];
let mut out = brotli::CompressorWriter::new(
&mut out_buf,

View File

@ -53,7 +53,7 @@ impl Command for ToNuon {
};
let span = call.head;
let value = input.into_value(span);
let value = input.into_value(span)?;
match nuon::to_nuon(&value, style, Some(span)) {
Ok(serde_nuon_string) => {

View File

@ -1,6 +1,12 @@
use chrono_humanize::HumanTime;
use nu_engine::command_prelude::*;
use nu_protocol::{format_duration, format_filesize_from_conf, Config, RawStream, ValueIterator};
use nu_protocol::{format_duration, format_filesize_from_conf, ByteStream, Config};
const LINE_ENDING: &str = if cfg!(target_os = "windows") {
"\r\n"
} else {
"\n"
};
#[derive(Clone)]
pub struct ToText;
@ -28,39 +34,28 @@ impl Command for ToText {
input: PipelineData,
) -> Result<PipelineData, ShellError> {
let span = call.head;
let config = engine_state.get_config();
let line_ending = if cfg!(target_os = "windows") {
"\r\n"
} else {
"\n"
};
let input = input.try_expand_range()?;
if let PipelineData::ListStream(stream, _) = input {
Ok(PipelineData::ExternalStream {
stdout: Some(RawStream::new(
Box::new(ListStreamIterator {
stream: stream.into_inner(),
separator: line_ending.into(),
config: config.clone(),
}),
engine_state.ctrlc.clone(),
span,
None,
)),
stderr: None,
exit_code: None,
span,
metadata: None,
trim_end_newline: false,
})
} else {
// FIXME: don't collect! stream the output wherever possible!
// Even if the data is collected when it arrives at `to text`, we should be able to stream it out
let collected_input = local_into_string(input.into_value(span), line_ending, config);
Ok(Value::string(collected_input, span).into_pipeline_data())
match input {
PipelineData::Empty => Ok(Value::string(String::new(), span).into_pipeline_data()),
PipelineData::Value(value, ..) => {
let str = local_into_string(value, LINE_ENDING, engine_state.get_config());
Ok(Value::string(str, span).into_pipeline_data())
}
PipelineData::ListStream(stream, meta) => {
let span = stream.span();
let config = engine_state.get_config().clone();
let iter = stream.into_inner().map(move |value| {
let mut str = local_into_string(value, LINE_ENDING, &config);
str.push_str(LINE_ENDING);
str
});
Ok(PipelineData::ByteStream(
ByteStream::from_iter(iter, span, engine_state.ctrlc.clone()),
meta,
))
}
PipelineData::ByteStream(stream, meta) => Ok(PipelineData::ByteStream(stream, meta)),
}
}
@ -85,26 +80,6 @@ impl Command for ToText {
}
}
struct ListStreamIterator {
stream: ValueIterator,
separator: String,
config: Config,
}
impl Iterator for ListStreamIterator {
type Item = Result<Vec<u8>, ShellError>;
fn next(&mut self) -> Option<Self::Item> {
if let Some(item) = self.stream.next() {
let mut string = local_into_string(item, &self.separator, &self.config);
string.push_str(&self.separator);
Some(Ok(string.as_bytes().to_vec()))
} else {
None
}
}
}
fn local_into_string(value: Value, separator: &str, config: &Config) -> String {
let span = value.span();
match value {
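
The `to text` rewrite above replaces the custom `ListStreamIterator`-over-`RawStream` plumbing with `ByteStream::from_iter` over a plain `String` iterator. A compact sketch of that conversion, with `render` standing in for `local_into_string` (which also needs the `Config`); the `Send + 'static` bounds are an assumption about what `from_iter` requires:

```rust
use std::sync::{atomic::AtomicBool, Arc};

use nu_protocol::{ByteStream, ListStream, PipelineData, Value};

const LINE_ENDING: &str = if cfg!(target_os = "windows") { "\r\n" } else { "\n" };

// Render each list item as text plus a trailing line ending and wrap the resulting
// string iterator as a byte stream, so downstream consumers read it lazily.
fn list_to_text(
    stream: ListStream,
    ctrlc: Option<Arc<AtomicBool>>,
    render: impl Fn(Value) -> String + Send + 'static,
) -> PipelineData {
    let span = stream.span();
    let iter = stream.into_inner().map(move |value| {
        let mut line = render(value);
        line.push_str(LINE_ENDING);
        line
    });
    PipelineData::ByteStream(ByteStream::from_iter(iter, span, ctrlc), None)
}
```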

View File

@ -141,7 +141,7 @@ fn to_toml(
input: PipelineData,
span: Span,
) -> Result<PipelineData, ShellError> {
let value = input.into_value(span);
let value = input.into_value(span)?;
let toml_value = value_to_toml_value(engine_state, &value, span)?;
match toml_value {

View File

@ -132,7 +132,7 @@ impl Job {
}
fn run(mut self, input: PipelineData, head: Span) -> Result<PipelineData, ShellError> {
let value = input.into_value(head);
let value = input.into_value(head)?;
self.write_xml_entry(value, true).and_then(|_| {
let b = self.writer.into_inner().into_inner();

View File

@ -95,7 +95,7 @@ pub fn value_to_yaml_value(v: &Value) -> Result<serde_yaml::Value, ShellError> {
}
fn to_yaml(input: PipelineData, head: Span) -> Result<PipelineData, ShellError> {
let value = input.into_value(head);
let value = input.into_value(head)?;
let yaml_value = value_to_yaml_value(&value)?;
match serde_yaml::to_string(&yaml_value) {

View File

@ -158,14 +158,16 @@ used as the next argument to the closure, otherwise generation stops.
}
Ok(other) => {
let val = other.into_value(head);
let error = ShellError::GenericError {
error: "Invalid block return".into(),
msg: format!("Expected record, found {}", val.get_type()),
span: Some(val.span()),
help: None,
inner: vec![],
};
let error = other
.into_value(head)
.map(|val| ShellError::GenericError {
error: "Invalid block return".into(),
msg: format!("Expected record, found {}", val.get_type()),
span: Some(val.span()),
help: None,
inner: vec![],
})
.unwrap_or_else(|err| err);
(Some(Value::error(error, head)), None)
}

View File

@ -1,7 +1,6 @@
use nu_cmd_base::input_handler::{operate, CmdArgument};
use nu_engine::command_prelude::*;
use std::marker::PhantomData;
use std::{io::Write, marker::PhantomData};
pub trait HashDigest: digest::Digest + Clone {
fn name() -> &'static str;
@ -38,7 +37,7 @@ impl CmdArgument for Arguments {
impl<D> Command for GenericDigest<D>
where
D: HashDigest + Send + Sync + 'static,
D: HashDigest + Write + Send + Sync + 'static,
digest::Output<D>: core::fmt::LowerHex,
{
fn name(&self) -> &str {
@ -81,54 +80,23 @@ where
call: &Call,
input: PipelineData,
) -> Result<PipelineData, ShellError> {
let head = call.head;
let binary = call.has_flag(engine_state, stack, "binary")?;
let cell_paths: Vec<CellPath> = call.rest(engine_state, stack, 0)?;
let cell_paths = (!cell_paths.is_empty()).then_some(cell_paths);
let args = Arguments { binary, cell_paths };
let mut hasher = D::new();
match input {
PipelineData::ExternalStream {
stdout: Some(stream),
span,
..
} => {
for item in stream {
match item {
// String and binary data are valid byte patterns
Ok(Value::String { val, .. }) => hasher.update(val.as_bytes()),
Ok(Value::Binary { val, .. }) => hasher.update(val),
// If any Error value is output, echo it back
Ok(v @ Value::Error { .. }) => return Ok(v.into_pipeline_data()),
// Unsupported data
Ok(other) => {
return Ok(Value::error(
ShellError::OnlySupportsThisInputType {
exp_input_type: "string and binary".into(),
wrong_type: other.get_type().to_string(),
dst_span: span,
src_span: other.span(),
},
span,
)
.into_pipeline_data());
}
Err(err) => return Err(err),
};
}
let digest = hasher.finalize();
if args.binary {
Ok(Value::binary(digest.to_vec(), span).into_pipeline_data())
} else {
Ok(Value::string(format!("{digest:x}"), span).into_pipeline_data())
}
if let PipelineData::ByteStream(stream, ..) = input {
stream.write_to(&mut hasher)?;
let digest = hasher.finalize();
if binary {
Ok(Value::binary(digest.to_vec(), head).into_pipeline_data())
} else {
Ok(Value::string(format!("{digest:x}"), head).into_pipeline_data())
}
_ => operate(
action::<D>,
args,
input,
call.head,
engine_state.ctrlc.clone(),
),
} else {
let args = Arguments { binary, cell_paths };
operate(action::<D>, args, input, head, engine_state.ctrlc.clone())
}
}
}
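
The `D: HashDigest + Write` bound above is what lets the whole stream be fed to the hasher with a single `write_to` call instead of matching on string/binary chunks. A sketch of that consumption pattern with a trivial byte-counting sink in place of a digest:

```rust
use std::io::{self, Write};

use nu_protocol::{ByteStream, ShellError};

// A trivial `Write` sink that counts bytes; in the command above the sink is the hash
// state itself, since the digest types are required to implement `Write`.
#[derive(Default)]
struct ByteCounter(u64);

impl Write for ByteCounter {
    fn write(&mut self, buf: &[u8]) -> io::Result<usize> {
        self.0 += buf.len() as u64;
        Ok(buf.len())
    }

    fn flush(&mut self) -> io::Result<()> {
        Ok(())
    }
}

// Stream the bytes into the sink without materializing them in memory first.
fn count_stream(stream: ByteStream) -> Result<u64, ShellError> {
    let mut counter = ByteCounter::default();
    stream.write_to(&mut counter)?;
    Ok(counter.0)
}
```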

View File

@ -409,15 +409,15 @@ fn display(help: &str, engine_state: &EngineState, stack: &mut Stack, span: Span
//TODO: support no-color mode
if let Some(highlighter) = engine_state.find_decl(b"nu-highlight", &[]) {
let decl = engine_state.get_decl(highlighter);
if let Ok(output) = decl.run(
let result = decl.run(
engine_state,
stack,
&Call::new(span),
Value::string(item, Span::unknown()).into_pipeline_data(),
) {
let result = output.into_value(Span::unknown());
match result.coerce_into_string() {
);
if let Ok(value) = result.and_then(|data| data.into_value(Span::unknown())) {
match value.coerce_into_string() {
Ok(s) => {
build.push_str(&s);
}

View File

@ -5,10 +5,9 @@ use base64::{
Engine,
};
use nu_engine::command_prelude::*;
use nu_protocol::{BufferedReader, RawStream};
use nu_protocol::ByteStream;
use std::{
collections::HashMap,
io::BufReader,
path::PathBuf,
str::FromStr,
sync::{
@ -119,21 +118,11 @@ pub fn response_to_buffer(
};
let reader = response.into_reader();
let buffered_input = BufReader::new(reader);
PipelineData::ExternalStream {
stdout: Some(RawStream::new(
Box::new(BufferedReader::new(buffered_input)),
engine_state.ctrlc.clone(),
span,
buffer_size,
)),
stderr: None,
exit_code: None,
span,
metadata: None,
trim_end_newline: false,
}
PipelineData::ByteStream(
ByteStream::read(reader, span, engine_state.ctrlc.clone()).with_known_size(buffer_size),
None,
)
}
pub fn request_add_authorization_header(
@ -529,25 +518,25 @@ fn request_handle_response_content(
if flags.full {
let response_status = resp.status();
let request_headers_value = match headers_to_nu(&extract_request_headers(&request), span) {
Ok(headers) => headers.into_value(span),
Err(_) => Value::nothing(span),
};
let request_headers_value = headers_to_nu(&extract_request_headers(&request), span)
.and_then(|data| data.into_value(span))
.unwrap_or(Value::nothing(span));
let response_headers_value = match headers_to_nu(&extract_response_headers(&resp), span) {
Ok(headers) => headers.into_value(span),
Err(_) => Value::nothing(span),
};
let response_headers_value = headers_to_nu(&extract_response_headers(&resp), span)
.and_then(|data| data.into_value(span))
.unwrap_or(Value::nothing(span));
let headers = record! {
"request" => request_headers_value,
"response" => response_headers_value,
};
let body = consume_response_body(resp)?.into_value(span)?;
let full_response = Value::record(
record! {
"headers" => Value::record(headers, span),
"body" => consume_response_body(resp)?.into_value(span),
"body" => body,
"status" => Value::int(response_status as i64, span),
},
span,
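
`response_to_buffer` now wraps the response body's reader directly in a `ByteStream` (and can tag the expected length with `with_known_size`) instead of building a `RawStream` over a `BufferedReader`. A minimal sketch of wrapping any reader this way, with an in-memory cursor standing in for `response.into_reader()`:

```rust
use std::{
    io::Cursor,
    sync::{atomic::AtomicBool, Arc},
};

use nu_protocol::{ByteStream, PipelineData, Span};

// Wrap an arbitrary `Read` source as pipeline output; in `http get` the reader is the
// response body, here a cursor over a fixed buffer plays that role.
fn reader_to_pipeline(span: Span, ctrlc: Option<Arc<AtomicBool>>) -> PipelineData {
    let body = Cursor::new(b"HTTP body bytes".to_vec());
    PipelineData::ByteStream(ByteStream::read(body, span, ctrlc), None)
}
```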

View File

@ -42,7 +42,7 @@ impl Command for SubCommand {
call: &Call,
input: PipelineData,
) -> Result<PipelineData, ShellError> {
parse(input.into_value(call.head), call.head, engine_state)
parse(input.into_value(call.head)?, call.head, engine_state)
}
fn examples(&self) -> Vec<Example> {

View File

@ -171,8 +171,8 @@ fn run(call: &Call, args: &Arguments, input: PipelineData) -> Result<PipelineDat
match input {
PipelineData::Value(val, md) => Ok(PipelineData::Value(handle_value(val, args, head), md)),
PipelineData::ListStream(..) => Ok(PipelineData::Value(
handle_value(input.into_value(head), args, head),
PipelineData::ListStream(stream, ..) => Ok(PipelineData::Value(
handle_value(stream.into_value(), args, head),
metadata,
)),
PipelineData::Empty { .. } => Err(ShellError::PipelineEmpty { dst_span: head }),

View File

@ -6,8 +6,6 @@ use std::fmt;
pub struct NuProgressBar {
pub pb: ProgressBar,
bytes_processed: u64,
total_bytes: Option<u64>,
}
impl NuProgressBar {
@ -40,8 +38,6 @@ impl NuProgressBar {
NuProgressBar {
pb: new_progress_bar,
total_bytes: None,
bytes_processed: 0,
}
}
@ -57,12 +53,4 @@ impl NuProgressBar {
pub fn abandoned_msg(&self, msg: String) {
self.pb.abandon_with_message(msg);
}
pub fn clone(&self) -> NuProgressBar {
NuProgressBar {
pb: self.pb.clone(),
bytes_processed: self.bytes_processed,
total_bytes: self.total_bytes,
}
}
}

View File

@ -57,16 +57,12 @@ documentation link at https://docs.rs/encoding_rs/latest/encoding_rs/#statics"#
let encoding: Option<Spanned<String>> = call.opt(engine_state, stack, 0)?;
match input {
PipelineData::ExternalStream { stdout: None, .. } => Ok(PipelineData::empty()),
PipelineData::ExternalStream {
stdout: Some(stream),
span: input_span,
..
} => {
let bytes: Vec<u8> = stream.into_bytes()?.item;
PipelineData::ByteStream(stream, ..) => {
let span = stream.span();
let bytes = stream.into_bytes()?;
match encoding {
Some(encoding_name) => super::encoding::decode(head, encoding_name, &bytes),
None => super::encoding::detect_encoding_name(head, input_span, &bytes)
None => super::encoding::detect_encoding_name(head, span, &bytes)
.map(|encoding| encoding.decode(&bytes).0.into_owned())
.map(|s| Value::string(s, head)),
}

View File

@ -81,13 +81,10 @@ documentation link at https://docs.rs/encoding_rs/latest/encoding_rs/#statics"#
let ignore_errors = call.has_flag(engine_state, stack, "ignore-errors")?;
match input {
PipelineData::ExternalStream { stdout: None, .. } => Ok(PipelineData::empty()),
PipelineData::ExternalStream {
stdout: Some(stream),
..
} => {
PipelineData::ByteStream(stream, ..) => {
let span = stream.span();
let s = stream.into_string()?;
super::encoding::encode(head, encoding, &s.item, s.span, ignore_errors)
super::encoding::encode(head, encoding, &s, span, ignore_errors)
.map(|val| val.into_pipeline_data())
}
PipelineData::Value(v, ..) => {

View File

@ -208,30 +208,21 @@ fn operate(
}
})
.into()),
PipelineData::ExternalStream { stdout: None, .. } => Ok(PipelineData::Empty),
PipelineData::ExternalStream {
stdout: Some(stream),
..
} => {
// Collect all `stream` chunks into a single `chunk` to be able to deal with matches that
// extend across chunk boundaries.
// This is a stop-gap solution until the `regex` crate supports streaming or an alternative
// solution is found.
// See https://github.com/nushell/nushell/issues/9795
let str = stream.into_string()?.item;
PipelineData::ByteStream(stream, ..) => {
if let Some(lines) = stream.lines() {
let iter = ParseIter {
captures: VecDeque::new(),
regex,
columns,
iter: lines,
span: head,
ctrlc,
};
// let iter = stream.lines();
let iter = ParseIter {
captures: VecDeque::new(),
regex,
columns,
iter: std::iter::once(Ok(str)),
span: head,
ctrlc,
};
Ok(ListStream::new(iter, head, None).into())
Ok(ListStream::new(iter, head, None).into())
} else {
Ok(PipelineData::Empty)
}
}
}
}
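
With `lines()`, `parse` can walk a byte stream line by line instead of collecting the whole input into one string (the old stop-gap for matches spanning chunk boundaries). A sketch of that iteration; it assumes, based on the usage above, that `lines()` yields `Result<String, ShellError>` items and returns `None` when there is nothing to read:

```rust
use nu_protocol::{ByteStream, ShellError};

// Count non-empty lines of a byte stream without collecting it first.
fn count_nonempty_lines(stream: ByteStream) -> Result<usize, ShellError> {
    let mut count = 0;
    if let Some(lines) = stream.lines() {
        for line in lines {
            // Each item is assumed to be a Result<String, ShellError>.
            let line = line?;
            if !line.trim().is_empty() {
                count += 1;
            }
        }
    }
    Ok(count)
}
```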

View File

@ -1,6 +1,5 @@
use nu_engine::command_prelude::*;
use nu_protocol::OutDest;
use std::thread;
#[derive(Clone)]
pub struct Complete;
@ -31,78 +30,53 @@ impl Command for Complete {
call: &Call,
input: PipelineData,
) -> Result<PipelineData, ShellError> {
let head = call.head;
match input {
PipelineData::ExternalStream {
stdout,
stderr,
exit_code,
..
} => {
let mut record = Record::new();
// use a thread to receive stderr message.
// Or we may get a deadlock if child process sends out too much bytes to stdout.
//
// For example: in normal linux system, stdout pipe's limit is 65535 bytes.
// if child process sends out 65536 bytes, the process will be hanged because no consumer
// consumes the first 65535 bytes
// So we need a thread to receive stderr message, then the current thread can continue to consume
// stdout messages.
let stderr_handler = stderr
.map(|stderr| {
let stderr_span = stderr.span;
thread::Builder::new()
.name("stderr consumer".to_string())
.spawn(move || {
let stderr = stderr.into_bytes()?;
if let Ok(st) = String::from_utf8(stderr.item.clone()) {
Ok::<_, ShellError>(Value::string(st, stderr.span))
} else {
Ok::<_, ShellError>(Value::binary(stderr.item, stderr.span))
}
})
.map(|handle| (handle, stderr_span))
.err_span(call.head)
})
.transpose()?;
if let Some(stdout) = stdout {
let stdout = stdout.into_bytes()?;
record.push(
"stdout",
if let Ok(st) = String::from_utf8(stdout.item.clone()) {
Value::string(st, stdout.span)
} else {
Value::binary(stdout.item, stdout.span)
},
)
}
if let Some((handler, stderr_span)) = stderr_handler {
let res = handler.join().map_err(|err| ShellError::ExternalCommand {
label: "Fail to receive external commands stderr message".to_string(),
help: format!("{err:?}"),
span: stderr_span,
})??;
record.push("stderr", res)
PipelineData::ByteStream(stream, ..) => {
let Ok(child) = stream.into_child() else {
return Err(ShellError::GenericError {
error: "Complete only works with external commands".into(),
msg: "complete only works on external commands".into(),
span: Some(call.head),
help: None,
inner: vec![],
});
};
if let Some(exit_code) = exit_code {
let mut v: Vec<_> = exit_code.into_iter().collect();
let output = child.wait_with_output()?;
let exit_code = output.exit_status.code();
let mut record = Record::new();
if let Some(v) = v.pop() {
record.push("exit_code", v);
}
if let Some(stdout) = output.stdout {
record.push(
"stdout",
match String::from_utf8(stdout) {
Ok(str) => Value::string(str, head),
Err(err) => Value::binary(err.into_bytes(), head),
},
);
}
if let Some(stderr) = output.stderr {
record.push(
"stderr",
match String::from_utf8(stderr) {
Ok(str) => Value::string(str, head),
Err(err) => Value::binary(err.into_bytes(), head),
},
);
}
record.push("exit_code", Value::int(exit_code.into(), head));
Ok(Value::record(record, call.head).into_pipeline_data())
}
// bubble up errors from the previous command
PipelineData::Value(Value::Error { error, .. }, _) => Err(*error),
_ => Err(ShellError::GenericError {
error: "Complete only works with external streams".into(),
msg: "complete only works on external streams".into(),
span: Some(call.head),
error: "Complete only works with external commands".into(),
msg: "complete only works on external commands".into(),
span: Some(head),
help: None,
inner: vec![],
}),
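
Because the child process travels inside the `ByteStream`, `complete` above no longer needs separate stdout/stderr/exit-code streams or the stderr drain thread that guarded against pipe-buffer deadlocks: it recovers the child and waits for everything in one call. A condensed sketch of that flow (the real command also tries UTF-8 before falling back to binary output):

```rust
use nu_protocol::{record, PipelineData, ShellError, Span, Value};

// Recover the external command behind a byte stream and gather its stdout, stderr,
// and exit code at once.
fn complete_external(input: PipelineData, head: Span) -> Result<Value, ShellError> {
    let not_external = || ShellError::GenericError {
        error: "Complete only works with external commands".into(),
        msg: "complete only works on external commands".into(),
        span: Some(head),
        help: None,
        inner: vec![],
    };
    let PipelineData::ByteStream(stream, ..) = input else {
        return Err(not_external());
    };
    // `into_child` fails when the stream did not come from an external command.
    let child = stream.into_child().map_err(|_| not_external())?;
    let output = child.wait_with_output()?;
    let mut record = record! {
        "exit_code" => Value::int(output.exit_status.code().into(), head),
    };
    if let Some(stdout) = output.stdout {
        record.push("stdout", Value::binary(stdout, head));
    }
    if let Some(stderr) = output.stderr {
        record.push("stderr", Value::binary(stderr, head));
    }
    Ok(Value::record(record, head))
}
```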

View File

@ -69,18 +69,8 @@ impl Command for NuCheck {
parse_script(&mut working_set, None, &contents, is_debug, call.head)
}
}
PipelineData::ExternalStream {
stdout: Some(stream),
..
} => {
let mut contents = vec![];
let raw_stream: Vec<_> = stream.stream.collect();
for r in raw_stream {
match r {
Ok(v) => contents.extend(v),
Err(error) => return Err(error),
};
}
PipelineData::ByteStream(stream, ..) => {
let contents = stream.into_bytes()?;
if as_module {
parse_module(&mut working_set, None, &contents, is_debug, call.head)
@ -160,7 +150,7 @@ impl Command for NuCheck {
result: None,
},
Example {
description: "Parse an external stream as script by showing error message",
description: "Parse a byte stream as script by showing error message",
example: "open foo.nu | nu-check --debug script.nu",
result: None,
},

View File

@ -1,16 +1,16 @@
use nu_cmd_base::hook::eval_hook;
use nu_engine::{command_prelude::*, env_to_strings, get_eval_expression};
use nu_protocol::{ast::Expr, did_you_mean, ListStream, NuGlob, OutDest, RawStream};
use nu_protocol::{ast::Expr, did_you_mean, process::ChildProcess, ByteStream, NuGlob, OutDest};
use nu_system::ForegroundChild;
use nu_utils::IgnoreCaseExt;
use os_pipe::PipeReader;
use pathdiff::diff_paths;
use std::{
collections::HashMap,
io::{BufRead, BufReader, Read, Write},
io::Write,
path::{Path, PathBuf},
process::{Command as CommandSys, Stdio},
sync::{mpsc, Arc},
sync::Arc,
thread,
};
@ -163,89 +163,124 @@ impl ExternalCommand {
) -> Result<PipelineData, ShellError> {
let head = self.name.span;
#[allow(unused_mut)]
let (cmd, mut reader) = self.create_process(&input, false, head)?;
#[cfg(all(not(unix), not(windows)))] // are there any systems like this?
let child = ForegroundChild::spawn(cmd);
#[cfg(windows)]
let child = match ForegroundChild::spawn(cmd) {
Ok(child) => Ok(child),
Err(err) => {
// Running external commands on Windows has 2 points of complication:
// 1. Some common Windows commands are actually built in to cmd.exe, not executables in their own right.
// 2. We need to let users run batch scripts etc. (.bat, .cmd) without typing their extension
let (child, reader, input) = {
// We may need to run `create_process` again, so we have to clone the underlying
// file or pipe in `input` here first.
let (input_consumed, stdin) = match &input {
PipelineData::ByteStream(stream, ..) => match stream.source() {
nu_protocol::ByteStreamSource::Read(_) => (false, Stdio::piped()),
nu_protocol::ByteStreamSource::File(file) => {
(true, file.try_clone().err_span(head)?.into())
}
nu_protocol::ByteStreamSource::Child(child) => {
if let Some(nu_protocol::process::ChildPipe::Pipe(pipe)) = &child.stdout {
(true, pipe.try_clone().err_span(head)?.into())
} else {
(false, Stdio::piped())
}
}
},
PipelineData::Empty => (false, Stdio::inherit()),
_ => (false, Stdio::piped()),
};
// To support these situations, we have a fallback path that gets run if a command
// fails to be run as a normal executable:
// 1. "shell out" to cmd.exe if the command is a known cmd.exe internal command
// 2. Otherwise, use `which-rs` to look for batch files etc. then run those in cmd.exe
let mut input = input;
let (cmd, mut reader) = self.create_process(stdin, false, head)?;
let child = match ForegroundChild::spawn(cmd) {
Ok(child) => {
if input_consumed {
input = PipelineData::Empty;
}
Ok(child)
}
Err(err) => {
// Running external commands on Windows has 2 points of complication:
// 1. Some common Windows commands are actually built in to cmd.exe, not executables in their own right.
// 2. We need to let users run batch scripts etc. (.bat, .cmd) without typing their extension
// set the default value, maybe we'll override it later
let mut child = Err(err);
// To support these situations, we have a fallback path that gets run if a command
// fails to be run as a normal executable:
// 1. "shell out" to cmd.exe if the command is a known cmd.exe internal command
// 2. Otherwise, use `which-rs` to look for batch files etc. then run those in cmd.exe
// This has the full list of cmd.exe "internal" commands: https://ss64.com/nt/syntax-internal.html
// I (Reilly) went through the full list and whittled it down to ones that are potentially useful:
const CMD_INTERNAL_COMMANDS: [&str; 9] = [
"ASSOC", "CLS", "ECHO", "FTYPE", "MKLINK", "PAUSE", "START", "VER", "VOL",
];
let command_name = &self.name.item;
let looks_like_cmd_internal = CMD_INTERNAL_COMMANDS
.iter()
.any(|&cmd| command_name.eq_ignore_ascii_case(cmd));
// set the default value, maybe we'll override it later
let mut child = Err(err);
if looks_like_cmd_internal {
let (cmd, new_reader) = self.create_process(&input, true, head)?;
reader = new_reader;
child = ForegroundChild::spawn(cmd);
} else {
#[cfg(feature = "which-support")]
{
// maybe it's a batch file (foo.cmd) and the user typed `foo`. Try to find it with `which-rs`
// TODO: clean this up with an if-let chain once those are stable
if let Ok(path) =
nu_engine::env::path_str(engine_state, stack, self.name.span)
// This has the full list of cmd.exe "internal" commands: https://ss64.com/nt/syntax-internal.html
// I (Reilly) went through the full list and whittled it down to ones that are potentially useful:
const CMD_INTERNAL_COMMANDS: [&str; 9] = [
"ASSOC", "CLS", "ECHO", "FTYPE", "MKLINK", "PAUSE", "START", "VER", "VOL",
];
let command_name = &self.name.item;
let looks_like_cmd_internal = CMD_INTERNAL_COMMANDS
.iter()
.any(|&cmd| command_name.eq_ignore_ascii_case(cmd));
let (data, stdin) = extract_stdio(input);
input = data;
if looks_like_cmd_internal {
let (cmd, new_reader) = self.create_process(stdin, true, head)?;
reader = new_reader;
child = ForegroundChild::spawn(cmd);
} else {
#[cfg(feature = "which-support")]
{
if let Some(cwd) = self.env_vars.get("PWD") {
// append cwd to PATH so `which-rs` looks in the cwd too.
// this approximates what cmd.exe does.
let path_with_cwd = format!("{};{}", cwd, path);
if let Ok(which_path) =
which::which_in(&self.name.item, Some(path_with_cwd), cwd)
{
if let Some(file_name) = which_path.file_name() {
if !file_name.to_string_lossy().eq_ignore_case(command_name)
{
// which-rs found an executable file with a slightly different name
// than the one the user tried. Let's try running it
let mut new_command = self.clone();
new_command.name = Spanned {
item: file_name.to_string_lossy().to_string(),
span: self.name.span,
};
let (cmd, new_reader) =
new_command.create_process(&input, true, head)?;
reader = new_reader;
child = ForegroundChild::spawn(cmd);
// maybe it's a batch file (foo.cmd) and the user typed `foo`. Try to find it with `which-rs`
// TODO: clean this up with an if-let chain once those are stable
if let Ok(path) =
nu_engine::env::path_str(engine_state, stack, self.name.span)
{
if let Some(cwd) = self.env_vars.get("PWD") {
// append cwd to PATH so `which-rs` looks in the cwd too.
// this approximates what cmd.exe does.
let path_with_cwd = format!("{};{}", cwd, path);
if let Ok(which_path) =
which::which_in(&self.name.item, Some(path_with_cwd), cwd)
{
if let Some(file_name) = which_path.file_name() {
if !file_name
.to_string_lossy()
.eq_ignore_case(command_name)
{
// which-rs found an executable file with a slightly different name
// than the one the user tried. Let's try running it
let mut new_command = self.clone();
new_command.name = Spanned {
item: file_name.to_string_lossy().to_string(),
span: self.name.span,
};
let (cmd, new_reader) = new_command
.create_process(stdin, true, head)?;
reader = new_reader;
child = ForegroundChild::spawn(cmd);
}
}
}
}
}
}
}
}
child
}
child
}
};
(child, reader, input)
};
#[cfg(unix)]
let child = ForegroundChild::spawn(
cmd,
engine_state.is_interactive,
&engine_state.pipeline_externals_state,
);
let (child, reader, input) = {
let (input, stdin) = extract_stdio(input);
let (cmd, reader) = self.create_process(stdin, false, head)?;
let child = ForegroundChild::spawn(
cmd,
engine_state.is_interactive,
&engine_state.pipeline_externals_state,
);
(child, reader, input)
};
match child {
Err(err) => {
@ -381,9 +416,8 @@ impl ExternalCommand {
.name("external stdin worker".to_string())
.spawn(move || {
let input = match input {
input @ PipelineData::Value(Value::Binary { .. }, ..) => {
Ok(input)
}
input @ PipelineData::ByteStream(..) => input,
input @ PipelineData::Value(Value::Binary { .. }, ..) => input,
input => {
let stack = &mut stack.start_capture();
// Attempt to render the input as a table before piping it to the external.
@ -397,143 +431,39 @@ impl ExternalCommand {
stack,
&Call::new(head),
input,
)
)?
}
};
if let Ok(input) = input {
if let PipelineData::ByteStream(stream, ..) = input {
stream.write_to(&mut stdin_write)?;
} else {
for value in input.into_iter() {
let buf = match value {
Value::String { val, .. } => val.into_bytes(),
Value::Binary { val, .. } => val,
_ => return Err(()),
};
if stdin_write.write(&buf).is_err() {
return Ok(());
}
let buf = value.coerce_into_binary()?;
stdin_write.write_all(&buf)?;
}
}
Ok(())
Ok::<_, ShellError>(())
})
.err_span(head)?;
}
}
#[cfg(unix)]
let commandname = self.name.item.clone();
let span = self.name.span;
let (exit_code_tx, exit_code_rx) = mpsc::channel();
let child =
ChildProcess::new(child, reader, matches!(self.err, OutDest::Pipe), head)?;
let (stdout, stderr) = if let Some(combined) = reader {
(
Some(RawStream::new(
Box::new(ByteLines::new(combined)),
engine_state.ctrlc.clone(),
head,
None,
)),
None,
)
} else {
let stdout = child.as_mut().stdout.take().map(|out| {
RawStream::new(
Box::new(ByteLines::new(out)),
engine_state.ctrlc.clone(),
head,
None,
)
});
let stderr = child.as_mut().stderr.take().map(|err| {
RawStream::new(
Box::new(ByteLines::new(err)),
engine_state.ctrlc.clone(),
head,
None,
)
});
if matches!(self.err, OutDest::Pipe) {
(stderr, stdout)
} else {
(stdout, stderr)
}
};
// Create a thread to wait for an exit code.
thread::Builder::new()
.name("exit code waiter".into())
.spawn(move || match child.as_mut().wait() {
Err(err) => Err(ShellError::ExternalCommand {
label: "External command exited with error".into(),
help: err.to_string(),
span,
}),
Ok(x) => {
#[cfg(unix)]
{
use nix::sys::signal::Signal;
use nu_ansi_term::{Color, Style};
use std::os::unix::process::ExitStatusExt;
if x.core_dumped() {
let cause = x
.signal()
.and_then(|sig| {
Signal::try_from(sig).ok().map(Signal::as_str)
})
.unwrap_or("Something went wrong");
let style = Style::new().bold().on(Color::Red);
let message = format!(
"{cause}: child process '{commandname}' core dumped"
);
eprintln!("{}", style.paint(&message));
let _ = exit_code_tx.send(Value::error(
ShellError::ExternalCommand {
label: "core dumped".into(),
help: message,
span: head,
},
head,
));
return Ok(());
}
}
if let Some(code) = x.code() {
let _ = exit_code_tx.send(Value::int(code as i64, head));
} else if x.success() {
let _ = exit_code_tx.send(Value::int(0, head));
} else {
let _ = exit_code_tx.send(Value::int(-1, head));
}
Ok(())
}
})
.err_span(head)?;
let exit_code = Some(ListStream::new(
ValueReceiver::new(exit_code_rx),
head,
Ok(PipelineData::ByteStream(
ByteStream::child(child, head),
None,
));
Ok(PipelineData::ExternalStream {
stdout,
stderr,
exit_code,
span: head,
metadata: None,
trim_end_newline: true,
})
))
}
}
}
pub fn create_process(
&self,
input: &PipelineData,
stdin: Stdio,
use_cmd: bool,
span: Span,
) -> Result<(CommandSys, Option<PipeReader>), ShellError> {
@ -578,11 +508,7 @@ impl ExternalCommand {
None
};
// If there is an input from the pipeline. The stdin from the process
// is piped so it can be used to send the input information
if !input.is_nothing() {
process.stdin(Stdio::piped());
}
process.stdin(stdin);
Ok((process, reader))
}
@ -764,51 +690,14 @@ fn remove_quotes(input: String) -> String {
}
}
struct ByteLines<R: Read>(BufReader<R>);
impl<R: Read> ByteLines<R> {
fn new(read: R) -> Self {
Self(BufReader::new(read))
}
}
impl<R: Read> Iterator for ByteLines<R> {
type Item = Result<Vec<u8>, ShellError>;
fn next(&mut self) -> Option<Self::Item> {
let mut buf = Vec::new();
// `read_until` will never stop reading unless `\n` or EOF is encountered,
// so let's limit the number of bytes using `take` as the Rust docs suggest.
let capacity = self.0.capacity() as u64;
let mut reader = (&mut self.0).take(capacity);
match reader.read_until(b'\n', &mut buf) {
Ok(0) => None,
Ok(_) => Some(Ok(buf)),
Err(e) => Some(Err(e.into())),
}
}
}
// Receiver used for the ListStream
// It implements iterator so it can be used as a ListStream
struct ValueReceiver {
rx: mpsc::Receiver<Value>,
}
impl ValueReceiver {
pub fn new(rx: mpsc::Receiver<Value>) -> Self {
Self { rx }
}
}
impl Iterator for ValueReceiver {
type Item = Value;
fn next(&mut self) -> Option<Self::Item> {
match self.rx.recv() {
Ok(v) => Some(v),
Err(_) => None,
}
fn extract_stdio(pipeline: PipelineData) -> (PipelineData, Stdio) {
match pipeline {
PipelineData::ByteStream(stream, metadata) => match stream.into_stdio() {
Ok(pipe) => (PipelineData::Empty, pipe),
Err(stream) => (PipelineData::ByteStream(stream, metadata), Stdio::piped()),
},
PipelineData::Empty => (PipelineData::Empty, Stdio::inherit()),
data => (data, Stdio::piped()),
}
}
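
`extract_stdio` above decides whether the upstream byte stream can be handed to the child verbatim as its stdin (`into_stdio`) or must be piped and fed by the stdin worker thread. A sketch of that decision wired to a plain `std::process::Command` (a simplification: the real code goes through `ForegroundChild` and `create_process`):

```rust
use std::process::{Child, Command, Stdio};

use nu_protocol::{PipelineData, ShellError};

// Choose the child's stdin from the pipeline: a byte stream backed by a file or child
// pipe is passed through directly; anything else falls back to a piped stdin that the
// caller feeds afterwards, as the stdin worker thread above does.
fn spawn_with_input(
    mut cmd: Command,
    input: PipelineData,
) -> Result<(Child, PipelineData), ShellError> {
    let (rest, stdin) = match input {
        PipelineData::ByteStream(stream, metadata) => match stream.into_stdio() {
            Ok(stdio) => (PipelineData::Empty, stdio),
            Err(stream) => (PipelineData::ByteStream(stream, metadata), Stdio::piped()),
        },
        PipelineData::Empty => (PipelineData::Empty, Stdio::inherit()),
        data => (data, Stdio::piped()),
    };
    let child = cmd
        .stdin(stdin)
        .spawn()
        .map_err(|err| ShellError::GenericError {
            error: "Failed to spawn external command".into(),
            msg: err.to_string(),
            span: None,
            help: None,
            inner: vec![],
        })?;
    Ok((child, rest))
}
```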

View File

@ -6,7 +6,7 @@ use lscolors::{LsColors, Style};
use nu_color_config::{color_from_hex, StyleComputer, TextStyle};
use nu_engine::{command_prelude::*, env::get_config, env_to_string};
use nu_protocol::{
Config, DataSource, ListStream, PipelineMetadata, RawStream, TableMode, ValueIterator,
ByteStream, Config, DataSource, ListStream, PipelineMetadata, TableMode, ValueIterator,
};
use nu_table::{
common::create_nu_table_config, CollapsedTable, ExpandedTable, JustTable, NuTable, NuTableCell,
@ -14,8 +14,12 @@ use nu_table::{
};
use nu_utils::get_ls_colors;
use std::{
collections::VecDeque, io::IsTerminal, path::PathBuf, str::FromStr, sync::atomic::AtomicBool,
sync::Arc, time::Instant,
collections::VecDeque,
io::{Cursor, IsTerminal},
path::PathBuf,
str::FromStr,
sync::{atomic::AtomicBool, Arc},
time::Instant,
};
use terminal_size::{Height, Width};
use url::Url;
@ -360,25 +364,16 @@ fn handle_table_command(
) -> Result<PipelineData, ShellError> {
let span = input.data.span().unwrap_or(input.call.head);
match input.data {
PipelineData::ExternalStream { .. } => Ok(input.data),
PipelineData::ByteStream(..) => Ok(input.data),
PipelineData::Value(Value::Binary { val, .. }, ..) => {
let bytes = format!("{}\n", nu_pretty_hex::pretty_hex(&val)).into_bytes();
let bytes = {
let mut str = nu_pretty_hex::pretty_hex(&val);
str.push('\n');
str.into_bytes()
};
let ctrlc = input.engine_state.ctrlc.clone();
let stream = RawStream::new(
Box::new([Ok(bytes)].into_iter()),
ctrlc,
input.call.head,
None,
);
Ok(PipelineData::ExternalStream {
stdout: Some(stream),
stderr: None,
exit_code: None,
span: input.call.head,
metadata: None,
trim_end_newline: false,
})
let stream = ByteStream::read(Cursor::new(bytes), input.call.head, ctrlc);
Ok(PipelineData::ByteStream(stream, None))
}
// None of these two receive a StyleComputer because handle_row_stream() can produce it by itself using engine_state and stack.
PipelineData::Value(Value::List { vals, .. }, metadata) => {
@ -613,16 +608,8 @@ fn handle_row_stream(
ctrlc.clone(),
cfg,
);
let stream = RawStream::new(Box::new(paginator), ctrlc, input.call.head, None);
Ok(PipelineData::ExternalStream {
stdout: Some(stream),
stderr: None,
exit_code: None,
span: input.call.head,
metadata: None,
trim_end_newline: false,
})
let stream = ByteStream::from_result_iter(paginator, input.call.head, None);
Ok(PipelineData::ByteStream(stream, None))
}
fn make_clickable_link(