Replace ExternalStream with new ByteStream type (#12774)

# Description
This PR introduces a `ByteStream` type which is a `Read`-able stream of
bytes. Internally, it has an enum over three different byte stream
sources:
```rust
pub enum ByteStreamSource {
    Read(Box<dyn Read + Send + 'static>),
    File(File),
    Child(ChildProcess),
}
```

This is in comparison to the current `RawStream` type, which is an
`Iterator<Item = Vec<u8>>` and has to allocate for each read chunk.

Currently, `PipelineData::ExternalStream` serves a weird dual role where
it is either external command output or a wrapper around `RawStream`.
`ByteStream` makes this distinction more clear (via `ByteStreamSource`)
and replaces `PipelineData::ExternalStream` in this PR:
```rust
pub enum PipelineData {
    Empty,
    Value(Value, Option<PipelineMetadata>),
    ListStream(ListStream, Option<PipelineMetadata>),
    ByteStream(ByteStream, Option<PipelineMetadata>),
}
```

The PR is relatively large, but a decent amount of it is just repetitive
changes.

This PR fixes #7017, fixes #10763, and fixes #12369.

This PR also improves performance when piping external commands. Nushell
should, in most cases, have competitive pipeline throughput compared to,
e.g., bash.
| Command | Before (MB/s) | After (MB/s) | Bash (MB/s) |
| -------------------------------------------------- | -------------:| ------------:| -----------:|
| `throughput \| rg 'x'` | 3059 | 3744 | 3739 |
| `throughput \| nu --testbin relay o> /dev/null` | 3508 | 8087 | 8136 |

# User-Facing Changes
- This is a breaking change for the plugin communication protocol,
because the `ExternalStreamInfo` was replaced with `ByteStreamInfo`.
Plugins now only have to deal with a single input stream, as opposed to
the previous three streams: stdout, stderr, and exit code.
- The output of `describe` has been changed for external/byte streams.
- Temporary breaking change: `bytes starts-with` no longer works with
byte streams. This is to keep the PR smaller, and `bytes ends-with`
already does not work on byte streams.
- If a process core dumped, then instead of having a `Value::Error` in
the `exit_code` column of the output returned from `complete`, it now is
a `Value::Int` with the negation of the signal number.

# After Submitting
- Update docs and book as necessary
- Release notes (e.g., plugin protocol changes)
- Adapt/convert commands to work with byte streams (high priority is
`str length`, `bytes starts-with`, and maybe `bytes ends-with`).
- Refactor the `tee` code, Devyn has already done some work on this.

---------

Co-authored-by: Devyn Cairns <devyn.cairns@gmail.com>
This commit is contained in:
Ian Manske 2024-05-16 14:11:18 +00:00 committed by GitHub
parent 1b8eb23785
commit 6fd854ed9f
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
210 changed files with 3955 additions and 4012 deletions

1
Cargo.lock generated
View File

@ -3254,6 +3254,7 @@ dependencies = [
"indexmap", "indexmap",
"lru", "lru",
"miette", "miette",
"nix",
"nu-path", "nu-path",
"nu-system", "nu-system",
"nu-test-support", "nu-test-support",

View File

@ -103,9 +103,8 @@ impl NuCompleter {
PipelineData::empty(), PipelineData::empty(),
); );
match result { match result.and_then(|data| data.into_value(span)) {
Ok(pd) => { Ok(value) => {
let value = pd.into_value(span);
if let Value::List { vals, .. } = value { if let Value::List { vals, .. } = value {
let result = let result =
map_value_completions(vals.iter(), Span::new(span.start, span.end), offset); map_value_completions(vals.iter(), Span::new(span.start, span.end), offset);

View File

@ -74,55 +74,53 @@ impl Completer for CustomCompletion {
// Parse result // Parse result
let suggestions = result let suggestions = result
.map(|pd| { .and_then(|data| data.into_value(span))
let value = pd.into_value(span); .map(|value| match &value {
match &value { Value::Record { val, .. } => {
Value::Record { val, .. } => { let completions = val
let completions = val .get("completions")
.get("completions") .and_then(|val| {
.and_then(|val| { val.as_list()
val.as_list() .ok()
.ok() .map(|it| map_value_completions(it.iter(), span, offset))
.map(|it| map_value_completions(it.iter(), span, offset)) })
}) .unwrap_or_default();
.unwrap_or_default(); let options = val.get("options");
let options = val.get("options");
if let Some(Value::Record { val: options, .. }) = &options { if let Some(Value::Record { val: options, .. }) = &options {
let should_sort = options let should_sort = options
.get("sort") .get("sort")
.and_then(|val| val.as_bool().ok()) .and_then(|val| val.as_bool().ok())
.unwrap_or(false); .unwrap_or(false);
if should_sort { if should_sort {
self.sort_by = SortBy::Ascending; self.sort_by = SortBy::Ascending;
}
custom_completion_options = Some(CompletionOptions {
case_sensitive: options
.get("case_sensitive")
.and_then(|val| val.as_bool().ok())
.unwrap_or(true),
positional: options
.get("positional")
.and_then(|val| val.as_bool().ok())
.unwrap_or(true),
match_algorithm: match options.get("completion_algorithm") {
Some(option) => option
.coerce_string()
.ok()
.and_then(|option| option.try_into().ok())
.unwrap_or(MatchAlgorithm::Prefix),
None => completion_options.match_algorithm,
},
});
} }
completions custom_completion_options = Some(CompletionOptions {
case_sensitive: options
.get("case_sensitive")
.and_then(|val| val.as_bool().ok())
.unwrap_or(true),
positional: options
.get("positional")
.and_then(|val| val.as_bool().ok())
.unwrap_or(true),
match_algorithm: match options.get("completion_algorithm") {
Some(option) => option
.coerce_string()
.ok()
.and_then(|option| option.try_into().ok())
.unwrap_or(MatchAlgorithm::Prefix),
None => completion_options.match_algorithm,
},
});
} }
Value::List { vals, .. } => map_value_completions(vals.iter(), span, offset),
_ => vec![], completions
} }
Value::List { vals, .. } => map_value_completions(vals.iter(), span, offset),
_ => vec![],
}) })
.unwrap_or_default(); .unwrap_or_default();

View File

@ -306,14 +306,15 @@ pub fn migrate_old_plugin_file(engine_state: &EngineState, storage_path: &str) -
let mut engine_state = engine_state.clone(); let mut engine_state = engine_state.clone();
let mut stack = Stack::new(); let mut stack = Stack::new();
if !eval_source( if eval_source(
&mut engine_state, &mut engine_state,
&mut stack, &mut stack,
&old_contents, &old_contents,
&old_plugin_file_path.to_string_lossy(), &old_plugin_file_path.to_string_lossy(),
PipelineData::Empty, PipelineData::Empty,
false, false,
) { ) != 0
{
return false; return false;
} }

View File

@ -1,5 +1,4 @@
use log::info; use log::info;
use miette::Result;
use nu_engine::{convert_env_values, eval_block}; use nu_engine::{convert_env_values, eval_block};
use nu_parser::parse; use nu_parser::parse;
use nu_protocol::{ use nu_protocol::{
@ -59,9 +58,10 @@ pub fn evaluate_commands(
t_mode.coerce_str()?.parse().unwrap_or_default(); t_mode.coerce_str()?.parse().unwrap_or_default();
} }
let exit_code = pipeline.print(engine_state, stack, no_newline, false)?; if let Some(status) = pipeline.print(engine_state, stack, no_newline, false)? {
if exit_code != 0 { if status.code() != 0 {
std::process::exit(exit_code as i32); std::process::exit(status.code())
}
} }
info!("evaluate {}:{}:{}", file!(), line!(), column!()); info!("evaluate {}:{}:{}", file!(), line!(), column!());

View File

@ -96,7 +96,7 @@ pub fn evaluate_file(
engine_state.merge_delta(working_set.delta)?; engine_state.merge_delta(working_set.delta)?;
// Check if the file contains a main command. // Check if the file contains a main command.
if engine_state.find_decl(b"main", &[]).is_some() { let exit_code = if engine_state.find_decl(b"main", &[]).is_some() {
// Evaluate the file, but don't run main yet. // Evaluate the file, but don't run main yet.
let pipeline = let pipeline =
match eval_block::<WithoutDebug>(engine_state, stack, &block, PipelineData::empty()) { match eval_block::<WithoutDebug>(engine_state, stack, &block, PipelineData::empty()) {
@ -109,26 +109,29 @@ pub fn evaluate_file(
}; };
// Print the pipeline output of the last command of the file. // Print the pipeline output of the last command of the file.
let exit_code = pipeline.print(engine_state, stack, true, false)?; if let Some(status) = pipeline.print(engine_state, stack, true, false)? {
if exit_code != 0 { if status.code() != 0 {
std::process::exit(exit_code as i32); std::process::exit(status.code())
}
} }
// Invoke the main command with arguments. // Invoke the main command with arguments.
// Arguments with whitespaces are quoted, thus can be safely concatenated by whitespace. // Arguments with whitespaces are quoted, thus can be safely concatenated by whitespace.
let args = format!("main {}", args.join(" ")); let args = format!("main {}", args.join(" "));
if !eval_source( eval_source(
engine_state, engine_state,
stack, stack,
args.as_bytes(), args.as_bytes(),
"<commandline>", "<commandline>",
input, input,
true, true,
) { )
std::process::exit(1); } else {
} eval_source(engine_state, stack, &file, file_path_str, input, true)
} else if !eval_source(engine_state, stack, &file, file_path_str, input, true) { };
std::process::exit(1);
if exit_code != 0 {
std::process::exit(exit_code)
} }
info!("evaluate {}:{}:{}", file!(), line!(), column!()); info!("evaluate {}:{}:{}", file!(), line!(), column!());

View File

@ -59,8 +59,7 @@ impl Completer for NuMenuCompleter {
let res = eval_block::<WithoutDebug>(&self.engine_state, &mut self.stack, block, input); let res = eval_block::<WithoutDebug>(&self.engine_state, &mut self.stack, block, input);
if let Ok(values) = res { if let Ok(values) = res.and_then(|data| data.into_value(self.span)) {
let values = values.into_value(self.span);
convert_to_suggestions(values, line, pos, self.only_buffer_difference) convert_to_suggestions(values, line, pos, self.only_buffer_difference)
} else { } else {
Vec::new() Vec::new()

View File

@ -4,7 +4,7 @@ use nu_parser::{escape_quote_string, lex, parse, unescape_unquote_string, Token,
use nu_protocol::{ use nu_protocol::{
debugger::WithoutDebug, debugger::WithoutDebug,
engine::{EngineState, Stack, StateWorkingSet}, engine::{EngineState, Stack, StateWorkingSet},
print_if_stream, report_error, report_error_new, PipelineData, ShellError, Span, Value, report_error, report_error_new, PipelineData, ShellError, Span, Value,
}; };
#[cfg(windows)] #[cfg(windows)]
use nu_utils::enable_vt_processing; use nu_utils::enable_vt_processing;
@ -206,9 +206,48 @@ pub fn eval_source(
fname: &str, fname: &str,
input: PipelineData, input: PipelineData,
allow_return: bool, allow_return: bool,
) -> bool { ) -> i32 {
let start_time = std::time::Instant::now(); let start_time = std::time::Instant::now();
let exit_code = match evaluate_source(engine_state, stack, source, fname, input, allow_return) {
Ok(code) => code.unwrap_or(0),
Err(err) => {
report_error_new(engine_state, &err);
1
}
};
stack.add_env_var(
"LAST_EXIT_CODE".to_string(),
Value::int(exit_code.into(), Span::unknown()),
);
// reset vt processing, aka ansi because illbehaved externals can break it
#[cfg(windows)]
{
let _ = enable_vt_processing();
}
perf(
&format!("eval_source {}", &fname),
start_time,
file!(),
line!(),
column!(),
engine_state.get_config().use_ansi_coloring,
);
exit_code
}
fn evaluate_source(
engine_state: &mut EngineState,
stack: &mut Stack,
source: &[u8],
fname: &str,
input: PipelineData,
allow_return: bool,
) -> Result<Option<i32>, ShellError> {
let (block, delta) = { let (block, delta) = {
let mut working_set = StateWorkingSet::new(engine_state); let mut working_set = StateWorkingSet::new(engine_state);
let output = parse( let output = parse(
@ -222,97 +261,40 @@ pub fn eval_source(
} }
if let Some(err) = working_set.parse_errors.first() { if let Some(err) = working_set.parse_errors.first() {
set_last_exit_code(stack, 1);
report_error(&working_set, err); report_error(&working_set, err);
return false; return Ok(Some(1));
} }
(output, working_set.render()) (output, working_set.render())
}; };
if let Err(err) = engine_state.merge_delta(delta) { engine_state.merge_delta(delta)?;
set_last_exit_code(stack, 1);
report_error_new(engine_state, &err);
return false;
}
let b = if allow_return { let pipeline = if allow_return {
eval_block_with_early_return::<WithoutDebug>(engine_state, stack, &block, input) eval_block_with_early_return::<WithoutDebug>(engine_state, stack, &block, input)
} else { } else {
eval_block::<WithoutDebug>(engine_state, stack, &block, input) eval_block::<WithoutDebug>(engine_state, stack, &block, input)
}?;
let status = if let PipelineData::ByteStream(stream, ..) = pipeline {
stream.print(false)?
} else {
if let Some(hook) = engine_state.get_config().hooks.display_output.clone() {
let pipeline = eval_hook(
engine_state,
stack,
Some(pipeline),
vec![],
&hook,
"display_output",
)?;
pipeline.print(engine_state, stack, false, false)
} else {
pipeline.print(engine_state, stack, true, false)
}?
}; };
match b { Ok(status.map(|status| status.code()))
Ok(pipeline_data) => {
let config = engine_state.get_config();
let result;
if let PipelineData::ExternalStream {
stdout: stream,
stderr: stderr_stream,
exit_code,
..
} = pipeline_data
{
result = print_if_stream(stream, stderr_stream, false, exit_code);
} else if let Some(hook) = config.hooks.display_output.clone() {
match eval_hook(
engine_state,
stack,
Some(pipeline_data),
vec![],
&hook,
"display_output",
) {
Err(err) => {
result = Err(err);
}
Ok(val) => {
result = val.print(engine_state, stack, false, false);
}
}
} else {
result = pipeline_data.print(engine_state, stack, true, false);
}
match result {
Err(err) => {
report_error_new(engine_state, &err);
return false;
}
Ok(exit_code) => {
set_last_exit_code(stack, exit_code);
}
}
// reset vt processing, aka ansi because illbehaved externals can break it
#[cfg(windows)]
{
let _ = enable_vt_processing();
}
}
Err(err) => {
set_last_exit_code(stack, 1);
report_error_new(engine_state, &err);
return false;
}
}
perf(
&format!("eval_source {}", &fname),
start_time,
file!(),
line!(),
column!(),
engine_state.get_config().use_ansi_coloring,
);
true
}
fn set_last_exit_code(stack: &mut Stack, exit_code: i64) {
stack.add_env_var(
"LAST_EXIT_CODE".to_string(),
Value::int(exit_code, Span::unknown()),
);
} }
#[cfg(test)] #[cfg(test)]

View File

@ -79,7 +79,7 @@ impl Command for CastDF {
call: &Call, call: &Call,
input: PipelineData, input: PipelineData,
) -> Result<PipelineData, ShellError> { ) -> Result<PipelineData, ShellError> {
let value = input.into_value(call.head); let value = input.into_value(call.head)?;
if NuLazyFrame::can_downcast(&value) { if NuLazyFrame::can_downcast(&value) {
let (dtype, column_nm) = df_args(engine_state, stack, call)?; let (dtype, column_nm) = df_args(engine_state, stack, call)?;
let df = NuLazyFrame::try_from_value(value)?; let df = NuLazyFrame::try_from_value(value)?;

View File

@ -72,8 +72,7 @@ impl Command for FilterWith {
call: &Call, call: &Call,
input: PipelineData, input: PipelineData,
) -> Result<PipelineData, ShellError> { ) -> Result<PipelineData, ShellError> {
let value = input.into_value(call.head); let value = input.into_value(call.head)?;
if NuLazyFrame::can_downcast(&value) { if NuLazyFrame::can_downcast(&value) {
let df = NuLazyFrame::try_from_value(value)?; let df = NuLazyFrame::try_from_value(value)?;
command_lazy(engine_state, stack, call, df) command_lazy(engine_state, stack, call, df)

View File

@ -86,7 +86,7 @@ impl Command for FirstDF {
call: &Call, call: &Call,
input: PipelineData, input: PipelineData,
) -> Result<PipelineData, ShellError> { ) -> Result<PipelineData, ShellError> {
let value = input.into_value(call.head); let value = input.into_value(call.head)?;
if NuDataFrame::can_downcast(&value) { if NuDataFrame::can_downcast(&value) {
let df = NuDataFrame::try_from_value(value)?; let df = NuDataFrame::try_from_value(value)?;
command(engine_state, stack, call, df) command(engine_state, stack, call, df)

View File

@ -61,7 +61,7 @@ impl Command for LastDF {
call: &Call, call: &Call,
input: PipelineData, input: PipelineData,
) -> Result<PipelineData, ShellError> { ) -> Result<PipelineData, ShellError> {
let value = input.into_value(call.head); let value = input.into_value(call.head)?;
if NuDataFrame::can_downcast(&value) { if NuDataFrame::can_downcast(&value) {
let df = NuDataFrame::try_from_value(value)?; let df = NuDataFrame::try_from_value(value)?;
command(engine_state, stack, call, df) command(engine_state, stack, call, df)

View File

@ -109,8 +109,7 @@ impl Command for RenameDF {
call: &Call, call: &Call,
input: PipelineData, input: PipelineData,
) -> Result<PipelineData, ShellError> { ) -> Result<PipelineData, ShellError> {
let value = input.into_value(call.head); let value = input.into_value(call.head)?;
if NuLazyFrame::can_downcast(&value) { if NuLazyFrame::can_downcast(&value) {
let df = NuLazyFrame::try_from_value(value)?; let df = NuLazyFrame::try_from_value(value)?;
command_lazy(engine_state, stack, call, df) command_lazy(engine_state, stack, call, df)

View File

@ -76,7 +76,7 @@ impl Command for ToNu {
call: &Call, call: &Call,
input: PipelineData, input: PipelineData,
) -> Result<PipelineData, ShellError> { ) -> Result<PipelineData, ShellError> {
let value = input.into_value(call.head); let value = input.into_value(call.head)?;
if NuDataFrame::can_downcast(&value) { if NuDataFrame::can_downcast(&value) {
dataframe_command(engine_state, stack, call, value) dataframe_command(engine_state, stack, call, value)
} else { } else {

View File

@ -102,8 +102,7 @@ impl Command for WithColumn {
call: &Call, call: &Call,
input: PipelineData, input: PipelineData,
) -> Result<PipelineData, ShellError> { ) -> Result<PipelineData, ShellError> {
let value = input.into_value(call.head); let value = input.into_value(call.head)?;
if NuLazyFrame::can_downcast(&value) { if NuLazyFrame::can_downcast(&value) {
let df = NuLazyFrame::try_from_value(value)?; let df = NuLazyFrame::try_from_value(value)?;
command_lazy(engine_state, stack, call, df) command_lazy(engine_state, stack, call, df)

View File

@ -172,7 +172,7 @@ macro_rules! lazy_expr_command {
call: &Call, call: &Call,
input: PipelineData, input: PipelineData,
) -> Result<PipelineData, ShellError> { ) -> Result<PipelineData, ShellError> {
let value = input.into_value(call.head); let value = input.into_value(call.head)?;
if NuDataFrame::can_downcast(&value) { if NuDataFrame::can_downcast(&value) {
let lazy = NuLazyFrame::try_from_value(value)?; let lazy = NuLazyFrame::try_from_value(value)?;
let lazy = NuLazyFrame::new( let lazy = NuLazyFrame::new(
@ -271,7 +271,7 @@ macro_rules! lazy_expr_command {
call: &Call, call: &Call,
input: PipelineData, input: PipelineData,
) -> Result<PipelineData, ShellError> { ) -> Result<PipelineData, ShellError> {
let value = input.into_value(call.head); let value = input.into_value(call.head)?;
if NuDataFrame::can_downcast(&value) { if NuDataFrame::can_downcast(&value) {
let lazy = NuLazyFrame::try_from_value(value)?; let lazy = NuLazyFrame::try_from_value(value)?;
let lazy = NuLazyFrame::new( let lazy = NuLazyFrame::new(

View File

@ -91,7 +91,7 @@ impl Command for ExprOtherwise {
let otherwise_predicate: Value = call.req(engine_state, stack, 0)?; let otherwise_predicate: Value = call.req(engine_state, stack, 0)?;
let otherwise_predicate = NuExpression::try_from_value(otherwise_predicate)?; let otherwise_predicate = NuExpression::try_from_value(otherwise_predicate)?;
let value = input.into_value(call.head); let value = input.into_value(call.head)?;
let complete: NuExpression = match NuWhen::try_from_value(value)? { let complete: NuExpression = match NuWhen::try_from_value(value)? {
NuWhen::Then(then) => then.otherwise(otherwise_predicate.into_polars()).into(), NuWhen::Then(then) => then.otherwise(otherwise_predicate.into_polars()).into(),
NuWhen::ChainedThen(chained_when) => chained_when NuWhen::ChainedThen(chained_when) => chained_when

View File

@ -67,7 +67,7 @@ impl Command for ExprQuantile {
call: &Call, call: &Call,
input: PipelineData, input: PipelineData,
) -> Result<PipelineData, ShellError> { ) -> Result<PipelineData, ShellError> {
let value = input.into_value(call.head); let value = input.into_value(call.head)?;
let quantile: f64 = call.req(engine_state, stack, 0)?; let quantile: f64 = call.req(engine_state, stack, 0)?;
let expr = NuExpression::try_from_value(value)?; let expr = NuExpression::try_from_value(value)?;

View File

@ -103,7 +103,7 @@ impl Command for ExprWhen {
let then_predicate: Value = call.req(engine_state, stack, 1)?; let then_predicate: Value = call.req(engine_state, stack, 1)?;
let then_predicate = NuExpression::try_from_value(then_predicate)?; let then_predicate = NuExpression::try_from_value(then_predicate)?;
let value = input.into_value(call.head); let value = input.into_value(call.head)?;
let when_then: NuWhen = match value { let when_then: NuWhen = match value {
Value::Nothing { .. } => when(when_predicate.into_polars()) Value::Nothing { .. } => when(when_predicate.into_polars())
.then(then_predicate.into_polars()) .then(then_predicate.into_polars())

View File

@ -100,7 +100,7 @@ impl Command for LazyExplode {
} }
pub(crate) fn explode(call: &Call, input: PipelineData) -> Result<PipelineData, ShellError> { pub(crate) fn explode(call: &Call, input: PipelineData) -> Result<PipelineData, ShellError> {
let value = input.into_value(call.head); let value = input.into_value(call.head)?;
if NuDataFrame::can_downcast(&value) { if NuDataFrame::can_downcast(&value) {
let df = NuLazyFrame::try_from_value(value)?; let df = NuLazyFrame::try_from_value(value)?;
let columns: Vec<String> = call let columns: Vec<String> = call

View File

@ -82,7 +82,7 @@ impl Command for LazyFillNA {
input: PipelineData, input: PipelineData,
) -> Result<PipelineData, ShellError> { ) -> Result<PipelineData, ShellError> {
let fill: Value = call.req(engine_state, stack, 0)?; let fill: Value = call.req(engine_state, stack, 0)?;
let value = input.into_value(call.head); let value = input.into_value(call.head)?;
if NuExpression::can_downcast(&value) { if NuExpression::can_downcast(&value) {
let expr = NuExpression::try_from_value(value)?; let expr = NuExpression::try_from_value(value)?;

View File

@ -59,7 +59,7 @@ impl Command for LazyFillNull {
input: PipelineData, input: PipelineData,
) -> Result<PipelineData, ShellError> { ) -> Result<PipelineData, ShellError> {
let fill: Value = call.req(engine_state, stack, 0)?; let fill: Value = call.req(engine_state, stack, 0)?;
let value = input.into_value(call.head); let value = input.into_value(call.head)?;
if NuExpression::can_downcast(&value) { if NuExpression::can_downcast(&value) {
let expr = NuExpression::try_from_value(value)?; let expr = NuExpression::try_from_value(value)?;

View File

@ -219,7 +219,7 @@ impl Command for LazyJoin {
let suffix: Option<String> = call.get_flag(engine_state, stack, "suffix")?; let suffix: Option<String> = call.get_flag(engine_state, stack, "suffix")?;
let suffix = suffix.unwrap_or_else(|| "_x".into()); let suffix = suffix.unwrap_or_else(|| "_x".into());
let value = input.into_value(call.head); let value = input.into_value(call.head)?;
let lazy = NuLazyFrame::try_from_value(value)?; let lazy = NuLazyFrame::try_from_value(value)?;
let from_eager = lazy.from_eager; let from_eager = lazy.from_eager;
let lazy = lazy.into_polars(); let lazy = lazy.into_polars();

View File

@ -54,7 +54,7 @@ impl Command for LazyQuantile {
call: &Call, call: &Call,
input: PipelineData, input: PipelineData,
) -> Result<PipelineData, ShellError> { ) -> Result<PipelineData, ShellError> {
let value = input.into_value(call.head); let value = input.into_value(call.head)?;
let quantile: f64 = call.req(engine_state, stack, 0)?; let quantile: f64 = call.req(engine_state, stack, 0)?;
let lazy = NuLazyFrame::try_from_value(value)?; let lazy = NuLazyFrame::try_from_value(value)?;

View File

@ -68,7 +68,7 @@ impl Command for IsNotNull {
call: &Call, call: &Call,
input: PipelineData, input: PipelineData,
) -> Result<PipelineData, ShellError> { ) -> Result<PipelineData, ShellError> {
let value = input.into_value(call.head); let value = input.into_value(call.head)?;
if NuDataFrame::can_downcast(&value) { if NuDataFrame::can_downcast(&value) {
let df = NuDataFrame::try_from_value(value)?; let df = NuDataFrame::try_from_value(value)?;
command(engine_state, stack, call, df) command(engine_state, stack, call, df)

View File

@ -68,7 +68,7 @@ impl Command for IsNull {
call: &Call, call: &Call,
input: PipelineData, input: PipelineData,
) -> Result<PipelineData, ShellError> { ) -> Result<PipelineData, ShellError> {
let value = input.into_value(call.head); let value = input.into_value(call.head)?;
if NuDataFrame::can_downcast(&value) { if NuDataFrame::can_downcast(&value) {
let df = NuDataFrame::try_from_value(value)?; let df = NuDataFrame::try_from_value(value)?;
command(engine_state, stack, call, df) command(engine_state, stack, call, df)

View File

@ -60,7 +60,7 @@ impl Command for NUnique {
call: &Call, call: &Call,
input: PipelineData, input: PipelineData,
) -> Result<PipelineData, ShellError> { ) -> Result<PipelineData, ShellError> {
let value = input.into_value(call.head); let value = input.into_value(call.head)?;
if NuDataFrame::can_downcast(&value) { if NuDataFrame::can_downcast(&value) {
let df = NuDataFrame::try_from_value(value)?; let df = NuDataFrame::try_from_value(value)?;
command(engine_state, stack, call, df) command(engine_state, stack, call, df)

View File

@ -56,8 +56,7 @@ impl Command for Shift {
call: &Call, call: &Call,
input: PipelineData, input: PipelineData,
) -> Result<PipelineData, ShellError> { ) -> Result<PipelineData, ShellError> {
let value = input.into_value(call.head); let value = input.into_value(call.head)?;
if NuLazyFrame::can_downcast(&value) { if NuLazyFrame::can_downcast(&value) {
let df = NuLazyFrame::try_from_value(value)?; let df = NuLazyFrame::try_from_value(value)?;
command_lazy(engine_state, stack, call, df) command_lazy(engine_state, stack, call, df)

View File

@ -72,8 +72,7 @@ impl Command for Unique {
call: &Call, call: &Call,
input: PipelineData, input: PipelineData,
) -> Result<PipelineData, ShellError> { ) -> Result<PipelineData, ShellError> {
let value = input.into_value(call.head); let value = input.into_value(call.head)?;
if NuLazyFrame::can_downcast(&value) { if NuLazyFrame::can_downcast(&value) {
let df = NuLazyFrame::try_from_value(value)?; let df = NuLazyFrame::try_from_value(value)?;
command_lazy(engine_state, stack, call, df) command_lazy(engine_state, stack, call, df)

View File

@ -80,7 +80,8 @@ pub fn test_dataframe_example(engine_state: &mut Box<EngineState>, example: &Exa
let result = let result =
eval_block::<WithoutDebug>(engine_state, &mut stack, &block, PipelineData::empty()) eval_block::<WithoutDebug>(engine_state, &mut stack, &block, PipelineData::empty())
.unwrap_or_else(|err| panic!("test eval error in `{}`: {:?}", example.example, err)) .unwrap_or_else(|err| panic!("test eval error in `{}`: {:?}", example.example, err))
.into_value(Span::test_data()); .into_value(Span::test_data())
.expect("ok value");
println!("input: {}", example.example); println!("input: {}", example.example);
println!("result: {result:?}"); println!("result: {result:?}");

View File

@ -297,7 +297,7 @@ impl NuDataFrame {
} }
pub fn try_from_pipeline(input: PipelineData, span: Span) -> Result<Self, ShellError> { pub fn try_from_pipeline(input: PipelineData, span: Span) -> Result<Self, ShellError> {
let value = input.into_value(span); let value = input.into_value(span)?;
Self::try_from_value(value) Self::try_from_value(value)
} }

View File

@ -84,7 +84,7 @@ impl NuExpression {
} }
pub fn try_from_pipeline(input: PipelineData, span: Span) -> Result<Self, ShellError> { pub fn try_from_pipeline(input: PipelineData, span: Span) -> Result<Self, ShellError> {
let value = input.into_value(span); let value = input.into_value(span)?;
Self::try_from_value(value) Self::try_from_value(value)
} }

View File

@ -134,7 +134,7 @@ impl NuLazyFrame {
} }
pub fn try_from_pipeline(input: PipelineData, span: Span) -> Result<Self, ShellError> { pub fn try_from_pipeline(input: PipelineData, span: Span) -> Result<Self, ShellError> {
let value = input.into_value(span); let value = input.into_value(span)?;
Self::try_from_value(value) Self::try_from_value(value)
} }

View File

@ -107,7 +107,7 @@ impl NuLazyGroupBy {
} }
pub fn try_from_pipeline(input: PipelineData, span: Span) -> Result<Self, ShellError> { pub fn try_from_pipeline(input: PipelineData, span: Span) -> Result<Self, ShellError> {
let value = input.into_value(span); let value = input.into_value(span)?;
Self::try_from_value(value) Self::try_from_value(value)
} }
} }

View File

@ -118,22 +118,12 @@ fn into_bits(
let cell_paths = call.rest(engine_state, stack, 0)?; let cell_paths = call.rest(engine_state, stack, 0)?;
let cell_paths = (!cell_paths.is_empty()).then_some(cell_paths); let cell_paths = (!cell_paths.is_empty()).then_some(cell_paths);
match input { if let PipelineData::ByteStream(stream, ..) = input {
PipelineData::ExternalStream { stdout: None, .. } => { // TODO: in the future, we may want this to stream out, converting each to bytes
Ok(Value::binary(vec![], head).into_pipeline_data()) Ok(Value::binary(stream.into_bytes()?, head).into_pipeline_data())
} } else {
PipelineData::ExternalStream { let args = Arguments { cell_paths };
stdout: Some(stream), operate(action, args, input, call.head, engine_state.ctrlc.clone())
..
} => {
// TODO: in the future, we may want this to stream out, converting each to bytes
let output = stream.into_bytes()?;
Ok(Value::binary(output.item, head).into_pipeline_data())
}
_ => {
let args = Arguments { cell_paths };
operate(action, args, input, call.head, engine_state.ctrlc.clone())
}
} }
} }

View File

@ -78,38 +78,40 @@ impl Command for EachWhile {
| PipelineData::ListStream(..) => { | PipelineData::ListStream(..) => {
let mut closure = ClosureEval::new(engine_state, stack, closure); let mut closure = ClosureEval::new(engine_state, stack, closure);
Ok(input Ok(input
.into_iter()
.map_while(move |value| match closure.run_with_value(value) {
Ok(data) => {
let value = data.into_value(head);
(!value.is_nothing()).then_some(value)
}
Err(_) => None,
})
.fuse()
.into_pipeline_data(head, engine_state.ctrlc.clone()))
}
PipelineData::ExternalStream { stdout: None, .. } => Ok(PipelineData::empty()),
PipelineData::ExternalStream {
stdout: Some(stream),
..
} => {
let mut closure = ClosureEval::new(engine_state, stack, closure);
Ok(stream
.into_iter() .into_iter()
.map_while(move |value| { .map_while(move |value| {
let value = value.ok()?; match closure
match closure.run_with_value(value) { .run_with_value(value)
Ok(data) => { .and_then(|data| data.into_value(head))
let value = data.into_value(head); {
(!value.is_nothing()).then_some(value) Ok(value) => (!value.is_nothing()).then_some(value),
}
Err(_) => None, Err(_) => None,
} }
}) })
.fuse() .fuse()
.into_pipeline_data(head, engine_state.ctrlc.clone())) .into_pipeline_data(head, engine_state.ctrlc.clone()))
} }
PipelineData::ByteStream(stream, ..) => {
let span = stream.span();
if let Some(chunks) = stream.chunks() {
let mut closure = ClosureEval::new(engine_state, stack, closure);
Ok(chunks
.map_while(move |value| {
let value = value.ok()?;
match closure
.run_with_value(value)
.and_then(|data| data.into_value(span))
{
Ok(value) => (!value.is_nothing()).then_some(value),
Err(_) => None,
}
})
.fuse()
.into_pipeline_data(head, engine_state.ctrlc.clone()))
} else {
Ok(PipelineData::Empty)
}
}
// This match allows non-iterables to be accepted, // This match allows non-iterables to be accepted,
// which is currently considered undesirable (Nov 2022). // which is currently considered undesirable (Nov 2022).
PipelineData::Value(value, ..) => { PipelineData::Value(value, ..) => {

View File

@ -56,7 +56,7 @@ impl Command for RollDown {
let by: Option<usize> = call.get_flag(engine_state, stack, "by")?; let by: Option<usize> = call.get_flag(engine_state, stack, "by")?;
let metadata = input.metadata(); let metadata = input.metadata();
let value = input.into_value(call.head); let value = input.into_value(call.head)?;
let rotated_value = vertical_rotate_value(value, by, VerticalDirection::Down)?; let rotated_value = vertical_rotate_value(value, by, VerticalDirection::Down)?;
Ok(rotated_value.into_pipeline_data().set_metadata(metadata)) Ok(rotated_value.into_pipeline_data().set_metadata(metadata))

View File

@ -94,7 +94,7 @@ impl Command for RollLeft {
let metadata = input.metadata(); let metadata = input.metadata();
let cells_only = call.has_flag(engine_state, stack, "cells-only")?; let cells_only = call.has_flag(engine_state, stack, "cells-only")?;
let value = input.into_value(call.head); let value = input.into_value(call.head)?;
let rotated_value = let rotated_value =
horizontal_rotate_value(value, by, cells_only, &HorizontalDirection::Left)?; horizontal_rotate_value(value, by, cells_only, &HorizontalDirection::Left)?;

View File

@ -94,7 +94,7 @@ impl Command for RollRight {
let metadata = input.metadata(); let metadata = input.metadata();
let cells_only = call.has_flag(engine_state, stack, "cells-only")?; let cells_only = call.has_flag(engine_state, stack, "cells-only")?;
let value = input.into_value(call.head); let value = input.into_value(call.head)?;
let rotated_value = let rotated_value =
horizontal_rotate_value(value, by, cells_only, &HorizontalDirection::Right)?; horizontal_rotate_value(value, by, cells_only, &HorizontalDirection::Right)?;

View File

@ -56,7 +56,7 @@ impl Command for RollUp {
let by: Option<usize> = call.get_flag(engine_state, stack, "by")?; let by: Option<usize> = call.get_flag(engine_state, stack, "by")?;
let metadata = input.metadata(); let metadata = input.metadata();
let value = input.into_value(call.head); let value = input.into_value(call.head)?;
let rotated_value = vertical_rotate_value(value, by, VerticalDirection::Up)?; let rotated_value = vertical_rotate_value(value, by, VerticalDirection::Up)?;
Ok(rotated_value.into_pipeline_data().set_metadata(metadata)) Ok(rotated_value.into_pipeline_data().set_metadata(metadata))

View File

@ -152,7 +152,7 @@ impl Iterator for UpdateCellIterator {
fn eval_value(closure: &mut ClosureEval, span: Span, value: Value) -> Value { fn eval_value(closure: &mut ClosureEval, span: Span, value: Value) -> Value {
closure closure
.run_with_value(value) .run_with_value(value)
.map(|data| data.into_value(span)) .and_then(|data| data.into_value(span))
.unwrap_or_else(|err| Value::error(err, span)) .unwrap_or_else(|err| Value::error(err, span))
} }

View File

@ -39,7 +39,7 @@ impl Command for FormatPattern {
let mut working_set = StateWorkingSet::new(engine_state); let mut working_set = StateWorkingSet::new(engine_state);
let specified_pattern: Result<Value, ShellError> = call.req(engine_state, stack, 0); let specified_pattern: Result<Value, ShellError> = call.req(engine_state, stack, 0);
let input_val = input.into_value(call.head); let input_val = input.into_value(call.head)?;
// add '$it' variable to support format like this: $it.column1.column2. // add '$it' variable to support format like this: $it.column1.column2.
let it_id = working_set.add_variable(b"$it".to_vec(), call.head, Type::Any, false); let it_id = working_set.add_variable(b"$it".to_vec(), call.head, Type::Any, false);
stack.add_var(it_id, input_val.clone()); stack.add_var(it_id, input_val.clone());

View File

@ -19,102 +19,102 @@ fn basic_string_fails() {
assert_eq!(actual.out, ""); assert_eq!(actual.out, "");
} }
#[test] // #[test]
fn short_stream_binary() { // fn short_stream_binary() {
let actual = nu!(r#" // let actual = nu!(r#"
nu --testbin repeater (0x[01]) 5 | bytes starts-with 0x[010101] // nu --testbin repeater (0x[01]) 5 | bytes starts-with 0x[010101]
"#); // "#);
assert_eq!(actual.out, "true"); // assert_eq!(actual.out, "true");
} // }
#[test] // #[test]
fn short_stream_mismatch() { // fn short_stream_mismatch() {
let actual = nu!(r#" // let actual = nu!(r#"
nu --testbin repeater (0x[010203]) 5 | bytes starts-with 0x[010204] // nu --testbin repeater (0x[010203]) 5 | bytes starts-with 0x[010204]
"#); // "#);
assert_eq!(actual.out, "false"); // assert_eq!(actual.out, "false");
} // }
#[test] // #[test]
fn short_stream_binary_overflow() { // fn short_stream_binary_overflow() {
let actual = nu!(r#" // let actual = nu!(r#"
nu --testbin repeater (0x[01]) 5 | bytes starts-with 0x[010101010101] // nu --testbin repeater (0x[01]) 5 | bytes starts-with 0x[010101010101]
"#); // "#);
assert_eq!(actual.out, "false"); // assert_eq!(actual.out, "false");
} // }
#[test] // #[test]
fn long_stream_binary() { // fn long_stream_binary() {
let actual = nu!(r#" // let actual = nu!(r#"
nu --testbin repeater (0x[01]) 32768 | bytes starts-with 0x[010101] // nu --testbin repeater (0x[01]) 32768 | bytes starts-with 0x[010101]
"#); // "#);
assert_eq!(actual.out, "true"); // assert_eq!(actual.out, "true");
} // }
#[test] // #[test]
fn long_stream_binary_overflow() { // fn long_stream_binary_overflow() {
// .. ranges are inclusive..inclusive, so we don't need to +1 to check for an overflow // // .. ranges are inclusive..inclusive, so we don't need to +1 to check for an overflow
let actual = nu!(r#" // let actual = nu!(r#"
nu --testbin repeater (0x[01]) 32768 | bytes starts-with (0..32768 | each {|| 0x[01] } | bytes collect) // nu --testbin repeater (0x[01]) 32768 | bytes starts-with (0..32768 | each {|| 0x[01] } | bytes collect)
"#); // "#);
assert_eq!(actual.out, "false"); // assert_eq!(actual.out, "false");
} // }
#[test] // #[test]
fn long_stream_binary_exact() { // fn long_stream_binary_exact() {
// ranges are inclusive..inclusive, so we don't need to +1 to check for an overflow // // ranges are inclusive..inclusive, so we don't need to +1 to check for an overflow
let actual = nu!(r#" // let actual = nu!(r#"
nu --testbin repeater (0x[01020304]) 8192 | bytes starts-with (0..<8192 | each {|| 0x[01020304] } | bytes collect) // nu --testbin repeater (0x[01020304]) 8192 | bytes starts-with (0..<8192 | each {|| 0x[01020304] } | bytes collect)
"#); // "#);
assert_eq!(actual.out, "true"); // assert_eq!(actual.out, "true");
} // }
#[test] // #[test]
fn long_stream_string_exact() { // fn long_stream_string_exact() {
// ranges are inclusive..inclusive, so we don't need to +1 to check for an overflow // // ranges are inclusive..inclusive, so we don't need to +1 to check for an overflow
let actual = nu!(r#" // let actual = nu!(r#"
nu --testbin repeater hell 8192 | bytes starts-with (0..<8192 | each {|| "hell" | into binary } | bytes collect) // nu --testbin repeater hell 8192 | bytes starts-with (0..<8192 | each {|| "hell" | into binary } | bytes collect)
"#); // "#);
assert_eq!(actual.out, "true"); // assert_eq!(actual.out, "true");
} // }
#[test] // #[test]
fn long_stream_mixed_exact() { // fn long_stream_mixed_exact() {
// ranges are inclusive..inclusive, so we don't need to +1 to check for an overflow // // ranges are inclusive..inclusive, so we don't need to +1 to check for an overflow
let actual = nu!(r#" // let actual = nu!(r#"
let binseg = (0..<2048 | each {|| 0x[003d9fbf] } | bytes collect) // let binseg = (0..<2048 | each {|| 0x[003d9fbf] } | bytes collect)
let strseg = (0..<2048 | each {|| "hell" | into binary } | bytes collect) // let strseg = (0..<2048 | each {|| "hell" | into binary } | bytes collect)
nu --testbin repeat_bytes 003d9fbf 2048 68656c6c 2048 | bytes starts-with (bytes build $binseg $strseg) // nu --testbin repeat_bytes 003d9fbf 2048 68656c6c 2048 | bytes starts-with (bytes build $binseg $strseg)
"#); // "#);
assert_eq!( // assert_eq!(
actual.err, "", // actual.err, "",
"invocation failed. command line limit likely reached" // "invocation failed. command line limit likely reached"
); // );
assert_eq!(actual.out, "true"); // assert_eq!(actual.out, "true");
} // }
#[test] // #[test]
fn long_stream_mixed_overflow() { // fn long_stream_mixed_overflow() {
// ranges are inclusive..inclusive, so we don't need to +1 to check for an overflow // // ranges are inclusive..inclusive, so we don't need to +1 to check for an overflow
let actual = nu!(r#" // let actual = nu!(r#"
let binseg = (0..<2048 | each {|| 0x[003d9fbf] } | bytes collect) // let binseg = (0..<2048 | each {|| 0x[003d9fbf] } | bytes collect)
let strseg = (0..<2048 | each {|| "hell" | into binary } | bytes collect) // let strseg = (0..<2048 | each {|| "hell" | into binary } | bytes collect)
nu --testbin repeat_bytes 003d9fbf 2048 68656c6c 2048 | bytes starts-with (bytes build $binseg $strseg 0x[01]) // nu --testbin repeat_bytes 003d9fbf 2048 68656c6c 2048 | bytes starts-with (bytes build $binseg $strseg 0x[01])
"#); // "#);
assert_eq!( // assert_eq!(
actual.err, "", // actual.err, "",
"invocation failed. command line limit likely reached" // "invocation failed. command line limit likely reached"
); // );
assert_eq!(actual.out, "false"); // assert_eq!(actual.out, "false");
} // }

View File

@ -43,7 +43,7 @@ impl Command for Collect {
stack.captures_to_stack_preserve_out_dest(closure.captures.clone()); stack.captures_to_stack_preserve_out_dest(closure.captures.clone());
let metadata = input.metadata(); let metadata = input.metadata();
let input = input.into_value(call.head); let input = input.into_value(call.head)?;
let mut saved_positional = None; let mut saved_positional = None;
if let Some(var) = block.signature.get_positional(0) { if let Some(var) = block.signature.get_positional(0) {

View File

@ -1,5 +1,5 @@
use nu_engine::command_prelude::*; use nu_engine::command_prelude::*;
use nu_protocol::{engine::StateWorkingSet, PipelineMetadata}; use nu_protocol::{engine::StateWorkingSet, ByteStreamSource, PipelineMetadata};
#[derive(Clone)] #[derive(Clone)]
pub struct Describe; pub struct Describe;
@ -162,73 +162,38 @@ fn run(
let metadata = input.metadata(); let metadata = input.metadata();
let description = match input { let description = match input {
PipelineData::ExternalStream { PipelineData::ByteStream(stream, ..) => {
ref stdout, let description = if options.detailed {
ref stderr, let origin = match stream.source() {
ref exit_code, ByteStreamSource::Read(_) => "unknown",
.. ByteStreamSource::File(_) => "file",
} => { ByteStreamSource::Child(_) => "external",
if options.detailed {
let stdout = if stdout.is_some() {
Value::record(
record! {
"type" => Value::string("stream", head),
"origin" => Value::string("external", head),
"subtype" => Value::string("any", head),
},
head,
)
} else {
Value::nothing(head)
};
let stderr = if stderr.is_some() {
Value::record(
record! {
"type" => Value::string("stream", head),
"origin" => Value::string("external", head),
"subtype" => Value::string("any", head),
},
head,
)
} else {
Value::nothing(head)
};
let exit_code = if exit_code.is_some() {
Value::record(
record! {
"type" => Value::string("stream", head),
"origin" => Value::string("external", head),
"subtype" => Value::string("int", head),
},
head,
)
} else {
Value::nothing(head)
}; };
Value::record( Value::record(
record! { record! {
"type" => Value::string("stream", head), "type" => Value::string("byte stream", head),
"origin" => Value::string("external", head), "origin" => Value::string(origin, head),
"stdout" => stdout,
"stderr" => stderr,
"exit_code" => exit_code,
"metadata" => metadata_to_value(metadata, head), "metadata" => metadata_to_value(metadata, head),
}, },
head, head,
) )
} else { } else {
Value::string("raw input", head) Value::string("byte stream", head)
};
if !options.no_collect {
stream.drain()?;
} }
description
} }
PipelineData::ListStream(_, _) => { PipelineData::ListStream(stream, ..) => {
if options.detailed { if options.detailed {
let subtype = if options.no_collect { let subtype = if options.no_collect {
Value::string("any", head) Value::string("any", head)
} else { } else {
describe_value(input.into_value(head), head, engine_state) describe_value(stream.into_value(), head, engine_state)
}; };
Value::record( Value::record(
record! { record! {
@ -242,19 +207,19 @@ fn run(
} else if options.no_collect { } else if options.no_collect {
Value::string("stream", head) Value::string("stream", head)
} else { } else {
let value = input.into_value(head); let value = stream.into_value();
let base_description = value.get_type().to_string(); let base_description = value.get_type().to_string();
Value::string(format!("{} (stream)", base_description), head) Value::string(format!("{} (stream)", base_description), head)
} }
} }
_ => { PipelineData::Value(value, ..) => {
let value = input.into_value(head);
if !options.detailed { if !options.detailed {
Value::string(value.get_type().to_string(), head) Value::string(value.get_type().to_string(), head)
} else { } else {
describe_value(value, head, engine_state) describe_value(value, head, engine_state)
} }
} }
PipelineData::Empty => Value::string(Type::Nothing.to_string(), head),
}; };
Ok(description.into_pipeline_data()) Ok(description.into_pipeline_data())

View File

@ -1,6 +1,13 @@
use nu_engine::{command_prelude::*, get_eval_block_with_early_return, redirect_env}; use nu_engine::{command_prelude::*, get_eval_block_with_early_return, redirect_env};
use nu_protocol::{engine::Closure, ListStream, OutDest, RawStream}; use nu_protocol::{
use std::thread; engine::Closure,
process::{ChildPipe, ChildProcess, ExitStatus},
ByteStream, ByteStreamSource, OutDest,
};
use std::{
io::{Cursor, Read},
thread,
};
#[derive(Clone)] #[derive(Clone)]
pub struct Do; pub struct Do;
@ -86,115 +93,91 @@ impl Command for Do {
} }
match result { match result {
Ok(PipelineData::ExternalStream { Ok(PipelineData::ByteStream(stream, metadata)) if capture_errors => {
stdout, let span = stream.span();
stderr, match stream.into_child() {
exit_code, Ok(mut child) => {
span, // Use a thread to receive stdout message.
metadata, // Or we may get a deadlock if child process sends out too much bytes to stderr.
trim_end_newline, //
}) if capture_errors => { // For example: in normal linux system, stderr pipe's limit is 65535 bytes.
// Use a thread to receive stdout message. // if child process sends out 65536 bytes, the process will be hanged because no consumer
// Or we may get a deadlock if child process sends out too much bytes to stderr. // consumes the first 65535 bytes
// // So we need a thread to receive stdout message, then the current thread can continue to consume
// For example: in normal linux system, stderr pipe's limit is 65535 bytes. // stderr messages.
// if child process sends out 65536 bytes, the process will be hanged because no consumer let stdout_handler = child
// consumes the first 65535 bytes .stdout
// So we need a thread to receive stdout message, then the current thread can continue to consume .take()
// stderr messages. .map(|mut stdout| {
let stdout_handler = stdout thread::Builder::new()
.map(|stdout_stream| { .name("stdout consumer".to_string())
thread::Builder::new() .spawn(move || {
.name("stderr redirector".to_string()) let mut buf = Vec::new();
.spawn(move || { stdout.read_to_end(&mut buf)?;
let ctrlc = stdout_stream.ctrlc.clone(); Ok::<_, ShellError>(buf)
let span = stdout_stream.span; })
RawStream::new( .err_span(head)
Box::new(std::iter::once(
stdout_stream.into_bytes().map(|s| s.item),
)),
ctrlc,
span,
None,
)
}) })
.err_span(head) .transpose()?;
})
.transpose()?;
// Intercept stderr so we can return it in the error if the exit code is non-zero. // Intercept stderr so we can return it in the error if the exit code is non-zero.
// The threading issues mentioned above dictate why we also need to intercept stdout. // The threading issues mentioned above dictate why we also need to intercept stdout.
let mut stderr_ctrlc = None; let stderr_msg = match child.stderr.take() {
let stderr_msg = match stderr { None => String::new(),
None => "".to_string(), Some(mut stderr) => {
Some(stderr_stream) => { let mut buf = String::new();
stderr_ctrlc.clone_from(&stderr_stream.ctrlc); stderr.read_to_string(&mut buf).err_span(span)?;
stderr_stream.into_string().map(|s| s.item)? buf
} }
}; };
let stdout = if let Some(handle) = stdout_handler { let stdout = if let Some(handle) = stdout_handler {
match handle.join() { match handle.join() {
Err(err) => { Err(err) => {
return Err(ShellError::ExternalCommand {
label: "Fail to receive external commands stdout message"
.to_string(),
help: format!("{err:?}"),
span,
});
}
Ok(res) => Some(res?),
}
} else {
None
};
if child.wait()? != ExitStatus::Exited(0) {
return Err(ShellError::ExternalCommand { return Err(ShellError::ExternalCommand {
label: "Fail to receive external commands stdout message" label: "External command failed".to_string(),
.to_string(), help: stderr_msg,
help: format!("{err:?}"),
span, span,
}); });
} }
Ok(res) => Some(res),
}
} else {
None
};
let exit_code: Vec<Value> = match exit_code { let mut child = ChildProcess::from_raw(None, None, None, span);
None => vec![], if let Some(stdout) = stdout {
Some(exit_code_stream) => exit_code_stream.into_iter().collect(), child.stdout = Some(ChildPipe::Tee(Box::new(Cursor::new(stdout))));
}; }
if let Some(Value::Int { val: code, .. }) = exit_code.last() { if !stderr_msg.is_empty() {
if *code != 0 { child.stderr = Some(ChildPipe::Tee(Box::new(Cursor::new(stderr_msg))));
return Err(ShellError::ExternalCommand { }
label: "External command failed".to_string(), Ok(PipelineData::ByteStream(
help: stderr_msg, ByteStream::child(child, span),
span, metadata,
}); ))
} }
Err(stream) => Ok(PipelineData::ByteStream(stream, metadata)),
} }
Ok(PipelineData::ExternalStream {
stdout,
stderr: Some(RawStream::new(
Box::new(std::iter::once(Ok(stderr_msg.into_bytes()))),
stderr_ctrlc,
span,
None,
)),
exit_code: Some(ListStream::new(exit_code.into_iter(), span, None)),
span,
metadata,
trim_end_newline,
})
} }
Ok(PipelineData::ExternalStream { Ok(PipelineData::ByteStream(mut stream, metadata))
stdout, if ignore_program_errors
stderr, && !matches!(caller_stack.stdout(), OutDest::Pipe | OutDest::Capture) =>
exit_code: _,
span,
metadata,
trim_end_newline,
}) if ignore_program_errors
&& !matches!(caller_stack.stdout(), OutDest::Pipe | OutDest::Capture) =>
{ {
Ok(PipelineData::ExternalStream { if let ByteStreamSource::Child(child) = stream.source_mut() {
stdout, child.set_exit_code(0)
stderr, }
exit_code: None, Ok(PipelineData::ByteStream(stream, metadata))
span,
metadata,
trim_end_newline,
})
} }
Ok(PipelineData::Value(Value::Error { .. }, ..)) | Err(_) if ignore_shell_errors => { Ok(PipelineData::Value(Value::Error { .. }, ..)) | Err(_) if ignore_shell_errors => {
Ok(PipelineData::empty()) Ok(PipelineData::empty())

View File

@ -121,12 +121,14 @@ impl Command for For {
Err(err) => { Err(err) => {
return Err(err); return Err(err);
} }
Ok(pipeline) => { Ok(data) => {
let exit_code = pipeline.drain_with_exit_code()?; if let Some(status) = data.drain()? {
if exit_code != 0 { let code = status.code();
return Ok(PipelineData::new_external_stream_with_only_exit_code( if code != 0 {
exit_code, return Ok(
)); PipelineData::new_external_stream_with_only_exit_code(code),
);
}
} }
} }
} }
@ -159,12 +161,14 @@ impl Command for For {
Err(err) => { Err(err) => {
return Err(err); return Err(err);
} }
Ok(pipeline) => { Ok(data) => {
let exit_code = pipeline.drain_with_exit_code()?; if let Some(status) = data.drain()? {
if exit_code != 0 { let code = status.code();
return Ok(PipelineData::new_external_stream_with_only_exit_code( if code != 0 {
exit_code, return Ok(
)); PipelineData::new_external_stream_with_only_exit_code(code),
);
}
} }
} }
} }
@ -173,7 +177,7 @@ impl Command for For {
x => { x => {
stack.add_var(var_id, x); stack.add_var(var_id, x);
eval_block(&engine_state, stack, block, PipelineData::empty())?.into_value(head); eval_block(&engine_state, stack, block, PipelineData::empty())?.into_value(head)?;
} }
} }
Ok(PipelineData::empty()) Ok(PipelineData::empty())

View File

@ -61,7 +61,7 @@ impl Command for Let {
let eval_block = get_eval_block(engine_state); let eval_block = get_eval_block(engine_state);
let stack = &mut stack.start_capture(); let stack = &mut stack.start_capture();
let pipeline_data = eval_block(engine_state, stack, block, input)?; let pipeline_data = eval_block(engine_state, stack, block, input)?;
let value = pipeline_data.into_value(call.head); let value = pipeline_data.into_value(call.head)?;
// if given variable type is Glob, and our result is string // if given variable type is Glob, and our result is string
// then nushell need to convert from Value::String to Value::Glob // then nushell need to convert from Value::String to Value::Glob

View File

@ -53,12 +53,12 @@ impl Command for Loop {
Err(err) => { Err(err) => {
return Err(err); return Err(err);
} }
Ok(pipeline) => { Ok(data) => {
let exit_code = pipeline.drain_with_exit_code()?; if let Some(status) = data.drain()? {
if exit_code != 0 { let code = status.code();
return Ok(PipelineData::new_external_stream_with_only_exit_code( if code != 0 {
exit_code, return Ok(PipelineData::new_external_stream_with_only_exit_code(code));
)); }
} }
} }
} }

View File

@ -61,7 +61,7 @@ impl Command for Mut {
let eval_block = get_eval_block(engine_state); let eval_block = get_eval_block(engine_state);
let stack = &mut stack.start_capture(); let stack = &mut stack.start_capture();
let pipeline_data = eval_block(engine_state, stack, block, input)?; let pipeline_data = eval_block(engine_state, stack, block, input)?;
let value = pipeline_data.into_value(call.head); let value = pipeline_data.into_value(call.head)?;
// if given variable type is Glob, and our result is string // if given variable type is Glob, and our result is string
// then nushell need to convert from Value::String to Value::Glob // then nushell need to convert from Value::String to Value::Glob

View File

@ -62,10 +62,11 @@ impl Command for Try {
} }
// external command may fail to run // external command may fail to run
Ok(pipeline) => { Ok(pipeline) => {
let (pipeline, external_failed) = pipeline.check_external_failed(); let (pipeline, external_failed) = pipeline.check_external_failed()?;
if external_failed { if external_failed {
let exit_code = pipeline.drain_with_exit_code()?; let status = pipeline.drain()?;
stack.add_env_var("LAST_EXIT_CODE".into(), Value::int(exit_code, call.head)); let code = status.map(|status| status.code()).unwrap_or(0);
stack.add_env_var("LAST_EXIT_CODE".into(), Value::int(code.into(), call.head));
let err_value = Value::nothing(call.head); let err_value = Value::nothing(call.head);
handle_catch(err_value, catch_block, engine_state, stack, eval_block) handle_catch(err_value, catch_block, engine_state, stack, eval_block)
} else { } else {

View File

@ -70,14 +70,16 @@ impl Command for While {
Err(err) => { Err(err) => {
return Err(err); return Err(err);
} }
Ok(pipeline) => { Ok(data) => {
let exit_code = pipeline.drain_with_exit_code()?; if let Some(status) = data.drain()? {
if exit_code != 0 { let code = status.code();
return Ok( if code != 0 {
PipelineData::new_external_stream_with_only_exit_code( return Ok(
exit_code, PipelineData::new_external_stream_with_only_exit_code(
), code,
); ),
);
}
} }
} }
} }

View File

@ -122,10 +122,9 @@ pub fn eval_block(
stack.add_env_var("PWD".to_string(), Value::test_string(cwd.to_string_lossy())); stack.add_env_var("PWD".to_string(), Value::test_string(cwd.to_string_lossy()));
match nu_engine::eval_block::<WithoutDebug>(engine_state, &mut stack, &block, input) { nu_engine::eval_block::<WithoutDebug>(engine_state, &mut stack, &block, input)
Err(err) => panic!("test eval error in `{}`: {:?}", "TODO", err), .and_then(|data| data.into_value(Span::test_data()))
Ok(result) => result.into_value(Span::test_data()), .unwrap_or_else(|err| panic!("test eval error in `{}`: {:?}", "TODO", err))
}
} }
pub fn check_example_evaluates_to_expected_output( pub fn check_example_evaluates_to_expected_output(

View File

@ -58,11 +58,11 @@ impl<'a> StyleComputer<'a> {
Some(ComputableStyle::Closure(closure, span)) => { Some(ComputableStyle::Closure(closure, span)) => {
let result = ClosureEvalOnce::new(self.engine_state, self.stack, closure.clone()) let result = ClosureEvalOnce::new(self.engine_state, self.stack, closure.clone())
.debug(false) .debug(false)
.run_with_value(value.clone()); .run_with_value(value.clone())
.and_then(|data| data.into_value(*span));
match result { match result {
Ok(v) => { Ok(value) => {
let value = v.into_value(*span);
// These should be the same color data forms supported by color_config. // These should be the same color data forms supported by color_config.
match value { match value {
Value::Record { .. } => color_record_to_nustyle(&value), Value::Record { .. } => color_record_to_nustyle(&value),

View File

@ -60,63 +60,13 @@ impl Command for BytesStartsWith {
pattern, pattern,
cell_paths, cell_paths,
}; };
operate(
match input { starts_with,
PipelineData::ExternalStream { arg,
stdout: Some(stream), input,
span, call.head,
.. engine_state.ctrlc.clone(),
} => { )
let mut i = 0;
for item in stream {
let byte_slice = match &item {
// String and binary data are valid byte patterns
Ok(Value::String { val, .. }) => val.as_bytes(),
Ok(Value::Binary { val, .. }) => val,
// If any Error value is output, echo it back
Ok(v @ Value::Error { .. }) => return Ok(v.clone().into_pipeline_data()),
// Unsupported data
Ok(other) => {
return Ok(Value::error(
ShellError::OnlySupportsThisInputType {
exp_input_type: "string and binary".into(),
wrong_type: other.get_type().to_string(),
dst_span: span,
src_span: other.span(),
},
span,
)
.into_pipeline_data());
}
Err(err) => return Err(err.to_owned()),
};
let max = byte_slice.len().min(arg.pattern.len() - i);
if byte_slice[..max] == arg.pattern[i..i + max] {
i += max;
if i >= arg.pattern.len() {
return Ok(Value::bool(true, span).into_pipeline_data());
}
} else {
return Ok(Value::bool(false, span).into_pipeline_data());
}
}
// We reached the end of the stream and never returned,
// the pattern wasn't exhausted so it probably doesn't match
Ok(Value::bool(false, span).into_pipeline_data())
}
_ => operate(
starts_with,
arg,
input,
call.head,
engine_state.ctrlc.clone(),
),
}
} }
fn examples(&self) -> Vec<Example> { fn examples(&self) -> Vec<Example> {

View File

@ -121,7 +121,7 @@ impl Command for Histogram {
}; };
let span = call.head; let span = call.head;
let data_as_value = input.into_value(span); let data_as_value = input.into_value(span)?;
let value_span = data_as_value.span(); let value_span = data_as_value.span();
// `input` is not a list, here we can return an error. // `input` is not a list, here we can return an error.
run_histogram( run_histogram(

View File

@ -127,25 +127,15 @@ fn into_binary(
let cell_paths = call.rest(engine_state, stack, 0)?; let cell_paths = call.rest(engine_state, stack, 0)?;
let cell_paths = (!cell_paths.is_empty()).then_some(cell_paths); let cell_paths = (!cell_paths.is_empty()).then_some(cell_paths);
match input { if let PipelineData::ByteStream(stream, ..) = input {
PipelineData::ExternalStream { stdout: None, .. } => { // TODO: in the future, we may want this to stream out, converting each to bytes
Ok(Value::binary(vec![], head).into_pipeline_data()) Ok(Value::binary(stream.into_bytes()?, head).into_pipeline_data())
} } else {
PipelineData::ExternalStream { let args = Arguments {
stdout: Some(stream), cell_paths,
.. compact: call.has_flag(engine_state, stack, "compact")?,
} => { };
// TODO: in the future, we may want this to stream out, converting each to bytes operate(action, args, input, call.head, engine_state.ctrlc.clone())
let output = stream.into_bytes()?;
Ok(Value::binary(output.item, head).into_pipeline_data())
}
_ => {
let args = Arguments {
cell_paths,
compact: call.has_flag(engine_state, stack, "compact")?,
};
operate(action, args, input, call.head, engine_state.ctrlc.clone())
}
} }
} }

View File

@ -101,11 +101,11 @@ fn into_cell_path(call: &Call, input: PipelineData) -> Result<PipelineData, Shel
let list: Vec<_> = stream.into_iter().collect(); let list: Vec<_> = stream.into_iter().collect();
Ok(list_to_cell_path(&list, head)?.into_pipeline_data()) Ok(list_to_cell_path(&list, head)?.into_pipeline_data())
} }
PipelineData::ExternalStream { span, .. } => Err(ShellError::OnlySupportsThisInputType { PipelineData::ByteStream(stream, ..) => Err(ShellError::OnlySupportsThisInputType {
exp_input_type: "list, int".into(), exp_input_type: "list, int".into(),
wrong_type: "raw data".into(), wrong_type: "byte stream".into(),
dst_span: head, dst_span: head,
src_span: span, src_span: stream.span(),
}), }),
PipelineData::Empty => Err(ShellError::PipelineEmpty { dst_span: head }), PipelineData::Empty => Err(ShellError::PipelineEmpty { dst_span: head }),
} }

View File

@ -82,20 +82,12 @@ fn glob_helper(
let head = call.head; let head = call.head;
let cell_paths = call.rest(engine_state, stack, 0)?; let cell_paths = call.rest(engine_state, stack, 0)?;
let cell_paths = (!cell_paths.is_empty()).then_some(cell_paths); let cell_paths = (!cell_paths.is_empty()).then_some(cell_paths);
let args = Arguments { cell_paths }; if let PipelineData::ByteStream(stream, ..) = input {
match input { // TODO: in the future, we may want this to stream out, converting each to bytes
PipelineData::ExternalStream { stdout: None, .. } => { Ok(Value::glob(stream.into_string()?, false, head).into_pipeline_data())
Ok(Value::glob(String::new(), false, head).into_pipeline_data()) } else {
} let args = Arguments { cell_paths };
PipelineData::ExternalStream { operate(action, args, input, head, engine_state.ctrlc.clone())
stdout: Some(stream),
..
} => {
// TODO: in the future, we may want this to stream out, converting each to bytes
let output = stream.into_string()?;
Ok(Value::glob(output.item, false, head).into_pipeline_data())
}
_ => operate(action, args, input, head, engine_state.ctrlc.clone()),
} }
} }

View File

@ -108,7 +108,7 @@ fn into_record(
call: &Call, call: &Call,
input: PipelineData, input: PipelineData,
) -> Result<PipelineData, ShellError> { ) -> Result<PipelineData, ShellError> {
let input = input.into_value(call.head); let input = input.into_value(call.head)?;
let input_type = input.get_type(); let input_type = input.get_type();
let span = input.span(); let span = input.span();
let res = match input { let res = match input {

View File

@ -155,26 +155,18 @@ fn string_helper(
} }
let cell_paths = call.rest(engine_state, stack, 0)?; let cell_paths = call.rest(engine_state, stack, 0)?;
let cell_paths = (!cell_paths.is_empty()).then_some(cell_paths); let cell_paths = (!cell_paths.is_empty()).then_some(cell_paths);
let config = engine_state.get_config().clone();
let args = Arguments {
decimals_value,
cell_paths,
config,
};
match input { if let PipelineData::ByteStream(stream, ..) = input {
PipelineData::ExternalStream { stdout: None, .. } => { // TODO: in the future, we may want this to stream out, converting each to bytes
Ok(Value::string(String::new(), head).into_pipeline_data()) Ok(Value::string(stream.into_string()?, head).into_pipeline_data())
} } else {
PipelineData::ExternalStream { let config = engine_state.get_config().clone();
stdout: Some(stream), let args = Arguments {
.. decimals_value,
} => { cell_paths,
// TODO: in the future, we may want this to stream out, converting each to bytes config,
let output = stream.into_string()?; };
Ok(Value::string(output.item, head).into_pipeline_data()) operate(action, args, input, head, engine_state.ctrlc.clone())
}
_ => operate(action, args, input, head, engine_state.ctrlc.clone()),
} }
} }

View File

@ -91,7 +91,7 @@ impl SQLiteDatabase {
} }
pub fn try_from_pipeline(input: PipelineData, span: Span) -> Result<Self, ShellError> { pub fn try_from_pipeline(input: PipelineData, span: Span) -> Result<Self, ShellError> {
let value = input.into_value(span); let value = input.into_value(span)?;
Self::try_from_value(value) Self::try_from_value(value)
} }

View File

@ -29,7 +29,7 @@ impl Command for Inspect {
input: PipelineData, input: PipelineData,
) -> Result<PipelineData, ShellError> { ) -> Result<PipelineData, ShellError> {
let input_metadata = input.metadata(); let input_metadata = input.metadata();
let input_val = input.into_value(call.head); let input_val = input.into_value(call.head)?;
if input_val.is_nothing() { if input_val.is_nothing() {
return Err(ShellError::PipelineEmpty { return Err(ShellError::PipelineEmpty {
dst_span: call.head, dst_span: call.head,

View File

@ -53,13 +53,12 @@ impl Command for TimeIt {
eval_block(engine_state, stack, block, input)? eval_block(engine_state, stack, block, input)?
} else { } else {
let eval_expression_with_input = get_eval_expression_with_input(engine_state); let eval_expression_with_input = get_eval_expression_with_input(engine_state);
eval_expression_with_input(engine_state, stack, command_to_run, input) eval_expression_with_input(engine_state, stack, command_to_run, input)?.0
.map(|res| res.0)?
} }
} else { } else {
PipelineData::empty() PipelineData::empty()
} }
.into_value(call.head); .into_value(call.head)?;
let end_time = Instant::now(); let end_time = Instant::now();

View File

@ -1,8 +1,8 @@
use super::util::get_rest_for_glob_pattern; use super::util::get_rest_for_glob_pattern;
#[allow(deprecated)] #[allow(deprecated)]
use nu_engine::{command_prelude::*, current_dir, get_eval_block}; use nu_engine::{command_prelude::*, current_dir, get_eval_block};
use nu_protocol::{BufferedReader, DataSource, NuGlob, PipelineMetadata, RawStream}; use nu_protocol::{ByteStream, DataSource, NuGlob, PipelineMetadata};
use std::{io::BufReader, path::Path}; use std::path::Path;
#[cfg(feature = "sqlite")] #[cfg(feature = "sqlite")]
use crate::database::SQLiteDatabase; use crate::database::SQLiteDatabase;
@ -143,23 +143,13 @@ impl Command for Open {
} }
}; };
let buf_reader = BufReader::new(file); let stream = PipelineData::ByteStream(
ByteStream::file(file, call_span, ctrlc.clone()),
let file_contents = PipelineData::ExternalStream { Some(PipelineMetadata {
stdout: Some(RawStream::new(
Box::new(BufferedReader::new(buf_reader)),
ctrlc.clone(),
call_span,
None,
)),
stderr: None,
exit_code: None,
span: call_span,
metadata: Some(PipelineMetadata {
data_source: DataSource::FilePath(path.to_path_buf()), data_source: DataSource::FilePath(path.to_path_buf()),
}), }),
trim_end_newline: false, );
};
let exts_opt: Option<Vec<String>> = if raw { let exts_opt: Option<Vec<String>> = if raw {
None None
} else { } else {
@ -184,9 +174,9 @@ impl Command for Open {
let decl = engine_state.get_decl(converter_id); let decl = engine_state.get_decl(converter_id);
let command_output = if let Some(block_id) = decl.get_block_id() { let command_output = if let Some(block_id) = decl.get_block_id() {
let block = engine_state.get_block(block_id); let block = engine_state.get_block(block_id);
eval_block(engine_state, stack, block, file_contents) eval_block(engine_state, stack, block, stream)
} else { } else {
decl.run(engine_state, stack, &Call::new(call_span), file_contents) decl.run(engine_state, stack, &Call::new(call_span), stream)
}; };
output.push(command_output.map_err(|inner| { output.push(command_output.map_err(|inner| {
ShellError::GenericError{ ShellError::GenericError{
@ -198,7 +188,7 @@ impl Command for Open {
} }
})?); })?);
} }
None => output.push(file_contents), None => output.push(stream),
} }
} }
} }

View File

@ -5,12 +5,15 @@ use nu_engine::{command_prelude::*, current_dir};
use nu_path::expand_path_with; use nu_path::expand_path_with;
use nu_protocol::{ use nu_protocol::{
ast::{Expr, Expression}, ast::{Expr, Expression},
DataSource, OutDest, PipelineMetadata, RawStream, byte_stream::copy_with_interrupt,
process::ChildPipe,
ByteStreamSource, DataSource, OutDest, PipelineMetadata,
}; };
use std::{ use std::{
fs::File, fs::File,
io::Write, io::{self, BufRead, BufReader, Read, Write},
path::{Path, PathBuf}, path::{Path, PathBuf},
sync::{atomic::AtomicBool, Arc},
thread, thread,
}; };
@ -104,12 +107,7 @@ impl Command for Save {
}); });
match input { match input {
PipelineData::ExternalStream { PipelineData::ByteStream(stream, metadata) => {
stdout,
stderr,
metadata,
..
} => {
check_saving_to_source_file(metadata.as_ref(), &path, stderr_path.as_ref())?; check_saving_to_source_file(metadata.as_ref(), &path, stderr_path.as_ref())?;
let (file, stderr_file) = get_files( let (file, stderr_file) = get_files(
@ -121,40 +119,97 @@ impl Command for Save {
force, force,
)?; )?;
match (stdout, stderr) { let size = stream.known_size();
(Some(stdout), stderr) => { let ctrlc = engine_state.ctrlc.clone();
// delegate a thread to redirect stderr to result.
let handler = stderr
.map(|stderr| match stderr_file {
Some(stderr_file) => thread::Builder::new()
.name("stderr redirector".to_string())
.spawn(move || {
stream_to_file(stderr, stderr_file, span, progress)
}),
None => thread::Builder::new()
.name("stderr redirector".to_string())
.spawn(move || stderr.drain()),
})
.transpose()
.err_span(span)?;
let res = stream_to_file(stdout, file, span, progress); match stream.into_source() {
if let Some(h) = handler { ByteStreamSource::Read(read) => {
h.join().map_err(|err| ShellError::ExternalCommand { stream_to_file(read, size, ctrlc, file, span, progress)?;
label: "Fail to receive external commands stderr message"
.to_string(),
help: format!("{err:?}"),
span,
})??;
}
res?;
} }
(None, Some(stderr)) => match stderr_file { ByteStreamSource::File(source) => {
Some(stderr_file) => stream_to_file(stderr, stderr_file, span, progress)?, stream_to_file(source, size, ctrlc, file, span, progress)?;
None => stderr.drain()?, }
}, ByteStreamSource::Child(mut child) => {
(None, None) => {} fn write_or_consume_stderr(
}; stderr: ChildPipe,
file: Option<File>,
span: Span,
ctrlc: Option<Arc<AtomicBool>>,
progress: bool,
) -> Result<(), ShellError> {
if let Some(file) = file {
match stderr {
ChildPipe::Pipe(pipe) => {
stream_to_file(pipe, None, ctrlc, file, span, progress)
}
ChildPipe::Tee(tee) => {
stream_to_file(tee, None, ctrlc, file, span, progress)
}
}?
} else {
match stderr {
ChildPipe::Pipe(mut pipe) => {
io::copy(&mut pipe, &mut io::sink())
}
ChildPipe::Tee(mut tee) => io::copy(&mut tee, &mut io::sink()),
}
.err_span(span)?;
}
Ok(())
}
match (child.stdout.take(), child.stderr.take()) {
(Some(stdout), stderr) => {
// delegate a thread to redirect stderr to result.
let handler = stderr
.map(|stderr| {
let ctrlc = ctrlc.clone();
thread::Builder::new().name("stderr saver".into()).spawn(
move || {
write_or_consume_stderr(
stderr,
stderr_file,
span,
ctrlc,
progress,
)
},
)
})
.transpose()
.err_span(span)?;
let res = match stdout {
ChildPipe::Pipe(pipe) => {
stream_to_file(pipe, None, ctrlc, file, span, progress)
}
ChildPipe::Tee(tee) => {
stream_to_file(tee, None, ctrlc, file, span, progress)
}
};
if let Some(h) = handler {
h.join().map_err(|err| ShellError::ExternalCommand {
label: "Fail to receive external commands stderr message"
.to_string(),
help: format!("{err:?}"),
span,
})??;
}
res?;
}
(None, Some(stderr)) => {
write_or_consume_stderr(
stderr,
stderr_file,
span,
ctrlc,
progress,
)?;
}
(None, None) => {}
};
}
}
Ok(PipelineData::Empty) Ok(PipelineData::Empty)
} }
@ -302,8 +357,7 @@ fn input_to_bytes(
) -> Result<Vec<u8>, ShellError> { ) -> Result<Vec<u8>, ShellError> {
let ext = if raw { let ext = if raw {
None None
// if is extern stream , in other words , not value } else if let PipelineData::ByteStream(..) = input {
} else if let PipelineData::ExternalStream { .. } = input {
None None
} else if let PipelineData::Value(Value::String { .. }, ..) = input { } else if let PipelineData::Value(Value::String { .. }, ..) = input {
None None
@ -318,7 +372,7 @@ fn input_to_bytes(
input input
}; };
value_to_bytes(input.into_value(span)) value_to_bytes(input.into_value(span)?)
} }
/// Convert given data into content of file of specified extension if /// Convert given data into content of file of specified extension if
@ -448,84 +502,54 @@ fn get_files(
} }
fn stream_to_file( fn stream_to_file(
mut stream: RawStream, mut source: impl Read,
known_size: Option<u64>,
ctrlc: Option<Arc<AtomicBool>>,
mut file: File, mut file: File,
span: Span, span: Span,
progress: bool, progress: bool,
) -> Result<(), ShellError> { ) -> Result<(), ShellError> {
// https://github.com/nushell/nushell/pull/9377 contains the reason // https://github.com/nushell/nushell/pull/9377 contains the reason for not using `BufWriter`
// for not using BufWriter<File> if progress {
let writer = &mut file; let mut bytes_processed = 0;
let mut bytes_processed: u64 = 0; let mut bar = progress_bar::NuProgressBar::new(known_size);
let bytes_processed_p = &mut bytes_processed;
let file_total_size = stream.known_size;
let mut process_failed = false;
let process_failed_p = &mut process_failed;
// Create the progress bar // TODO: reduce the number of progress bar updates?
// It looks a bit messy but I am doing it this way to avoid
// creating the bar when is not needed
let (mut bar_opt, bar_opt_clone) = if progress {
let tmp_bar = progress_bar::NuProgressBar::new(file_total_size);
let tmp_bar_clone = tmp_bar.clone();
(Some(tmp_bar), Some(tmp_bar_clone)) let mut reader = BufReader::new(source);
} else {
(None, None)
};
stream.try_for_each(move |result| { let res = loop {
let buf = match result { if nu_utils::ctrl_c::was_pressed(&ctrlc) {
Ok(v) => match v { bar.abandoned_msg("# Cancelled #".to_owned());
Value::String { val, .. } => val.into_bytes(), return Ok(());
Value::Binary { val, .. } => val, }
// Propagate errors by explicitly matching them before the final case.
Value::Error { error, .. } => return Err(*error), match reader.fill_buf() {
other => { Ok(&[]) => break Ok(()),
return Err(ShellError::OnlySupportsThisInputType { Ok(buf) => {
exp_input_type: "string or binary".into(), file.write_all(buf).err_span(span)?;
wrong_type: other.get_type().to_string(), let len = buf.len();
dst_span: span, reader.consume(len);
src_span: other.span(), bytes_processed += len as u64;
}); bar.update_bar(bytes_processed);
} }
}, Err(e) if e.kind() == io::ErrorKind::Interrupted => continue,
Err(err) => { Err(e) => break Err(e),
*process_failed_p = true;
return Err(err);
} }
}; };
// If the `progress` flag is set then
if progress {
// Update the total amount of bytes that has been saved and then print the progress bar
*bytes_processed_p += buf.len() as u64;
if let Some(bar) = &mut bar_opt {
bar.update_bar(*bytes_processed_p);
}
}
if let Err(err) = writer.write_all(&buf) {
*process_failed_p = true;
return Err(ShellError::IOError {
msg: err.to_string(),
});
}
Ok(())
})?;
// If the `progress` flag is set then
if progress {
// If the process failed, stop the progress bar with an error message. // If the process failed, stop the progress bar with an error message.
if process_failed { if let Err(err) = res {
if let Some(bar) = bar_opt_clone { let _ = file.flush();
bar.abandoned_msg("# Error while saving #".to_owned()); bar.abandoned_msg("# Error while saving #".to_owned());
} Err(err.into_spanned(span).into())
} else {
file.flush().err_span(span)?;
Ok(())
} }
} else {
copy_with_interrupt(&mut source, &mut file, span, ctrlc.as_deref())?;
Ok(())
} }
file.flush()?;
Ok(())
} }

View File

@ -125,13 +125,11 @@ fn getcol(head: Span, input: PipelineData) -> Result<PipelineData, ShellError> {
.into_pipeline_data() .into_pipeline_data()
.set_metadata(metadata)) .set_metadata(metadata))
} }
PipelineData::ExternalStream { .. } => Err(ShellError::OnlySupportsThisInputType { PipelineData::ByteStream(stream, ..) => Err(ShellError::OnlySupportsThisInputType {
exp_input_type: "record or table".into(), exp_input_type: "record or table".into(),
wrong_type: "raw data".into(), wrong_type: "byte stream".into(),
dst_span: head, dst_span: head,
src_span: input src_span: stream.span(),
.span()
.expect("PipelineData::ExternalStream had no span"),
}), }),
} }
} }

View File

@ -133,11 +133,11 @@ fn drop_cols(
} }
} }
PipelineData::Empty => Ok(PipelineData::Empty), PipelineData::Empty => Ok(PipelineData::Empty),
PipelineData::ExternalStream { span, .. } => Err(ShellError::OnlySupportsThisInputType { PipelineData::ByteStream(stream, ..) => Err(ShellError::OnlySupportsThisInputType {
exp_input_type: "table or record".into(), exp_input_type: "table or record".into(),
wrong_type: "raw data".into(), wrong_type: "byte stream".into(),
dst_span: head, dst_span: head,
src_span: span, src_span: stream.span(),
}), }),
} }
} }

View File

@ -129,7 +129,9 @@ with 'transpose' first."#
} }
Some(Value::list(vals, span)) Some(Value::list(vals, span))
} }
Ok(data) => Some(data.into_value(head)), Ok(data) => Some(data.into_value(head).unwrap_or_else(|err| {
Value::error(chain_error_with_input(err, is_error, span), span)
})),
Err(ShellError::Continue { span }) => Some(Value::nothing(span)), Err(ShellError::Continue { span }) => Some(Value::nothing(span)),
Err(ShellError::Break { .. }) => None, Err(ShellError::Break { .. }) => None,
Err(error) => { Err(error) => {
@ -140,37 +142,39 @@ with 'transpose' first."#
}) })
.into_pipeline_data(head, engine_state.ctrlc.clone())) .into_pipeline_data(head, engine_state.ctrlc.clone()))
} }
PipelineData::ExternalStream { stdout: None, .. } => Ok(PipelineData::empty()), PipelineData::ByteStream(stream, ..) => {
PipelineData::ExternalStream { if let Some(chunks) = stream.chunks() {
stdout: Some(stream), let mut closure = ClosureEval::new(engine_state, stack, closure);
.. Ok(chunks
} => { .map_while(move |value| {
let mut closure = ClosureEval::new(engine_state, stack, closure); let value = match value {
Ok(stream Ok(value) => value,
.into_iter() Err(ShellError::Continue { span }) => {
.map_while(move |value| { return Some(Value::nothing(span))
let value = match value { }
Ok(value) => value, Err(ShellError::Break { .. }) => return None,
Err(ShellError::Continue { span }) => { Err(err) => return Some(Value::error(err, head)),
return Some(Value::nothing(span)) };
}
Err(ShellError::Break { .. }) => return None,
Err(err) => return Some(Value::error(err, head)),
};
let span = value.span(); let span = value.span();
let is_error = value.is_error(); let is_error = value.is_error();
match closure.run_with_value(value) { match closure
Ok(data) => Some(data.into_value(head)), .run_with_value(value)
Err(ShellError::Continue { span }) => Some(Value::nothing(span)), .and_then(|data| data.into_value(head))
Err(ShellError::Break { .. }) => None, {
Err(error) => { Ok(value) => Some(value),
let error = chain_error_with_input(error, is_error, span); Err(ShellError::Continue { span }) => Some(Value::nothing(span)),
Some(Value::error(error, span)) Err(ShellError::Break { .. }) => None,
Err(error) => {
let error = chain_error_with_input(error, is_error, span);
Some(Value::error(error, span))
}
} }
} })
}) .into_pipeline_data(head, engine_state.ctrlc.clone()))
.into_pipeline_data(head, engine_state.ctrlc.clone())) } else {
Ok(PipelineData::Empty)
}
} }
// This match allows non-iterables to be accepted, // This match allows non-iterables to be accepted,
// which is currently considered undesirable (Nov 2022). // which is currently considered undesirable (Nov 2022).

View File

@ -1,4 +1,5 @@
use nu_engine::command_prelude::*; use nu_engine::command_prelude::*;
use std::io::Read;
pub fn empty( pub fn empty(
engine_state: &EngineState, engine_state: &EngineState,
@ -36,29 +37,26 @@ pub fn empty(
} else { } else {
match input { match input {
PipelineData::Empty => Ok(PipelineData::Empty), PipelineData::Empty => Ok(PipelineData::Empty),
PipelineData::ExternalStream { stdout, .. } => match stdout { PipelineData::ByteStream(stream, ..) => {
Some(s) => { let span = stream.span();
let bytes = s.into_bytes(); match stream.reader() {
Some(reader) => {
match bytes { let is_empty = reader.bytes().next().transpose().err_span(span)?.is_none();
Ok(s) => { if negate {
if negate { Ok(Value::bool(!is_empty, head).into_pipeline_data())
Ok(Value::bool(!s.item.is_empty(), head).into_pipeline_data()) } else {
} else { Ok(Value::bool(is_empty, head).into_pipeline_data())
Ok(Value::bool(s.item.is_empty(), head).into_pipeline_data()) }
} }
None => {
if negate {
Ok(Value::bool(false, head).into_pipeline_data())
} else {
Ok(Value::bool(true, head).into_pipeline_data())
} }
Err(err) => Err(err),
} }
} }
None => { }
if negate {
Ok(Value::bool(false, head).into_pipeline_data())
} else {
Ok(Value::bool(true, head).into_pipeline_data())
}
}
},
PipelineData::ListStream(s, ..) => { PipelineData::ListStream(s, ..) => {
let empty = s.into_iter().next().is_none(); let empty = s.into_iter().next().is_none();
if negate { if negate {

View File

@ -58,33 +58,13 @@ a variable. On the other hand, the "row condition" syntax is not supported."#
| PipelineData::ListStream(..) => { | PipelineData::ListStream(..) => {
let mut closure = ClosureEval::new(engine_state, stack, closure); let mut closure = ClosureEval::new(engine_state, stack, closure);
Ok(input Ok(input
.into_iter()
.filter_map(move |value| match closure.run_with_value(value.clone()) {
Ok(pred) => pred.into_value(head).is_true().then_some(value),
Err(err) => {
let span = value.span();
let err = chain_error_with_input(err, value.is_error(), span);
Some(Value::error(err, span))
}
})
.into_pipeline_data(head, engine_state.ctrlc.clone()))
}
PipelineData::ExternalStream { stdout: None, .. } => Ok(PipelineData::empty()),
PipelineData::ExternalStream {
stdout: Some(stream),
..
} => {
let mut closure = ClosureEval::new(engine_state, stack, closure);
Ok(stream
.into_iter() .into_iter()
.filter_map(move |value| { .filter_map(move |value| {
let value = match value { match closure
Ok(value) => value, .run_with_value(value.clone())
Err(err) => return Some(Value::error(err, head)), .and_then(|data| data.into_value(head))
}; {
Ok(cond) => cond.is_true().then_some(value),
match closure.run_with_value(value.clone()) {
Ok(pred) => pred.into_value(head).is_true().then_some(value),
Err(err) => { Err(err) => {
let span = value.span(); let span = value.span();
let err = chain_error_with_input(err, value.is_error(), span); let err = chain_error_with_input(err, value.is_error(), span);
@ -94,14 +74,43 @@ a variable. On the other hand, the "row condition" syntax is not supported."#
}) })
.into_pipeline_data(head, engine_state.ctrlc.clone())) .into_pipeline_data(head, engine_state.ctrlc.clone()))
} }
PipelineData::ByteStream(stream, ..) => {
if let Some(chunks) = stream.chunks() {
let mut closure = ClosureEval::new(engine_state, stack, closure);
Ok(chunks
.into_iter()
.filter_map(move |value| {
let value = match value {
Ok(value) => value,
Err(err) => return Some(Value::error(err, head)),
};
match closure
.run_with_value(value.clone())
.and_then(|data| data.into_value(head))
{
Ok(cond) => cond.is_true().then_some(value),
Err(err) => {
let span = value.span();
let err = chain_error_with_input(err, value.is_error(), span);
Some(Value::error(err, span))
}
}
})
.into_pipeline_data(head, engine_state.ctrlc.clone()))
} else {
Ok(PipelineData::Empty)
}
}
// This match allows non-iterables to be accepted, // This match allows non-iterables to be accepted,
// which is currently considered undesirable (Nov 2022). // which is currently considered undesirable (Nov 2022).
PipelineData::Value(value, ..) => { PipelineData::Value(value, ..) => {
let result = ClosureEvalOnce::new(engine_state, stack, closure) let result = ClosureEvalOnce::new(engine_state, stack, closure)
.run_with_value(value.clone()); .run_with_value(value.clone())
.and_then(|data| data.into_value(head));
Ok(match result { Ok(match result {
Ok(pred) => pred.into_value(head).is_true().then_some(value), Ok(cond) => cond.is_true().then_some(value),
Err(err) => { Err(err) => {
let span = value.span(); let span = value.span();
let err = chain_error_with_input(err, value.is_error(), span); let err = chain_error_with_input(err, value.is_error(), span);

View File

@ -447,57 +447,35 @@ fn find_with_rest_and_highlight(
Ok(PipelineData::ListStream(stream, metadata)) Ok(PipelineData::ListStream(stream, metadata))
} }
PipelineData::ExternalStream { stdout: None, .. } => Ok(PipelineData::empty()), PipelineData::ByteStream(stream, ..) => {
PipelineData::ExternalStream { let span = stream.span();
stdout: Some(stream), if let Some(lines) = stream.lines() {
.. let terms = lower_terms
} => { .into_iter()
let mut output: Vec<Value> = vec![]; .map(|term| term.to_expanded_string("", &filter_config).to_lowercase())
for filter_val in stream { .collect::<Vec<_>>();
match filter_val {
Ok(value) => {
let span = value.span();
match value {
Value::String { val, .. } => {
let split_char = if val.contains("\r\n") { "\r\n" } else { "\n" };
for line in val.split(split_char) { let mut output: Vec<Value> = vec![];
for term in lower_terms.iter() { for line in lines {
let term_str = term.to_expanded_string("", &filter_config); let line = line?.to_lowercase();
let lower_val = line.to_lowercase(); for term in &terms {
if lower_val.contains( if line.contains(term) {
&term.to_expanded_string("", &config).to_lowercase(), output.push(Value::string(
) { highlight_search_string(
output.push(Value::string( &line,
highlight_search_string( term,
line, &string_style,
&term_str, &highlight_style,
&string_style, )?,
&highlight_style, span,
)?, ))
span,
))
}
}
}
}
// Propagate errors by explicitly matching them before the final case.
Value::Error { error, .. } => return Err(*error),
other => {
return Err(ShellError::UnsupportedInput {
msg: "unsupported type from raw stream".into(),
input: format!("input: {:?}", other.get_type()),
msg_span: span,
input_span: other.span(),
});
}
} }
} }
// Propagate any errors that were in the stream }
Err(e) => return Err(e), Ok(Value::list(output, span).into_pipeline_data())
}; } else {
Ok(PipelineData::Empty)
} }
Ok(output.into_pipeline_data(span, ctrlc))
} }
} }
} }

View File

@ -170,11 +170,11 @@ fn first_helper(
)) ))
} }
} }
PipelineData::ExternalStream { span, .. } => Err(ShellError::OnlySupportsThisInputType { PipelineData::ByteStream(stream, ..) => Err(ShellError::OnlySupportsThisInputType {
exp_input_type: "list, binary or range".into(), exp_input_type: "list, binary or range".into(),
wrong_type: "raw data".into(), wrong_type: "byte stream".into(),
dst_span: head, dst_span: head,
src_span: span, src_span: stream.span(),
}), }),
PipelineData::Empty => Err(ShellError::OnlySupportsThisInputType { PipelineData::Empty => Err(ShellError::OnlySupportsThisInputType {
exp_input_type: "list, binary or range".into(), exp_input_type: "list, binary or range".into(),

View File

@ -81,7 +81,7 @@ If multiple cell paths are given, this will produce a list of values."#
let paths = std::iter::once(cell_path).chain(rest); let paths = std::iter::once(cell_path).chain(rest);
let input = input.into_value(span); let input = input.into_value(span)?;
for path in paths { for path in paths {
let val = input.clone().follow_cell_path(&path.members, !sensitive); let val = input.clone().follow_cell_path(&path.members, !sensitive);

View File

@ -207,7 +207,7 @@ fn group_closure(
for value in values { for value in values {
let key = closure let key = closure
.run_with_value(value.clone())? .run_with_value(value.clone())?
.into_value(span) .into_value(span)?
.coerce_into_string()?; .coerce_into_string()?;
groups.entry(key).or_default().push(value); groups.entry(key).or_default().push(value);

View File

@ -66,7 +66,7 @@ impl Command for Headers {
let config = engine_state.get_config(); let config = engine_state.get_config();
let metadata = input.metadata(); let metadata = input.metadata();
let span = input.span().unwrap_or(call.head); let span = input.span().unwrap_or(call.head);
let value = input.into_value(span); let value = input.into_value(span)?;
let Value::List { vals: table, .. } = value else { let Value::List { vals: table, .. } = value else {
return Err(ShellError::TypeMismatch { return Err(ShellError::TypeMismatch {
err_message: "not a table".to_string(), err_message: "not a table".to_string(),

View File

@ -190,7 +190,7 @@ fn insert(
let value = value.unwrap_or(Value::nothing(head)); let value = value.unwrap_or(Value::nothing(head));
let new_value = ClosureEvalOnce::new(engine_state, stack, *val) let new_value = ClosureEvalOnce::new(engine_state, stack, *val)
.run_with_value(value.clone())? .run_with_value(value.clone())?
.into_value(head); .into_value(head)?;
pre_elems.push(new_value); pre_elems.push(new_value);
if !end_of_stream { if !end_of_stream {
@ -261,8 +261,8 @@ fn insert(
type_name: "empty pipeline".to_string(), type_name: "empty pipeline".to_string(),
span: head, span: head,
}), }),
PipelineData::ExternalStream { .. } => Err(ShellError::IncompatiblePathAccess { PipelineData::ByteStream(..) => Err(ShellError::IncompatiblePathAccess {
type_name: "external stream".to_string(), type_name: "byte stream".to_string(),
span: head, span: head,
}), }),
} }
@ -284,7 +284,7 @@ fn insert_value_by_closure(
value.clone() value.clone()
}; };
let new_value = closure.run_with_value(value_at_path)?.into_value(span); let new_value = closure.run_with_value(value_at_path)?.into_value(span)?;
value.insert_data_at_cell_path(cell_path, new_value, span) value.insert_data_at_cell_path(cell_path, new_value, span)
} }
@ -304,7 +304,7 @@ fn insert_single_value_by_closure(
value.clone() value.clone()
}; };
let new_value = closure.run_with_value(value_at_path)?.into_value(span); let new_value = closure.run_with_value(value_at_path)?.into_value(span)?;
value.insert_data_at_cell_path(cell_path, new_value, span) value.insert_data_at_cell_path(cell_path, new_value, span)
} }

View File

@ -55,10 +55,11 @@ impl Command for Items {
let result = closure let result = closure
.add_arg(Value::string(col, span)) .add_arg(Value::string(col, span))
.add_arg(val) .add_arg(val)
.run_with_input(PipelineData::Empty); .run_with_input(PipelineData::Empty)
.and_then(|data| data.into_value(head));
match result { match result {
Ok(data) => Some(data.into_value(head)), Ok(value) => Some(value),
Err(ShellError::Break { .. }) => None, Err(ShellError::Break { .. }) => None,
Err(err) => { Err(err) => {
let err = chain_error_with_input(err, false, span); let err = chain_error_with_input(err, false, span);
@ -77,20 +78,18 @@ impl Command for Items {
}), }),
} }
} }
PipelineData::ListStream(..) => Err(ShellError::OnlySupportsThisInputType { PipelineData::ListStream(stream, ..) => Err(ShellError::OnlySupportsThisInputType {
exp_input_type: "record".into(), exp_input_type: "record".into(),
wrong_type: "stream".into(), wrong_type: "stream".into(),
dst_span: head, dst_span: call.head,
src_span: head, src_span: stream.span(),
}),
PipelineData::ByteStream(stream, ..) => Err(ShellError::OnlySupportsThisInputType {
exp_input_type: "record".into(),
wrong_type: "byte stream".into(),
dst_span: call.head,
src_span: stream.span(),
}), }),
PipelineData::ExternalStream { span, .. } => {
Err(ShellError::OnlySupportsThisInputType {
exp_input_type: "record".into(),
wrong_type: "raw data".into(),
dst_span: head,
src_span: span,
})
}
} }
.map(|data| data.set_metadata(metadata)) .map(|data| data.set_metadata(metadata))
} }

View File

@ -75,7 +75,7 @@ impl Command for Join {
let join_type = join_type(engine_state, stack, call)?; let join_type = join_type(engine_state, stack, call)?;
// FIXME: we should handle ListStreams properly instead of collecting // FIXME: we should handle ListStreams properly instead of collecting
let collected_input = input.into_value(span); let collected_input = input.into_value(span)?;
match (&collected_input, &table_2, &l_on, &r_on) { match (&collected_input, &table_2, &l_on, &r_on) {
( (

View File

@ -160,14 +160,12 @@ impl Command for Last {
}), }),
} }
} }
PipelineData::ExternalStream { span, .. } => { PipelineData::ByteStream(stream, ..) => Err(ShellError::OnlySupportsThisInputType {
Err(ShellError::OnlySupportsThisInputType { exp_input_type: "list, binary or range".into(),
exp_input_type: "list, binary or range".into(), wrong_type: "byte stream".into(),
wrong_type: "raw data".into(), dst_span: head,
dst_span: head, src_span: stream.span(),
src_span: span, }),
})
}
PipelineData::Empty => Err(ShellError::OnlySupportsThisInputType { PipelineData::Empty => Err(ShellError::OnlySupportsThisInputType {
exp_input_type: "list, binary or range".into(), exp_input_type: "list, binary or range".into(),
wrong_type: "null".into(), wrong_type: "null".into(),

View File

@ -1,6 +1,4 @@
use nu_engine::command_prelude::*; use nu_engine::command_prelude::*;
use nu_protocol::RawStream;
use std::collections::VecDeque;
#[derive(Clone)] #[derive(Clone)]
pub struct Lines; pub struct Lines;
@ -33,23 +31,33 @@ impl Command for Lines {
let span = input.span().unwrap_or(call.head); let span = input.span().unwrap_or(call.head);
match input { match input {
PipelineData::Value(Value::String { val, .. }, ..) => { PipelineData::Value(value, ..) => match value {
let lines = if skip_empty { Value::String { val, .. } => {
val.lines() let lines = if skip_empty {
.filter_map(|s| { val.lines()
if s.trim().is_empty() { .filter_map(|s| {
None if s.trim().is_empty() {
} else { None
Some(Value::string(s, span)) } else {
} Some(Value::string(s, span))
}) }
.collect() })
} else { .collect()
val.lines().map(|s| Value::string(s, span)).collect() } else {
}; val.lines().map(|s| Value::string(s, span)).collect()
};
Ok(Value::list(lines, span).into_pipeline_data()) Ok(Value::list(lines, span).into_pipeline_data())
} }
// Propagate existing errors
Value::Error { error, .. } => Err(*error),
value => Err(ShellError::OnlySupportsThisInputType {
exp_input_type: "string or byte stream".into(),
wrong_type: value.get_type().to_string(),
dst_span: head,
src_span: value.span(),
}),
},
PipelineData::Empty => Ok(PipelineData::Empty), PipelineData::Empty => Ok(PipelineData::Empty),
PipelineData::ListStream(stream, metadata) => { PipelineData::ListStream(stream, metadata) => {
let stream = stream.modify(|iter| { let stream = stream.modify(|iter| {
@ -76,27 +84,18 @@ impl Command for Lines {
Ok(PipelineData::ListStream(stream, metadata)) Ok(PipelineData::ListStream(stream, metadata))
} }
PipelineData::Value(val, ..) => { PipelineData::ByteStream(stream, ..) => {
match val { if let Some(lines) = stream.lines() {
// Propagate existing errors Ok(lines
Value::Error { error, .. } => Err(*error), .map(move |line| match line {
_ => Err(ShellError::OnlySupportsThisInputType { Ok(line) => Value::string(line, head),
exp_input_type: "string or raw data".into(), Err(err) => Value::error(err, head),
wrong_type: val.get_type().to_string(), })
dst_span: head, .into_pipeline_data(head, ctrlc))
src_span: val.span(), } else {
}), Ok(PipelineData::empty())
} }
} }
PipelineData::ExternalStream { stdout: None, .. } => Ok(PipelineData::empty()),
PipelineData::ExternalStream {
stdout: Some(stream),
metadata,
..
} => Ok(RawStreamLinesAdapter::new(stream, head, skip_empty)
.map(move |x| x.unwrap_or_else(|err| Value::error(err, head)))
.into_pipeline_data(head, ctrlc)
.set_metadata(metadata)),
} }
} }
@ -112,108 +111,6 @@ impl Command for Lines {
} }
} }
#[derive(Debug)]
struct RawStreamLinesAdapter {
inner: RawStream,
inner_complete: bool,
skip_empty: bool,
span: Span,
incomplete_line: String,
queue: VecDeque<String>,
}
impl Iterator for RawStreamLinesAdapter {
    type Item = Result<Value, ShellError>;

    /// Yields one completed line per call, pulling more chunks from `inner`
    /// only when the queue of ready lines is empty.
    fn next(&mut self) -> Option<Self::Item> {
        loop {
            // Drain already-completed lines first.
            if let Some(s) = self.queue.pop_front() {
                if self.skip_empty && s.trim().is_empty() {
                    continue;
                }
                return Some(Ok(Value::string(s, self.span)));
            } else {
                // inner is complete, feed out remaining state
                if self.inner_complete {
                    // Flush the final unterminated line, if any, exactly once
                    // (`mem::take` leaves it empty so the next call returns None).
                    return if self.incomplete_line.is_empty() {
                        None
                    } else {
                        Some(Ok(Value::string(
                            std::mem::take(&mut self.incomplete_line),
                            self.span,
                        )))
                    };
                }
                // pull more data from inner
                if let Some(result) = self.inner.next() {
                    match result {
                        Ok(v) => {
                            let span = v.span();
                            match v {
                                // TODO: Value::Binary support required?
                                Value::String { val, .. } => {
                                    self.span = span;
                                    let mut lines = val.lines();
                                    // handle incomplete line from previous
                                    // chunk: join it with the first line of
                                    // this chunk before queueing.
                                    if !self.incomplete_line.is_empty() {
                                        if let Some(first) = lines.next() {
                                            self.incomplete_line.push_str(first);
                                            self.queue.push_back(std::mem::take(
                                                &mut self.incomplete_line,
                                            ));
                                        }
                                    }
                                    // save completed lines
                                    self.queue.extend(lines.map(String::from));
                                    if !val.ends_with('\n') {
                                        // incomplete line, save for next time
                                        // if `val` and `incomplete_line` were empty,
                                        // then pop will return none
                                        if let Some(s) = self.queue.pop_back() {
                                            self.incomplete_line = s;
                                        }
                                    }
                                }
                                // Propagate errors by explicitly matching them before the final case.
                                Value::Error { error, .. } => return Some(Err(*error)),
                                other => {
                                    return Some(Err(ShellError::OnlySupportsThisInputType {
                                        exp_input_type: "string".into(),
                                        wrong_type: other.get_type().to_string(),
                                        dst_span: self.span,
                                        src_span: other.span(),
                                    }));
                                }
                            }
                        }
                        Err(err) => return Some(Err(err)),
                    }
                } else {
                    // Stream exhausted; next loop iteration flushes leftovers.
                    self.inner_complete = true;
                }
            }
        }
    }
}
impl RawStreamLinesAdapter {
    /// Wraps `inner` so that iteration yields one line at a time. When
    /// `skip_empty` is set, lines that are blank after trimming are dropped.
    pub fn new(inner: RawStream, span: Span, skip_empty: bool) -> Self {
        Self {
            inner,
            inner_complete: false,
            skip_empty,
            span,
            incomplete_line: String::new(),
            queue: VecDeque::new(),
        }
    }
}
#[cfg(test)] #[cfg(test)]
mod test { mod test {
use super::*; use super::*;

View File

@ -143,17 +143,16 @@ impl Command for ParEach {
.map(move |(index, value)| { .map(move |(index, value)| {
let span = value.span(); let span = value.span();
let is_error = value.is_error(); let is_error = value.is_error();
let result = let value =
ClosureEvalOnce::new(engine_state, stack, closure.clone()) ClosureEvalOnce::new(engine_state, stack, closure.clone())
.run_with_value(value); .run_with_value(value)
.and_then(|data| data.into_value(span))
let value = match result { .unwrap_or_else(|err| {
Ok(data) => data.into_value(span), Value::error(
Err(err) => Value::error( chain_error_with_input(err, is_error, span),
chain_error_with_input(err, is_error, span), span,
span, )
), });
};
(index, value) (index, value)
}) })
@ -170,17 +169,16 @@ impl Command for ParEach {
.map(move |(index, value)| { .map(move |(index, value)| {
let span = value.span(); let span = value.span();
let is_error = value.is_error(); let is_error = value.is_error();
let result = let value =
ClosureEvalOnce::new(engine_state, stack, closure.clone()) ClosureEvalOnce::new(engine_state, stack, closure.clone())
.run_with_value(value); .run_with_value(value)
.and_then(|data| data.into_value(span))
let value = match result { .unwrap_or_else(|err| {
Ok(data) => data.into_value(span), Value::error(
Err(err) => Value::error( chain_error_with_input(err, is_error, span),
chain_error_with_input(err, is_error, span), span,
span, )
), });
};
(index, value) (index, value)
}) })
@ -203,40 +201,12 @@ impl Command for ParEach {
.map(move |(index, value)| { .map(move |(index, value)| {
let span = value.span(); let span = value.span();
let is_error = value.is_error(); let is_error = value.is_error();
let result = ClosureEvalOnce::new(engine_state, stack, closure.clone())
.run_with_value(value);
let value = match result {
Ok(data) => data.into_value(head),
Err(err) => {
Value::error(chain_error_with_input(err, is_error, span), span)
}
};
(index, value)
})
.collect::<Vec<_>>();
apply_order(vec).into_pipeline_data(head, engine_state.ctrlc.clone())
})),
PipelineData::ExternalStream { stdout: None, .. } => Ok(PipelineData::empty()),
PipelineData::ExternalStream {
stdout: Some(stream),
..
} => Ok(create_pool(max_threads)?.install(|| {
let vec = stream
.enumerate()
.par_bridge()
.map(move |(index, value)| {
let value = match value {
Ok(value) => value,
Err(err) => return (index, Value::error(err, head)),
};
let value = ClosureEvalOnce::new(engine_state, stack, closure.clone()) let value = ClosureEvalOnce::new(engine_state, stack, closure.clone())
.run_with_value(value) .run_with_value(value)
.map(|data| data.into_value(head)) .and_then(|data| data.into_value(head))
.unwrap_or_else(|err| Value::error(err, head)); .unwrap_or_else(|err| {
Value::error(chain_error_with_input(err, is_error, span), span)
});
(index, value) (index, value)
}) })
@ -244,6 +214,34 @@ impl Command for ParEach {
apply_order(vec).into_pipeline_data(head, engine_state.ctrlc.clone()) apply_order(vec).into_pipeline_data(head, engine_state.ctrlc.clone())
})), })),
PipelineData::ByteStream(stream, ..) => {
if let Some(chunks) = stream.chunks() {
Ok(create_pool(max_threads)?.install(|| {
let vec = chunks
.enumerate()
.par_bridge()
.map(move |(index, value)| {
let value = match value {
Ok(value) => value,
Err(err) => return (index, Value::error(err, head)),
};
let value =
ClosureEvalOnce::new(engine_state, stack, closure.clone())
.run_with_value(value)
.and_then(|data| data.into_value(head))
.unwrap_or_else(|err| Value::error(err, head));
(index, value)
})
.collect::<Vec<_>>();
apply_order(vec).into_pipeline_data(head, engine_state.ctrlc.clone())
}))
} else {
Ok(PipelineData::empty())
}
}
} }
.and_then(|x| x.filter(|v| !v.is_nothing(), engine_state.ctrlc.clone())) .and_then(|x| x.filter(|v| !v.is_nothing(), engine_state.ctrlc.clone()))
.map(|data| data.set_metadata(metadata)) .map(|data| data.set_metadata(metadata))

View File

@ -115,7 +115,7 @@ impl Command for Reduce {
.add_arg(value) .add_arg(value)
.add_arg(acc) .add_arg(acc)
.run_with_input(PipelineData::Empty)? .run_with_input(PipelineData::Empty)?
.into_value(head); .into_value(head)?;
} }
Ok(acc.with_span(head).into_pipeline_data()) Ok(acc.with_span(head).into_pipeline_data())

View File

@ -173,7 +173,7 @@ fn reject(
) -> Result<PipelineData, ShellError> { ) -> Result<PipelineData, ShellError> {
let mut unique_rows: HashSet<usize> = HashSet::new(); let mut unique_rows: HashSet<usize> = HashSet::new();
let metadata = input.metadata(); let metadata = input.metadata();
let val = input.into_value(span); let val = input.into_value(span)?;
let mut val = val; let mut val = val;
let mut new_columns = vec![]; let mut new_columns = vec![];
let mut new_rows = vec![]; let mut new_rows = vec![];

View File

@ -87,15 +87,14 @@ impl Command for Skip {
let ctrlc = engine_state.ctrlc.clone(); let ctrlc = engine_state.ctrlc.clone();
let input_span = input.span().unwrap_or(call.head); let input_span = input.span().unwrap_or(call.head);
match input { match input {
PipelineData::ExternalStream { .. } => Err(ShellError::OnlySupportsThisInputType { PipelineData::ByteStream(stream, ..) => Err(ShellError::OnlySupportsThisInputType {
exp_input_type: "list, binary or range".into(), exp_input_type: "list, binary or range".into(),
wrong_type: "raw data".into(), wrong_type: "byte stream".into(),
dst_span: call.head, dst_span: call.head,
src_span: input_span, src_span: stream.span(),
}), }),
PipelineData::Value(Value::Binary { val, .. }, metadata) => { PipelineData::Value(Value::Binary { val, .. }, metadata) => {
let bytes = val.into_iter().skip(n).collect::<Vec<_>>(); let bytes = val.into_iter().skip(n).collect::<Vec<_>>();
Ok(Value::binary(bytes, input_span).into_pipeline_data_with_metadata(metadata)) Ok(Value::binary(bytes, input_span).into_pipeline_data_with_metadata(metadata))
} }
_ => Ok(input _ => Ok(input

View File

@ -85,7 +85,8 @@ impl Command for SkipUntil {
.skip_while(move |value| { .skip_while(move |value| {
closure closure
.run_with_value(value.clone()) .run_with_value(value.clone())
.map(|data| data.into_value(head).is_false()) .and_then(|data| data.into_value(head))
.map(|cond| cond.is_false())
.unwrap_or(false) .unwrap_or(false)
}) })
.into_pipeline_data_with_metadata(head, engine_state.ctrlc.clone(), metadata)) .into_pipeline_data_with_metadata(head, engine_state.ctrlc.clone(), metadata))

View File

@ -90,7 +90,8 @@ impl Command for SkipWhile {
.skip_while(move |value| { .skip_while(move |value| {
closure closure
.run_with_value(value.clone()) .run_with_value(value.clone())
.map(|data| data.into_value(head).is_true()) .and_then(|data| data.into_value(head))
.map(|cond| cond.is_true())
.unwrap_or(false) .unwrap_or(false)
}) })
.into_pipeline_data_with_metadata(head, engine_state.ctrlc.clone(), metadata)) .into_pipeline_data_with_metadata(head, engine_state.ctrlc.clone(), metadata))

View File

@ -78,14 +78,12 @@ impl Command for Take {
stream.modify(|iter| iter.take(rows_desired)), stream.modify(|iter| iter.take(rows_desired)),
metadata, metadata,
)), )),
PipelineData::ExternalStream { span, .. } => { PipelineData::ByteStream(stream, ..) => Err(ShellError::OnlySupportsThisInputType {
Err(ShellError::OnlySupportsThisInputType { exp_input_type: "list, binary or range".into(),
exp_input_type: "list, binary or range".into(), wrong_type: "byte stream".into(),
wrong_type: "raw data".into(), dst_span: head,
dst_span: head, src_span: stream.span(),
src_span: span, }),
})
}
PipelineData::Empty => Err(ShellError::OnlySupportsThisInputType { PipelineData::Empty => Err(ShellError::OnlySupportsThisInputType {
exp_input_type: "list, binary or range".into(), exp_input_type: "list, binary or range".into(),
wrong_type: "null".into(), wrong_type: "null".into(),

View File

@ -81,7 +81,8 @@ impl Command for TakeUntil {
.take_while(move |value| { .take_while(move |value| {
closure closure
.run_with_value(value.clone()) .run_with_value(value.clone())
.map(|data| data.into_value(head).is_false()) .and_then(|data| data.into_value(head))
.map(|cond| cond.is_false())
.unwrap_or(false) .unwrap_or(false)
}) })
.into_pipeline_data_with_metadata(head, engine_state.ctrlc.clone(), metadata)) .into_pipeline_data_with_metadata(head, engine_state.ctrlc.clone(), metadata))

View File

@ -81,7 +81,8 @@ impl Command for TakeWhile {
.take_while(move |value| { .take_while(move |value| {
closure closure
.run_with_value(value.clone()) .run_with_value(value.clone())
.map(|data| data.into_value(head).is_true()) .and_then(|data| data.into_value(head))
.map(|cond| cond.is_true())
.unwrap_or(false) .unwrap_or(false)
}) })
.into_pipeline_data_with_metadata(head, engine_state.ctrlc.clone(), metadata)) .into_pipeline_data_with_metadata(head, engine_state.ctrlc.clone(), metadata))

View File

@ -1,6 +1,17 @@
use nu_engine::{command_prelude::*, get_eval_block_with_early_return}; use nu_engine::{command_prelude::*, get_eval_block_with_early_return};
use nu_protocol::{engine::Closure, OutDest, RawStream}; use nu_protocol::{
use std::{sync::mpsc, thread}; byte_stream::copy_with_interrupt, engine::Closure, process::ChildPipe, ByteStream,
ByteStreamSource, OutDest,
};
use std::{
io::{self, Read, Write},
sync::{
atomic::AtomicBool,
mpsc::{self, Sender},
Arc,
},
thread::{self, JoinHandle},
};
#[derive(Clone)] #[derive(Clone)]
pub struct Tee; pub struct Tee;
@ -67,138 +78,205 @@ use it in your pipeline."#
let head = call.head; let head = call.head;
let use_stderr = call.has_flag(engine_state, stack, "stderr")?; let use_stderr = call.has_flag(engine_state, stack, "stderr")?;
let Spanned { let closure: Spanned<Closure> = call.req(engine_state, stack, 0)?;
item: Closure { block_id, captures }, let closure_span = closure.span;
span: closure_span, let closure = closure.item;
} = call.req(engine_state, stack, 0)?;
let closure_engine_state = engine_state.clone(); let mut eval_block = {
let mut closure_stack = stack let closure_engine_state = engine_state.clone();
.captures_to_stack_preserve_out_dest(captures) let mut closure_stack = stack
.reset_pipes(); .captures_to_stack_preserve_out_dest(closure.captures)
.reset_pipes();
let eval_block_with_early_return = get_eval_block_with_early_return(engine_state);
let metadata = input.metadata(); move |input| {
let metadata_clone = metadata.clone(); let result = eval_block_with_early_return(
&closure_engine_state,
&mut closure_stack,
closure_engine_state.get_block(closure.block_id),
input,
);
// Make sure to drain any iterator produced to avoid unexpected behavior
result.and_then(|data| data.drain().map(|_| ()))
}
};
let eval_block_with_early_return = get_eval_block_with_early_return(engine_state); if let PipelineData::ByteStream(stream, metadata) = input {
let span = stream.span();
let ctrlc = engine_state.ctrlc.clone();
let eval_block = {
let metadata = metadata.clone();
move |stream| eval_block(PipelineData::ByteStream(stream, metadata))
};
match input { match stream.into_source() {
// Handle external streams specially, to make sure they pass through ByteStreamSource::Read(read) => {
PipelineData::ExternalStream { if use_stderr {
stdout, return stderr_misuse(span, head);
stderr, }
exit_code,
span,
metadata,
trim_end_newline,
} => {
let known_size = if use_stderr {
stderr.as_ref().and_then(|s| s.known_size)
} else {
stdout.as_ref().and_then(|s| s.known_size)
};
let with_stream = move |rx: mpsc::Receiver<Result<Vec<u8>, ShellError>>| { let tee = IoTee::new(read, span, eval_block)?;
let iter = rx.into_iter();
let input_from_channel = PipelineData::ExternalStream { Ok(PipelineData::ByteStream(
stdout: Some(RawStream::new( ByteStream::read(tee, span, ctrlc),
Box::new(iter), metadata,
closure_engine_state.ctrlc.clone(), ))
span, }
known_size, ByteStreamSource::File(file) => {
)), if use_stderr {
stderr: None, return stderr_misuse(span, head);
exit_code: None, }
span,
metadata: metadata_clone, let tee = IoTee::new(file, span, eval_block)?;
trim_end_newline,
Ok(PipelineData::ByteStream(
ByteStream::read(tee, span, ctrlc),
metadata,
))
}
ByteStreamSource::Child(mut child) => {
let stderr_thread = if use_stderr {
let stderr_thread = if let Some(stderr) = child.stderr.take() {
match stack.stderr() {
OutDest::Pipe | OutDest::Capture => {
let tee = IoTee::new(stderr, span, eval_block)?;
child.stderr = Some(ChildPipe::Tee(Box::new(tee)));
None
}
OutDest::Null => Some(tee_pipe_on_thread(
stderr,
io::sink(),
span,
ctrlc.as_ref(),
eval_block,
)?),
OutDest::Inherit => Some(tee_pipe_on_thread(
stderr,
io::stderr(),
span,
ctrlc.as_ref(),
eval_block,
)?),
OutDest::File(file) => Some(tee_pipe_on_thread(
stderr,
file.clone(),
span,
ctrlc.as_ref(),
eval_block,
)?),
}
} else {
None
};
if let Some(stdout) = child.stdout.take() {
match stack.stdout() {
OutDest::Pipe | OutDest::Capture => {
child.stdout = Some(stdout);
Ok(())
}
OutDest::Null => {
copy_pipe(stdout, io::sink(), span, ctrlc.as_deref())
}
OutDest::Inherit => {
copy_pipe(stdout, io::stdout(), span, ctrlc.as_deref())
}
OutDest::File(file) => {
copy_pipe(stdout, file.as_ref(), span, ctrlc.as_deref())
}
}?;
}
stderr_thread
} else {
let stderr_thread = if let Some(stderr) = child.stderr.take() {
match stack.stderr() {
OutDest::Pipe | OutDest::Capture => {
child.stderr = Some(stderr);
Ok(None)
}
OutDest::Null => {
copy_pipe_on_thread(stderr, io::sink(), span, ctrlc.as_ref())
.map(Some)
}
OutDest::Inherit => {
copy_pipe_on_thread(stderr, io::stderr(), span, ctrlc.as_ref())
.map(Some)
}
OutDest::File(file) => {
copy_pipe_on_thread(stderr, file.clone(), span, ctrlc.as_ref())
.map(Some)
}
}?
} else {
None
};
if let Some(stdout) = child.stdout.take() {
match stack.stdout() {
OutDest::Pipe | OutDest::Capture => {
let tee = IoTee::new(stdout, span, eval_block)?;
child.stdout = Some(ChildPipe::Tee(Box::new(tee)));
Ok(())
}
OutDest::Null => {
tee_pipe(stdout, io::sink(), span, ctrlc.as_deref(), eval_block)
}
OutDest::Inherit => tee_pipe(
stdout,
io::stdout(),
span,
ctrlc.as_deref(),
eval_block,
),
OutDest::File(file) => tee_pipe(
stdout,
file.as_ref(),
span,
ctrlc.as_deref(),
eval_block,
),
}?;
}
stderr_thread
}; };
let result = eval_block_with_early_return(
&closure_engine_state,
&mut closure_stack,
closure_engine_state.get_block(block_id),
input_from_channel,
);
// Make sure to drain any iterator produced to avoid unexpected behavior
result.and_then(|data| data.drain())
};
if use_stderr { if child.stdout.is_some() || child.stderr.is_some() {
let stderr = stderr Ok(PipelineData::ByteStream(
.map(|stderr| { ByteStream::child(*child, span),
let iter = tee(stderr.stream, with_stream).err_span(head)?; metadata,
Ok::<_, ShellError>(RawStream::new( ))
Box::new(iter.map(flatten_result)), } else {
stderr.ctrlc, if let Some(thread) = stderr_thread {
stderr.span, thread.join().unwrap_or_else(|_| Err(panic_error()))?;
stderr.known_size, }
)) child.wait()?;
}) Ok(PipelineData::Empty)
.transpose()?; }
Ok(PipelineData::ExternalStream {
stdout,
stderr,
exit_code,
span,
metadata,
trim_end_newline,
})
} else {
let stdout = stdout
.map(|stdout| {
let iter = tee(stdout.stream, with_stream).err_span(head)?;
Ok::<_, ShellError>(RawStream::new(
Box::new(iter.map(flatten_result)),
stdout.ctrlc,
stdout.span,
stdout.known_size,
))
})
.transpose()?;
Ok(PipelineData::ExternalStream {
stdout,
stderr,
exit_code,
span,
metadata,
trim_end_newline,
})
} }
} }
// --stderr is not allowed if the input is not an external stream } else {
_ if use_stderr => Err(ShellError::UnsupportedInput { if use_stderr {
msg: "--stderr can only be used on external streams".into(), return stderr_misuse(input.span().unwrap_or(head), head);
input: "the input to `tee` is not an external stream".into(),
msg_span: head,
input_span: input.span().unwrap_or(head),
}),
// Handle others with the plain iterator
_ => {
let teed = tee(input.into_iter(), move |rx| {
let input_from_channel = rx.into_pipeline_data_with_metadata(
head,
closure_engine_state.ctrlc.clone(),
metadata_clone,
);
let result = eval_block_with_early_return(
&closure_engine_state,
&mut closure_stack,
closure_engine_state.get_block(block_id),
input_from_channel,
);
// Make sure to drain any iterator produced to avoid unexpected behavior
result.and_then(|data| data.drain())
})
.err_span(head)?
.map(move |result| result.unwrap_or_else(|err| Value::error(err, closure_span)))
.into_pipeline_data_with_metadata(
head,
engine_state.ctrlc.clone(),
metadata,
);
Ok(teed)
} }
let span = input.span().unwrap_or(head);
let ctrlc = engine_state.ctrlc.clone();
let metadata = input.metadata();
let metadata_clone = metadata.clone();
Ok(tee(input.into_iter(), move |rx| {
let input = rx.into_pipeline_data_with_metadata(span, ctrlc, metadata_clone);
eval_block(input)
})
.err_span(call.head)?
.map(move |result| result.unwrap_or_else(|err| Value::error(err, closure_span)))
.into_pipeline_data_with_metadata(
span,
engine_state.ctrlc.clone(),
metadata,
))
} }
} }
@ -213,10 +291,6 @@ fn panic_error() -> ShellError {
} }
} }
fn flatten_result<T, E>(result: Result<Result<T, E>, E>) -> Result<T, E> {
result.unwrap_or_else(Err)
}
/// Copies the iterator to a channel on another thread. If an error is produced on that thread, /// Copies the iterator to a channel on another thread. If an error is produced on that thread,
/// it is embedded in the resulting iterator as an `Err` as soon as possible. When the iterator /// it is embedded in the resulting iterator as an `Err` as soon as possible. When the iterator
/// finishes, it waits for the other thread to finish, also handling any error produced at that /// finishes, it waits for the other thread to finish, also handling any error produced at that
@ -233,7 +307,7 @@ where
let mut thread = Some( let mut thread = Some(
thread::Builder::new() thread::Builder::new()
.name("stderr consumer".into()) .name("tee".into())
.spawn(move || with_cloned_stream(rx))?, .spawn(move || with_cloned_stream(rx))?,
); );
@ -273,6 +347,134 @@ where
})) }))
} }
/// Builds the error returned when `--stderr` is passed but the pipeline input
/// is not an external command (only external commands carry a stderr stream).
///
/// `span` points at the offending input; `head` points at the `tee` call.
fn stderr_misuse<T>(span: Span, head: Span) -> Result<T, ShellError> {
    Err(ShellError::UnsupportedInput {
        msg: "--stderr can only be used on external commands".into(),
        // Fixed grammar: "an external commands" -> "an external command".
        input: "the input to `tee` is not an external command".into(),
        msg_span: head,
        input_span: span,
    })
}
/// A `Read` adapter that forwards every chunk read from `reader` to a
/// background thread (via `sender`) running the tee closure, while also
/// returning the same bytes to the caller.
struct IoTee<R: Read> {
    reader: R,
    // Channel to the closure thread; set to `None` at EOF (or when the
    // receiver hangs up) to signal end-of-stream to the closure.
    sender: Option<Sender<Vec<u8>>>,
    // Handle for the closure thread; joined at EOF (or when it finishes
    // early) so that errors from the closure propagate to the reader.
    thread: Option<JoinHandle<Result<(), ShellError>>>,
}
impl<R: Read> IoTee<R> {
    /// Creates a tee over `reader`: bytes read through the returned value are
    /// simultaneously streamed to `eval_block`, which runs on its own thread.
    fn new(
        reader: R,
        span: Span,
        eval_block: impl FnOnce(ByteStream) -> Result<(), ShellError> + Send + 'static,
    ) -> Result<Self, ShellError> {
        let (tx, rx) = mpsc::channel();
        let worker = move || eval_block(ByteStream::from_iter(rx, span, None));
        let handle = thread::Builder::new()
            .name("tee".into())
            .spawn(worker)
            .err_span(span)?;
        Ok(Self {
            reader,
            sender: Some(tx),
            thread: Some(handle),
        })
    }
}
impl<R: Read> Read for IoTee<R> {
    /// Reads from the inner reader and mirrors the bytes to the tee thread.
    ///
    /// Errors from the tee closure are surfaced here as `io::Error`s: before
    /// each read if the thread already finished, and unconditionally (via a
    /// join) once the inner reader reaches EOF.
    fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> {
        // If the closure thread finished early, propagate its error now.
        if let Some(thread) = self.thread.take() {
            if thread.is_finished() {
                if let Err(err) = thread.join().unwrap_or_else(|_| Err(panic_error())) {
                    return Err(io::Error::new(io::ErrorKind::Other, err));
                }
            } else {
                // Still running: put the handle back for later.
                self.thread = Some(thread)
            }
        }
        let len = self.reader.read(buf)?;
        if len == 0 {
            // EOF: drop the sender (signals end-of-stream to the closure),
            // then join the thread so its result is observed.
            self.sender = None;
            if let Some(thread) = self.thread.take() {
                if let Err(err) = thread.join().unwrap_or_else(|_| Err(panic_error())) {
                    return Err(io::Error::new(io::ErrorKind::Other, err));
                }
            }
        } else if let Some(sender) = self.sender.as_mut() {
            // A send failure means the receiver hung up; stop mirroring but
            // keep serving reads to the caller.
            if sender.send(buf[..len].to_vec()).is_err() {
                self.sender = None;
            }
        }
        Ok(len)
    }
}
/// Drains `pipe` into `dest`, teeing every byte through `eval_block` (run on
/// a separate thread). The copy honors ctrl-c via `copy_with_interrupt`.
fn tee_pipe(
    pipe: ChildPipe,
    mut dest: impl Write,
    span: Span,
    ctrlc: Option<&AtomicBool>,
    eval_block: impl FnOnce(ByteStream) -> Result<(), ShellError> + Send + 'static,
) -> Result<(), ShellError> {
    // The two pipe variants hold different reader types, so each arm builds
    // its own `IoTee` before performing the interruptible copy.
    match pipe {
        ChildPipe::Pipe(inner) => {
            let mut source = IoTee::new(inner, span, eval_block)?;
            copy_with_interrupt(&mut source, &mut dest, span, ctrlc)?;
        }
        ChildPipe::Tee(inner) => {
            let mut source = IoTee::new(inner, span, eval_block)?;
            copy_with_interrupt(&mut source, &mut dest, span, ctrlc)?;
        }
    }
    Ok(())
}
/// Like [`tee_pipe`], but runs on its own thread so a stderr stream can be
/// consumed concurrently with stdout. Returns the worker's join handle.
fn tee_pipe_on_thread(
    pipe: ChildPipe,
    dest: impl Write + Send + 'static,
    span: Span,
    ctrlc: Option<&Arc<AtomicBool>>,
    eval_block: impl FnOnce(ByteStream) -> Result<(), ShellError> + Send + 'static,
) -> Result<JoinHandle<Result<(), ShellError>>, ShellError> {
    let interrupt = ctrlc.cloned();
    let worker = move || tee_pipe(pipe, dest, span, interrupt.as_deref(), eval_block);
    thread::Builder::new()
        .name("stderr tee".into())
        .spawn(worker)
        .map_err(|err| err.into_spanned(span).into())
}
/// Drains `pipe` into `dest` without teeing, honoring ctrl-c via
/// `copy_with_interrupt`.
fn copy_pipe(
    pipe: ChildPipe,
    mut dest: impl Write,
    span: Span,
    ctrlc: Option<&AtomicBool>,
) -> Result<(), ShellError> {
    // Byte count from the copy is not needed; only propagate errors.
    let _ = match pipe {
        ChildPipe::Pipe(mut inner) => copy_with_interrupt(&mut inner, &mut dest, span, ctrlc)?,
        ChildPipe::Tee(mut inner) => copy_with_interrupt(&mut inner, &mut dest, span, ctrlc)?,
    };
    Ok(())
}
/// Like [`copy_pipe`], but runs on its own thread so the copy can proceed
/// concurrently with other stream handling. Returns the worker's join handle.
fn copy_pipe_on_thread(
    pipe: ChildPipe,
    dest: impl Write + Send + 'static,
    span: Span,
    ctrlc: Option<&Arc<AtomicBool>>,
) -> Result<JoinHandle<Result<(), ShellError>>, ShellError> {
    let interrupt = ctrlc.cloned();
    let worker = move || copy_pipe(pipe, dest, span, interrupt.as_deref());
    thread::Builder::new()
        .name("stderr copier".into())
        .spawn(worker)
        .map_err(|err| err.into_spanned(span).into())
}
#[test] #[test]
fn tee_copies_values_to_other_thread_and_passes_them_through() { fn tee_copies_values_to_other_thread_and_passes_them_through() {
let (tx, rx) = mpsc::channel(); let (tx, rx) = mpsc::channel();

View File

@ -225,8 +225,8 @@ fn update(
type_name: "empty pipeline".to_string(), type_name: "empty pipeline".to_string(),
span: head, span: head,
}), }),
PipelineData::ExternalStream { .. } => Err(ShellError::IncompatiblePathAccess { PipelineData::ByteStream(..) => Err(ShellError::IncompatiblePathAccess {
type_name: "external stream".to_string(), type_name: "byte stream".to_string(),
span: head, span: head,
}), }),
} }
@ -250,7 +250,7 @@ fn update_value_by_closure(
let new_value = closure let new_value = closure
.add_arg(arg.clone()) .add_arg(arg.clone())
.run_with_input(value_at_path.into_pipeline_data())? .run_with_input(value_at_path.into_pipeline_data())?
.into_value(span); .into_value(span)?;
value.update_data_at_cell_path(cell_path, new_value) value.update_data_at_cell_path(cell_path, new_value)
} }
@ -273,7 +273,7 @@ fn update_single_value_by_closure(
let new_value = closure let new_value = closure
.add_arg(arg.clone()) .add_arg(arg.clone())
.run_with_input(value_at_path.into_pipeline_data())? .run_with_input(value_at_path.into_pipeline_data())?
.into_value(span); .into_value(span)?;
value.update_data_at_cell_path(cell_path, new_value) value.update_data_at_cell_path(cell_path, new_value)
} }

View File

@ -218,7 +218,7 @@ fn upsert(
if let Value::Closure { val, .. } = replacement { if let Value::Closure { val, .. } = replacement {
ClosureEvalOnce::new(engine_state, stack, *val) ClosureEvalOnce::new(engine_state, stack, *val)
.run_with_value(value)? .run_with_value(value)?
.into_value(head) .into_value(head)?
} else { } else {
replacement replacement
} }
@ -285,8 +285,8 @@ fn upsert(
type_name: "empty pipeline".to_string(), type_name: "empty pipeline".to_string(),
span: head, span: head,
}), }),
PipelineData::ExternalStream { .. } => Err(ShellError::IncompatiblePathAccess { PipelineData::ByteStream(..) => Err(ShellError::IncompatiblePathAccess {
type_name: "external stream".to_string(), type_name: "byte stream".to_string(),
span: head, span: head,
}), }),
} }
@ -311,7 +311,11 @@ fn upsert_value_by_closure(
.map(IntoPipelineData::into_pipeline_data) .map(IntoPipelineData::into_pipeline_data)
.unwrap_or(PipelineData::Empty); .unwrap_or(PipelineData::Empty);
let new_value = closure.add_arg(arg).run_with_input(input)?.into_value(span); let new_value = closure
.add_arg(arg)
.run_with_input(input)?
.into_value(span)?;
value.upsert_data_at_cell_path(cell_path, new_value) value.upsert_data_at_cell_path(cell_path, new_value)
} }
@ -334,7 +338,11 @@ fn upsert_single_value_by_closure(
.map(IntoPipelineData::into_pipeline_data) .map(IntoPipelineData::into_pipeline_data)
.unwrap_or(PipelineData::Empty); .unwrap_or(PipelineData::Empty);
let new_value = closure.add_arg(arg).run_with_input(input)?.into_value(span); let new_value = closure
.add_arg(arg)
.run_with_input(input)?
.into_value(span)?;
value.upsert_data_at_cell_path(cell_path, new_value) value.upsert_data_at_cell_path(cell_path, new_value)
} }

View File

@ -36,7 +36,7 @@ pub fn boolean_fold(
break; break;
} }
let pred = closure.run_with_value(value)?.into_value(head).is_true(); let pred = closure.run_with_value(value)?.into_value(head)?.is_true();
if pred == accumulator { if pred == accumulator {
return Ok(Value::bool(accumulator, head).into_pipeline_data()); return Ok(Value::bool(accumulator, head).into_pipeline_data());

View File

@ -180,13 +180,11 @@ fn values(
Err(err) => Err(err), Err(err) => Err(err),
} }
} }
PipelineData::ExternalStream { .. } => Err(ShellError::OnlySupportsThisInputType { PipelineData::ByteStream(stream, ..) => Err(ShellError::OnlySupportsThisInputType {
exp_input_type: "record or table".into(), exp_input_type: "record or table".into(),
wrong_type: "raw data".into(), wrong_type: "byte stream".into(),
dst_span: head, dst_span: head,
src_span: input src_span: stream.span(),
.span()
.expect("PipelineData::ExternalStream had no span"),
}), }),
} }
} }

View File

@ -57,9 +57,14 @@ not supported."#
let metadata = input.metadata(); let metadata = input.metadata();
Ok(input Ok(input
.into_iter_strict(head)? .into_iter_strict(head)?
.filter_map(move |value| match closure.run_with_value(value.clone()) { .filter_map(move |value| {
Ok(data) => data.into_value(head).is_true().then_some(value), match closure
Err(err) => Some(Value::error(err, head)), .run_with_value(value.clone())
.and_then(|data| data.into_value(head))
{
Ok(cond) => cond.is_true().then_some(value),
Err(err) => Some(Value::error(err, head)),
}
}) })
.into_pipeline_data_with_metadata(head, engine_state.ctrlc.clone(), metadata)) .into_pipeline_data_with_metadata(head, engine_state.ctrlc.clone(), metadata))
} }

View File

@ -43,8 +43,8 @@ impl Command for Wrap {
.into_iter() .into_iter()
.map(move |x| Value::record(record! { name.clone() => x }, span)) .map(move |x| Value::record(record! { name.clone() => x }, span))
.into_pipeline_data_with_metadata(span, engine_state.ctrlc.clone(), metadata)), .into_pipeline_data_with_metadata(span, engine_state.ctrlc.clone(), metadata)),
PipelineData::ExternalStream { .. } => Ok(Value::record( PipelineData::ByteStream(stream, ..) => Ok(Value::record(
record! { name => input.into_value(span) }, record! { name => stream.into_value()? },
span, span,
) )
.into_pipeline_data_with_metadata(metadata)), .into_pipeline_data_with_metadata(metadata)),

View File

@ -59,7 +59,7 @@ impl Command for FromJson {
let (string_input, span, metadata) = input.collect_string_strict(span)?; let (string_input, span, metadata) = input.collect_string_strict(span)?;
if string_input.is_empty() { if string_input.is_empty() {
return Ok(PipelineData::new_with_metadata(metadata, span)); return Ok(Value::nothing(span).into_pipeline_data());
} }
let strict = call.has_flag(engine_state, stack, "strict")?; let strict = call.has_flag(engine_state, stack, "strict")?;

View File

@ -2,9 +2,8 @@
// implementation here is unique. // implementation here is unique.
use std::{ use std::{
collections::VecDeque,
error::Error, error::Error,
io::{self, Cursor, ErrorKind, Write}, io::{self, Cursor, ErrorKind},
string::FromUtf8Error, string::FromUtf8Error,
sync::{atomic::AtomicBool, Arc}, sync::{atomic::AtomicBool, Arc},
}; };
@ -12,7 +11,6 @@ use std::{
use byteorder::{BigEndian, ReadBytesExt}; use byteorder::{BigEndian, ReadBytesExt};
use chrono::{TimeZone, Utc}; use chrono::{TimeZone, Utc};
use nu_engine::command_prelude::*; use nu_engine::command_prelude::*;
use nu_protocol::RawStream;
use rmp::decode::{self as mp, ValueReadError}; use rmp::decode::{self as mp, ValueReadError};
/// Max recursion depth /// Max recursion depth
@ -121,12 +119,20 @@ MessagePack: https://msgpack.org/
read_msgpack(Cursor::new(bytes), opts) read_msgpack(Cursor::new(bytes), opts)
} }
// Deserialize from a raw stream directly without having to collect it // Deserialize from a raw stream directly without having to collect it
PipelineData::ExternalStream { PipelineData::ByteStream(stream, ..) => {
stdout: Some(raw_stream), let span = stream.span();
.. if let Some(reader) = stream.reader() {
} => read_msgpack(ReadRawStream::new(raw_stream), opts), read_msgpack(reader, opts)
} else {
Err(ShellError::PipelineMismatch {
exp_input_type: "binary or byte stream".into(),
dst_span: call.head,
src_span: span,
})
}
}
input => Err(ShellError::PipelineMismatch { input => Err(ShellError::PipelineMismatch {
exp_input_type: "binary".into(), exp_input_type: "binary or byte stream".into(),
dst_span: call.head, dst_span: call.head,
src_span: input.span().unwrap_or(call.head), src_span: input.span().unwrap_or(call.head),
}), }),
@ -483,57 +489,6 @@ where
.map_err(|err| ReadError::Io(err, span)) .map_err(|err| ReadError::Io(err, span))
} }
/// Adapter to read MessagePack from a `RawStream`
///
/// TODO: contribute this back to `RawStream` in general, with more polish, if it works
pub(crate) struct ReadRawStream {
    pub stream: RawStream,
    // Use a `VecDeque` for read efficiency
    // Holds bytes already pulled from the stream but not yet handed to a reader.
    pub leftover: VecDeque<u8>,
}
impl ReadRawStream {
pub(crate) fn new(mut stream: RawStream) -> ReadRawStream {
ReadRawStream {
leftover: std::mem::take(&mut stream.leftover).into(),
stream,
}
}
}
impl io::Read for ReadRawStream {
    /// Serves reads first from the `leftover` buffer, then from the next
    /// non-empty chunk of the underlying stream. Excess bytes from a chunk
    /// that don't fit in `buf` are stashed back into `leftover`.
    fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> {
        if buf.is_empty() {
            // Zero-length destination: returning 0 here must not be taken as EOF.
            Ok(0)
        } else if !self.leftover.is_empty() {
            // Take as many leftover bytes as possible
            self.leftover.read(buf)
        } else {
            // Try to get data from the RawStream. We have to be careful not to break on a zero-len
            // buffer though, since that would mean EOF
            loop {
                if let Some(result) = self.stream.stream.next() {
                    let bytes = result.map_err(|err| io::Error::new(ErrorKind::Other, err))?;
                    if !bytes.is_empty() {
                        let min_len = bytes.len().min(buf.len());
                        let (source, leftover_bytes) = bytes.split_at(min_len);
                        buf[0..min_len].copy_from_slice(source);
                        // Keep whatever bytes we couldn't use in the leftover vec
                        self.leftover.write_all(leftover_bytes)?;
                        return Ok(min_len);
                    } else {
                        // Zero-length buf, continue
                        continue;
                    }
                } else {
                    // End of input
                    return Ok(0);
                }
            }
        }
    }
}
/// Return an error if this is not the end of file. /// Return an error if this is not the end of file.
/// ///
/// This can help detect if parsing succeeded incorrectly, perhaps due to corruption. /// This can help detect if parsing succeeded incorrectly, perhaps due to corruption.

View File

@ -2,7 +2,7 @@ use std::io::Cursor;
use nu_engine::command_prelude::*; use nu_engine::command_prelude::*;
use super::msgpack::{read_msgpack, Opts, ReadRawStream}; use super::msgpack::{read_msgpack, Opts};
const BUFFER_SIZE: usize = 65536; const BUFFER_SIZE: usize = 65536;
@ -50,15 +50,21 @@ impl Command for FromMsgpackz {
read_msgpack(reader, opts) read_msgpack(reader, opts)
} }
// Deserialize from a raw stream directly without having to collect it // Deserialize from a raw stream directly without having to collect it
PipelineData::ExternalStream { PipelineData::ByteStream(stream, ..) => {
stdout: Some(raw_stream), let span = stream.span();
.. if let Some(reader) = stream.reader() {
} => { let reader = brotli::Decompressor::new(reader, BUFFER_SIZE);
let reader = brotli::Decompressor::new(ReadRawStream::new(raw_stream), BUFFER_SIZE); read_msgpack(reader, opts)
read_msgpack(reader, opts) } else {
Err(ShellError::PipelineMismatch {
exp_input_type: "binary or byte stream".into(),
dst_span: call.head,
src_span: span,
})
}
} }
_ => Err(ShellError::PipelineMismatch { _ => Err(ShellError::PipelineMismatch {
exp_input_type: "binary".into(), exp_input_type: "binary or byte stream".into(),
dst_span: call.head, dst_span: call.head,
src_span: span, src_span: span,
}), }),

Some files were not shown because too many files have changed in this diff Show More