Mirror of https://github.com/nushell/nushell.git (synced 2024-11-22 00:13:21 +01:00)
Replace ExternalStream with new ByteStream type (#12774)
# Description

This PR introduces a `ByteStream` type which is a `Read`-able stream of bytes. Internally, it has an enum over three different byte stream sources:

```rust
pub enum ByteStreamSource {
    Read(Box<dyn Read + Send + 'static>),
    File(File),
    Child(ChildProcess),
}
```

This is in comparison to the current `RawStream` type, which is an `Iterator<Item = Vec<u8>>` and has to allocate for each read chunk.

Currently, `PipelineData::ExternalStream` serves a weird dual role where it is either external command output or a wrapper around `RawStream`. `ByteStream` makes this distinction more clear (via `ByteStreamSource`) and replaces `PipelineData::ExternalStream` in this PR:

```rust
pub enum PipelineData {
    Empty,
    Value(Value, Option<PipelineMetadata>),
    ListStream(ListStream, Option<PipelineMetadata>),
    ByteStream(ByteStream, Option<PipelineMetadata>),
}
```

The PR is relatively large, but a decent amount of it is just repetitive changes.

This PR fixes #7017, fixes #10763, and fixes #12369.

This PR also improves performance when piping external commands. Nushell should, in most cases, have competitive pipeline throughput compared to, e.g., bash.

| Command                                         | Before (MB/s) | After (MB/s) | Bash (MB/s) |
| ----------------------------------------------- | ------------: | -----------: | ----------: |
| `throughput \| rg 'x'`                          |          3059 |         3744 |        3739 |
| `throughput \| nu --testbin relay o> /dev/null` |          3508 |         8087 |        8136 |

# User-Facing Changes

- This is a breaking change for the plugin communication protocol, because `ExternalStreamInfo` was replaced with `ByteStreamInfo`. Plugins now only have to deal with a single input stream, as opposed to the previous three streams: stdout, stderr, and exit code.
- The output of `describe` has been changed for external/byte streams.
- Temporary breaking change: `bytes starts-with` no longer works with byte streams. This is to keep the PR smaller, and `bytes ends-with` already does not work on byte streams.
- If a process core dumped, then instead of having a `Value::Error` in the `exit_code` column of the output returned from `complete`, it is now a `Value::Int` with the negation of the signal number.

# After Submitting

- Update docs and book as necessary
- Release notes (e.g., plugin protocol changes)
- Adapt/convert commands to work with byte streams (high priority is `str length`, `bytes starts-with`, and maybe `bytes ends-with`).
- Refactor the `tee` code; Devyn has already done some work on this.

---------

Co-authored-by: Devyn Cairns <devyn.cairns@gmail.com>
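As a rough sketch of what this means for command code (this is not code from the PR; the helper name `collect_input` is made up for illustration, and the `nu_protocol` items are the ones shown above and used throughout the diff below):

```rust
use nu_protocol::{PipelineData, ShellError, Span, Value};

// Sketch: collect any pipeline input into a single Value. What used to be a
// three-field ExternalStream (stdout, stderr, exit code) collapses into one
// ByteStream arm; everything else goes through into_value, which now returns
// a Result instead of a bare Value.
fn collect_input(input: PipelineData, head: Span) -> Result<Value, ShellError> {
    match input {
        PipelineData::ByteStream(stream, ..) => {
            // Drains the underlying Read/File/Child source to completion.
            let bytes = stream.into_bytes()?;
            Ok(Value::binary(bytes, head))
        }
        other => other.into_value(head),
    }
}
```

Many of the hunks below are exactly this collapse: destructuring of `ExternalStream { stdout, stderr, exit_code, .. }` replaced by a single `PipelineData::ByteStream(stream, ..)` arm, and bare `into_value(span)` calls gaining a `?`.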
This commit is contained in:
parent 1b8eb23785
commit 6fd854ed9f
Cargo.lock (generated)
@@ -3254,6 +3254,7 @@ dependencies = [
"indexmap",
"lru",
"miette",
"nix",
"nu-path",
"nu-system",
"nu-test-support",
@@ -103,9 +103,8 @@ impl NuCompleter {
             PipelineData::empty(),
         );
 
-        match result {
-            Ok(pd) => {
-                let value = pd.into_value(span);
+        match result.and_then(|data| data.into_value(span)) {
+            Ok(value) => {
                 if let Value::List { vals, .. } = value {
                     let result =
                         map_value_completions(vals.iter(), Span::new(span.start, span.end), offset);
@ -74,55 +74,53 @@ impl Completer for CustomCompletion {
|
||||
|
||||
// Parse result
|
||||
let suggestions = result
|
||||
.map(|pd| {
|
||||
let value = pd.into_value(span);
|
||||
match &value {
|
||||
Value::Record { val, .. } => {
|
||||
let completions = val
|
||||
.get("completions")
|
||||
.and_then(|val| {
|
||||
val.as_list()
|
||||
.ok()
|
||||
.map(|it| map_value_completions(it.iter(), span, offset))
|
||||
})
|
||||
.unwrap_or_default();
|
||||
let options = val.get("options");
|
||||
.and_then(|data| data.into_value(span))
|
||||
.map(|value| match &value {
|
||||
Value::Record { val, .. } => {
|
||||
let completions = val
|
||||
.get("completions")
|
||||
.and_then(|val| {
|
||||
val.as_list()
|
||||
.ok()
|
||||
.map(|it| map_value_completions(it.iter(), span, offset))
|
||||
})
|
||||
.unwrap_or_default();
|
||||
let options = val.get("options");
|
||||
|
||||
if let Some(Value::Record { val: options, .. }) = &options {
|
||||
let should_sort = options
|
||||
.get("sort")
|
||||
.and_then(|val| val.as_bool().ok())
|
||||
.unwrap_or(false);
|
||||
if let Some(Value::Record { val: options, .. }) = &options {
|
||||
let should_sort = options
|
||||
.get("sort")
|
||||
.and_then(|val| val.as_bool().ok())
|
||||
.unwrap_or(false);
|
||||
|
||||
if should_sort {
|
||||
self.sort_by = SortBy::Ascending;
|
||||
}
|
||||
|
||||
custom_completion_options = Some(CompletionOptions {
|
||||
case_sensitive: options
|
||||
.get("case_sensitive")
|
||||
.and_then(|val| val.as_bool().ok())
|
||||
.unwrap_or(true),
|
||||
positional: options
|
||||
.get("positional")
|
||||
.and_then(|val| val.as_bool().ok())
|
||||
.unwrap_or(true),
|
||||
match_algorithm: match options.get("completion_algorithm") {
|
||||
Some(option) => option
|
||||
.coerce_string()
|
||||
.ok()
|
||||
.and_then(|option| option.try_into().ok())
|
||||
.unwrap_or(MatchAlgorithm::Prefix),
|
||||
None => completion_options.match_algorithm,
|
||||
},
|
||||
});
|
||||
if should_sort {
|
||||
self.sort_by = SortBy::Ascending;
|
||||
}
|
||||
|
||||
completions
|
||||
custom_completion_options = Some(CompletionOptions {
|
||||
case_sensitive: options
|
||||
.get("case_sensitive")
|
||||
.and_then(|val| val.as_bool().ok())
|
||||
.unwrap_or(true),
|
||||
positional: options
|
||||
.get("positional")
|
||||
.and_then(|val| val.as_bool().ok())
|
||||
.unwrap_or(true),
|
||||
match_algorithm: match options.get("completion_algorithm") {
|
||||
Some(option) => option
|
||||
.coerce_string()
|
||||
.ok()
|
||||
.and_then(|option| option.try_into().ok())
|
||||
.unwrap_or(MatchAlgorithm::Prefix),
|
||||
None => completion_options.match_algorithm,
|
||||
},
|
||||
});
|
||||
}
|
||||
Value::List { vals, .. } => map_value_completions(vals.iter(), span, offset),
|
||||
_ => vec![],
|
||||
|
||||
completions
|
||||
}
|
||||
Value::List { vals, .. } => map_value_completions(vals.iter(), span, offset),
|
||||
_ => vec![],
|
||||
})
|
||||
.unwrap_or_default();
|
||||
|
||||
|
@@ -306,14 +306,15 @@ pub fn migrate_old_plugin_file(engine_state: &EngineState, storage_path: &str) -
     let mut engine_state = engine_state.clone();
     let mut stack = Stack::new();
 
-    if !eval_source(
+    if eval_source(
         &mut engine_state,
         &mut stack,
         &old_contents,
         &old_plugin_file_path.to_string_lossy(),
         PipelineData::Empty,
         false,
-    ) {
+    ) != 0
+    {
         return false;
     }
 
@ -1,5 +1,4 @@
|
||||
use log::info;
|
||||
use miette::Result;
|
||||
use nu_engine::{convert_env_values, eval_block};
|
||||
use nu_parser::parse;
|
||||
use nu_protocol::{
|
||||
@@ -59,9 +58,10 @@ pub fn evaluate_commands(
             t_mode.coerce_str()?.parse().unwrap_or_default();
     }
 
-    let exit_code = pipeline.print(engine_state, stack, no_newline, false)?;
-    if exit_code != 0 {
-        std::process::exit(exit_code as i32);
+    if let Some(status) = pipeline.print(engine_state, stack, no_newline, false)? {
+        if status.code() != 0 {
+            std::process::exit(status.code())
+        }
     }
 
     info!("evaluate {}:{}:{}", file!(), line!(), column!());
@ -96,7 +96,7 @@ pub fn evaluate_file(
|
||||
engine_state.merge_delta(working_set.delta)?;
|
||||
|
||||
// Check if the file contains a main command.
|
||||
if engine_state.find_decl(b"main", &[]).is_some() {
|
||||
let exit_code = if engine_state.find_decl(b"main", &[]).is_some() {
|
||||
// Evaluate the file, but don't run main yet.
|
||||
let pipeline =
|
||||
match eval_block::<WithoutDebug>(engine_state, stack, &block, PipelineData::empty()) {
|
||||
@ -109,26 +109,29 @@ pub fn evaluate_file(
|
||||
};
|
||||
|
||||
// Print the pipeline output of the last command of the file.
|
||||
let exit_code = pipeline.print(engine_state, stack, true, false)?;
|
||||
if exit_code != 0 {
|
||||
std::process::exit(exit_code as i32);
|
||||
if let Some(status) = pipeline.print(engine_state, stack, true, false)? {
|
||||
if status.code() != 0 {
|
||||
std::process::exit(status.code())
|
||||
}
|
||||
}
|
||||
|
||||
// Invoke the main command with arguments.
|
||||
// Arguments with whitespaces are quoted, thus can be safely concatenated by whitespace.
|
||||
let args = format!("main {}", args.join(" "));
|
||||
if !eval_source(
|
||||
eval_source(
|
||||
engine_state,
|
||||
stack,
|
||||
args.as_bytes(),
|
||||
"<commandline>",
|
||||
input,
|
||||
true,
|
||||
) {
|
||||
std::process::exit(1);
|
||||
}
|
||||
} else if !eval_source(engine_state, stack, &file, file_path_str, input, true) {
|
||||
std::process::exit(1);
|
||||
)
|
||||
} else {
|
||||
eval_source(engine_state, stack, &file, file_path_str, input, true)
|
||||
};
|
||||
|
||||
if exit_code != 0 {
|
||||
std::process::exit(exit_code)
|
||||
}
|
||||
|
||||
info!("evaluate {}:{}:{}", file!(), line!(), column!());
|
||||
|
@@ -59,8 +59,7 @@ impl Completer for NuMenuCompleter {
 
         let res = eval_block::<WithoutDebug>(&self.engine_state, &mut self.stack, block, input);
 
-        if let Ok(values) = res {
-            let values = values.into_value(self.span);
+        if let Ok(values) = res.and_then(|data| data.into_value(self.span)) {
             convert_to_suggestions(values, line, pos, self.only_buffer_difference)
         } else {
             Vec::new()
@@ -4,7 +4,7 @@ use nu_parser::{escape_quote_string, lex, parse, unescape_unquote_string, Token,
 use nu_protocol::{
     debugger::WithoutDebug,
     engine::{EngineState, Stack, StateWorkingSet},
-    print_if_stream, report_error, report_error_new, PipelineData, ShellError, Span, Value,
+    report_error, report_error_new, PipelineData, ShellError, Span, Value,
 };
 #[cfg(windows)]
 use nu_utils::enable_vt_processing;
@ -206,9 +206,48 @@ pub fn eval_source(
|
||||
fname: &str,
|
||||
input: PipelineData,
|
||||
allow_return: bool,
|
||||
) -> bool {
|
||||
) -> i32 {
|
||||
let start_time = std::time::Instant::now();
|
||||
|
||||
let exit_code = match evaluate_source(engine_state, stack, source, fname, input, allow_return) {
|
||||
Ok(code) => code.unwrap_or(0),
|
||||
Err(err) => {
|
||||
report_error_new(engine_state, &err);
|
||||
1
|
||||
}
|
||||
};
|
||||
|
||||
stack.add_env_var(
|
||||
"LAST_EXIT_CODE".to_string(),
|
||||
Value::int(exit_code.into(), Span::unknown()),
|
||||
);
|
||||
|
||||
// reset vt processing, aka ansi because illbehaved externals can break it
|
||||
#[cfg(windows)]
|
||||
{
|
||||
let _ = enable_vt_processing();
|
||||
}
|
||||
|
||||
perf(
|
||||
&format!("eval_source {}", &fname),
|
||||
start_time,
|
||||
file!(),
|
||||
line!(),
|
||||
column!(),
|
||||
engine_state.get_config().use_ansi_coloring,
|
||||
);
|
||||
|
||||
exit_code
|
||||
}
|
||||
|
||||
fn evaluate_source(
|
||||
engine_state: &mut EngineState,
|
||||
stack: &mut Stack,
|
||||
source: &[u8],
|
||||
fname: &str,
|
||||
input: PipelineData,
|
||||
allow_return: bool,
|
||||
) -> Result<Option<i32>, ShellError> {
|
||||
let (block, delta) = {
|
||||
let mut working_set = StateWorkingSet::new(engine_state);
|
||||
let output = parse(
|
||||
@ -222,97 +261,40 @@ pub fn eval_source(
|
||||
}
|
||||
|
||||
if let Some(err) = working_set.parse_errors.first() {
|
||||
set_last_exit_code(stack, 1);
|
||||
report_error(&working_set, err);
|
||||
return false;
|
||||
return Ok(Some(1));
|
||||
}
|
||||
|
||||
(output, working_set.render())
|
||||
};
|
||||
|
||||
if let Err(err) = engine_state.merge_delta(delta) {
|
||||
set_last_exit_code(stack, 1);
|
||||
report_error_new(engine_state, &err);
|
||||
return false;
|
||||
}
|
||||
engine_state.merge_delta(delta)?;
|
||||
|
||||
let b = if allow_return {
|
||||
let pipeline = if allow_return {
|
||||
eval_block_with_early_return::<WithoutDebug>(engine_state, stack, &block, input)
|
||||
} else {
|
||||
eval_block::<WithoutDebug>(engine_state, stack, &block, input)
|
||||
}?;
|
||||
|
||||
let status = if let PipelineData::ByteStream(stream, ..) = pipeline {
|
||||
stream.print(false)?
|
||||
} else {
|
||||
if let Some(hook) = engine_state.get_config().hooks.display_output.clone() {
|
||||
let pipeline = eval_hook(
|
||||
engine_state,
|
||||
stack,
|
||||
Some(pipeline),
|
||||
vec![],
|
||||
&hook,
|
||||
"display_output",
|
||||
)?;
|
||||
pipeline.print(engine_state, stack, false, false)
|
||||
} else {
|
||||
pipeline.print(engine_state, stack, true, false)
|
||||
}?
|
||||
};
|
||||
|
||||
match b {
|
||||
Ok(pipeline_data) => {
|
||||
let config = engine_state.get_config();
|
||||
let result;
|
||||
if let PipelineData::ExternalStream {
|
||||
stdout: stream,
|
||||
stderr: stderr_stream,
|
||||
exit_code,
|
||||
..
|
||||
} = pipeline_data
|
||||
{
|
||||
result = print_if_stream(stream, stderr_stream, false, exit_code);
|
||||
} else if let Some(hook) = config.hooks.display_output.clone() {
|
||||
match eval_hook(
|
||||
engine_state,
|
||||
stack,
|
||||
Some(pipeline_data),
|
||||
vec![],
|
||||
&hook,
|
||||
"display_output",
|
||||
) {
|
||||
Err(err) => {
|
||||
result = Err(err);
|
||||
}
|
||||
Ok(val) => {
|
||||
result = val.print(engine_state, stack, false, false);
|
||||
}
|
||||
}
|
||||
} else {
|
||||
result = pipeline_data.print(engine_state, stack, true, false);
|
||||
}
|
||||
|
||||
match result {
|
||||
Err(err) => {
|
||||
report_error_new(engine_state, &err);
|
||||
return false;
|
||||
}
|
||||
Ok(exit_code) => {
|
||||
set_last_exit_code(stack, exit_code);
|
||||
}
|
||||
}
|
||||
|
||||
// reset vt processing, aka ansi because illbehaved externals can break it
|
||||
#[cfg(windows)]
|
||||
{
|
||||
let _ = enable_vt_processing();
|
||||
}
|
||||
}
|
||||
Err(err) => {
|
||||
set_last_exit_code(stack, 1);
|
||||
report_error_new(engine_state, &err);
|
||||
return false;
|
||||
}
|
||||
}
|
||||
perf(
|
||||
&format!("eval_source {}", &fname),
|
||||
start_time,
|
||||
file!(),
|
||||
line!(),
|
||||
column!(),
|
||||
engine_state.get_config().use_ansi_coloring,
|
||||
);
|
||||
|
||||
true
|
||||
}
|
||||
|
||||
fn set_last_exit_code(stack: &mut Stack, exit_code: i64) {
|
||||
stack.add_env_var(
|
||||
"LAST_EXIT_CODE".to_string(),
|
||||
Value::int(exit_code, Span::unknown()),
|
||||
);
|
||||
Ok(status.map(|status| status.code()))
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
|
@ -79,7 +79,7 @@ impl Command for CastDF {
|
||||
call: &Call,
|
||||
input: PipelineData,
|
||||
) -> Result<PipelineData, ShellError> {
|
||||
let value = input.into_value(call.head);
|
||||
let value = input.into_value(call.head)?;
|
||||
if NuLazyFrame::can_downcast(&value) {
|
||||
let (dtype, column_nm) = df_args(engine_state, stack, call)?;
|
||||
let df = NuLazyFrame::try_from_value(value)?;
|
||||
|
@ -72,8 +72,7 @@ impl Command for FilterWith {
|
||||
call: &Call,
|
||||
input: PipelineData,
|
||||
) -> Result<PipelineData, ShellError> {
|
||||
let value = input.into_value(call.head);
|
||||
|
||||
let value = input.into_value(call.head)?;
|
||||
if NuLazyFrame::can_downcast(&value) {
|
||||
let df = NuLazyFrame::try_from_value(value)?;
|
||||
command_lazy(engine_state, stack, call, df)
|
||||
|
@ -86,7 +86,7 @@ impl Command for FirstDF {
|
||||
call: &Call,
|
||||
input: PipelineData,
|
||||
) -> Result<PipelineData, ShellError> {
|
||||
let value = input.into_value(call.head);
|
||||
let value = input.into_value(call.head)?;
|
||||
if NuDataFrame::can_downcast(&value) {
|
||||
let df = NuDataFrame::try_from_value(value)?;
|
||||
command(engine_state, stack, call, df)
|
||||
|
@ -61,7 +61,7 @@ impl Command for LastDF {
|
||||
call: &Call,
|
||||
input: PipelineData,
|
||||
) -> Result<PipelineData, ShellError> {
|
||||
let value = input.into_value(call.head);
|
||||
let value = input.into_value(call.head)?;
|
||||
if NuDataFrame::can_downcast(&value) {
|
||||
let df = NuDataFrame::try_from_value(value)?;
|
||||
command(engine_state, stack, call, df)
|
||||
|
@ -109,8 +109,7 @@ impl Command for RenameDF {
|
||||
call: &Call,
|
||||
input: PipelineData,
|
||||
) -> Result<PipelineData, ShellError> {
|
||||
let value = input.into_value(call.head);
|
||||
|
||||
let value = input.into_value(call.head)?;
|
||||
if NuLazyFrame::can_downcast(&value) {
|
||||
let df = NuLazyFrame::try_from_value(value)?;
|
||||
command_lazy(engine_state, stack, call, df)
|
||||
|
@ -76,7 +76,7 @@ impl Command for ToNu {
|
||||
call: &Call,
|
||||
input: PipelineData,
|
||||
) -> Result<PipelineData, ShellError> {
|
||||
let value = input.into_value(call.head);
|
||||
let value = input.into_value(call.head)?;
|
||||
if NuDataFrame::can_downcast(&value) {
|
||||
dataframe_command(engine_state, stack, call, value)
|
||||
} else {
|
||||
|
@ -102,8 +102,7 @@ impl Command for WithColumn {
|
||||
call: &Call,
|
||||
input: PipelineData,
|
||||
) -> Result<PipelineData, ShellError> {
|
||||
let value = input.into_value(call.head);
|
||||
|
||||
let value = input.into_value(call.head)?;
|
||||
if NuLazyFrame::can_downcast(&value) {
|
||||
let df = NuLazyFrame::try_from_value(value)?;
|
||||
command_lazy(engine_state, stack, call, df)
|
||||
|
@ -172,7 +172,7 @@ macro_rules! lazy_expr_command {
|
||||
call: &Call,
|
||||
input: PipelineData,
|
||||
) -> Result<PipelineData, ShellError> {
|
||||
let value = input.into_value(call.head);
|
||||
let value = input.into_value(call.head)?;
|
||||
if NuDataFrame::can_downcast(&value) {
|
||||
let lazy = NuLazyFrame::try_from_value(value)?;
|
||||
let lazy = NuLazyFrame::new(
|
||||
@ -271,7 +271,7 @@ macro_rules! lazy_expr_command {
|
||||
call: &Call,
|
||||
input: PipelineData,
|
||||
) -> Result<PipelineData, ShellError> {
|
||||
let value = input.into_value(call.head);
|
||||
let value = input.into_value(call.head)?;
|
||||
if NuDataFrame::can_downcast(&value) {
|
||||
let lazy = NuLazyFrame::try_from_value(value)?;
|
||||
let lazy = NuLazyFrame::new(
|
||||
|
@ -91,7 +91,7 @@ impl Command for ExprOtherwise {
|
||||
let otherwise_predicate: Value = call.req(engine_state, stack, 0)?;
|
||||
let otherwise_predicate = NuExpression::try_from_value(otherwise_predicate)?;
|
||||
|
||||
let value = input.into_value(call.head);
|
||||
let value = input.into_value(call.head)?;
|
||||
let complete: NuExpression = match NuWhen::try_from_value(value)? {
|
||||
NuWhen::Then(then) => then.otherwise(otherwise_predicate.into_polars()).into(),
|
||||
NuWhen::ChainedThen(chained_when) => chained_when
|
||||
|
@ -67,7 +67,7 @@ impl Command for ExprQuantile {
|
||||
call: &Call,
|
||||
input: PipelineData,
|
||||
) -> Result<PipelineData, ShellError> {
|
||||
let value = input.into_value(call.head);
|
||||
let value = input.into_value(call.head)?;
|
||||
let quantile: f64 = call.req(engine_state, stack, 0)?;
|
||||
|
||||
let expr = NuExpression::try_from_value(value)?;
|
||||
|
@ -103,7 +103,7 @@ impl Command for ExprWhen {
|
||||
let then_predicate: Value = call.req(engine_state, stack, 1)?;
|
||||
let then_predicate = NuExpression::try_from_value(then_predicate)?;
|
||||
|
||||
let value = input.into_value(call.head);
|
||||
let value = input.into_value(call.head)?;
|
||||
let when_then: NuWhen = match value {
|
||||
Value::Nothing { .. } => when(when_predicate.into_polars())
|
||||
.then(then_predicate.into_polars())
|
||||
|
@ -100,7 +100,7 @@ impl Command for LazyExplode {
|
||||
}
|
||||
|
||||
pub(crate) fn explode(call: &Call, input: PipelineData) -> Result<PipelineData, ShellError> {
|
||||
let value = input.into_value(call.head);
|
||||
let value = input.into_value(call.head)?;
|
||||
if NuDataFrame::can_downcast(&value) {
|
||||
let df = NuLazyFrame::try_from_value(value)?;
|
||||
let columns: Vec<String> = call
|
||||
|
@ -82,7 +82,7 @@ impl Command for LazyFillNA {
|
||||
input: PipelineData,
|
||||
) -> Result<PipelineData, ShellError> {
|
||||
let fill: Value = call.req(engine_state, stack, 0)?;
|
||||
let value = input.into_value(call.head);
|
||||
let value = input.into_value(call.head)?;
|
||||
|
||||
if NuExpression::can_downcast(&value) {
|
||||
let expr = NuExpression::try_from_value(value)?;
|
||||
|
@ -59,7 +59,7 @@ impl Command for LazyFillNull {
|
||||
input: PipelineData,
|
||||
) -> Result<PipelineData, ShellError> {
|
||||
let fill: Value = call.req(engine_state, stack, 0)?;
|
||||
let value = input.into_value(call.head);
|
||||
let value = input.into_value(call.head)?;
|
||||
|
||||
if NuExpression::can_downcast(&value) {
|
||||
let expr = NuExpression::try_from_value(value)?;
|
||||
|
@ -219,7 +219,7 @@ impl Command for LazyJoin {
|
||||
let suffix: Option<String> = call.get_flag(engine_state, stack, "suffix")?;
|
||||
let suffix = suffix.unwrap_or_else(|| "_x".into());
|
||||
|
||||
let value = input.into_value(call.head);
|
||||
let value = input.into_value(call.head)?;
|
||||
let lazy = NuLazyFrame::try_from_value(value)?;
|
||||
let from_eager = lazy.from_eager;
|
||||
let lazy = lazy.into_polars();
|
||||
|
@ -54,7 +54,7 @@ impl Command for LazyQuantile {
|
||||
call: &Call,
|
||||
input: PipelineData,
|
||||
) -> Result<PipelineData, ShellError> {
|
||||
let value = input.into_value(call.head);
|
||||
let value = input.into_value(call.head)?;
|
||||
let quantile: f64 = call.req(engine_state, stack, 0)?;
|
||||
|
||||
let lazy = NuLazyFrame::try_from_value(value)?;
|
||||
|
@ -68,7 +68,7 @@ impl Command for IsNotNull {
|
||||
call: &Call,
|
||||
input: PipelineData,
|
||||
) -> Result<PipelineData, ShellError> {
|
||||
let value = input.into_value(call.head);
|
||||
let value = input.into_value(call.head)?;
|
||||
if NuDataFrame::can_downcast(&value) {
|
||||
let df = NuDataFrame::try_from_value(value)?;
|
||||
command(engine_state, stack, call, df)
|
||||
|
@ -68,7 +68,7 @@ impl Command for IsNull {
|
||||
call: &Call,
|
||||
input: PipelineData,
|
||||
) -> Result<PipelineData, ShellError> {
|
||||
let value = input.into_value(call.head);
|
||||
let value = input.into_value(call.head)?;
|
||||
if NuDataFrame::can_downcast(&value) {
|
||||
let df = NuDataFrame::try_from_value(value)?;
|
||||
command(engine_state, stack, call, df)
|
||||
|
@ -60,7 +60,7 @@ impl Command for NUnique {
|
||||
call: &Call,
|
||||
input: PipelineData,
|
||||
) -> Result<PipelineData, ShellError> {
|
||||
let value = input.into_value(call.head);
|
||||
let value = input.into_value(call.head)?;
|
||||
if NuDataFrame::can_downcast(&value) {
|
||||
let df = NuDataFrame::try_from_value(value)?;
|
||||
command(engine_state, stack, call, df)
|
||||
|
@ -56,8 +56,7 @@ impl Command for Shift {
|
||||
call: &Call,
|
||||
input: PipelineData,
|
||||
) -> Result<PipelineData, ShellError> {
|
||||
let value = input.into_value(call.head);
|
||||
|
||||
let value = input.into_value(call.head)?;
|
||||
if NuLazyFrame::can_downcast(&value) {
|
||||
let df = NuLazyFrame::try_from_value(value)?;
|
||||
command_lazy(engine_state, stack, call, df)
|
||||
|
@ -72,8 +72,7 @@ impl Command for Unique {
|
||||
call: &Call,
|
||||
input: PipelineData,
|
||||
) -> Result<PipelineData, ShellError> {
|
||||
let value = input.into_value(call.head);
|
||||
|
||||
let value = input.into_value(call.head)?;
|
||||
if NuLazyFrame::can_downcast(&value) {
|
||||
let df = NuLazyFrame::try_from_value(value)?;
|
||||
command_lazy(engine_state, stack, call, df)
|
||||
|
@ -80,7 +80,8 @@ pub fn test_dataframe_example(engine_state: &mut Box<EngineState>, example: &Exa
|
||||
let result =
|
||||
eval_block::<WithoutDebug>(engine_state, &mut stack, &block, PipelineData::empty())
|
||||
.unwrap_or_else(|err| panic!("test eval error in `{}`: {:?}", example.example, err))
|
||||
.into_value(Span::test_data());
|
||||
.into_value(Span::test_data())
|
||||
.expect("ok value");
|
||||
|
||||
println!("input: {}", example.example);
|
||||
println!("result: {result:?}");
|
||||
|
@ -297,7 +297,7 @@ impl NuDataFrame {
|
||||
}
|
||||
|
||||
pub fn try_from_pipeline(input: PipelineData, span: Span) -> Result<Self, ShellError> {
|
||||
let value = input.into_value(span);
|
||||
let value = input.into_value(span)?;
|
||||
Self::try_from_value(value)
|
||||
}
|
||||
|
||||
|
@ -84,7 +84,7 @@ impl NuExpression {
|
||||
}
|
||||
|
||||
pub fn try_from_pipeline(input: PipelineData, span: Span) -> Result<Self, ShellError> {
|
||||
let value = input.into_value(span);
|
||||
let value = input.into_value(span)?;
|
||||
Self::try_from_value(value)
|
||||
}
|
||||
|
||||
|
@ -134,7 +134,7 @@ impl NuLazyFrame {
|
||||
}
|
||||
|
||||
pub fn try_from_pipeline(input: PipelineData, span: Span) -> Result<Self, ShellError> {
|
||||
let value = input.into_value(span);
|
||||
let value = input.into_value(span)?;
|
||||
Self::try_from_value(value)
|
||||
}
|
||||
|
||||
|
@ -107,7 +107,7 @@ impl NuLazyGroupBy {
|
||||
}
|
||||
|
||||
pub fn try_from_pipeline(input: PipelineData, span: Span) -> Result<Self, ShellError> {
|
||||
let value = input.into_value(span);
|
||||
let value = input.into_value(span)?;
|
||||
Self::try_from_value(value)
|
||||
}
|
||||
}
|
||||
|
@ -118,22 +118,12 @@ fn into_bits(
|
||||
let cell_paths = call.rest(engine_state, stack, 0)?;
|
||||
let cell_paths = (!cell_paths.is_empty()).then_some(cell_paths);
|
||||
|
||||
match input {
|
||||
PipelineData::ExternalStream { stdout: None, .. } => {
|
||||
Ok(Value::binary(vec![], head).into_pipeline_data())
|
||||
}
|
||||
PipelineData::ExternalStream {
|
||||
stdout: Some(stream),
|
||||
..
|
||||
} => {
|
||||
// TODO: in the future, we may want this to stream out, converting each to bytes
|
||||
let output = stream.into_bytes()?;
|
||||
Ok(Value::binary(output.item, head).into_pipeline_data())
|
||||
}
|
||||
_ => {
|
||||
let args = Arguments { cell_paths };
|
||||
operate(action, args, input, call.head, engine_state.ctrlc.clone())
|
||||
}
|
||||
if let PipelineData::ByteStream(stream, ..) = input {
|
||||
// TODO: in the future, we may want this to stream out, converting each to bytes
|
||||
Ok(Value::binary(stream.into_bytes()?, head).into_pipeline_data())
|
||||
} else {
|
||||
let args = Arguments { cell_paths };
|
||||
operate(action, args, input, call.head, engine_state.ctrlc.clone())
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -78,38 +78,40 @@ impl Command for EachWhile {
|
||||
| PipelineData::ListStream(..) => {
|
||||
let mut closure = ClosureEval::new(engine_state, stack, closure);
|
||||
Ok(input
|
||||
.into_iter()
|
||||
.map_while(move |value| match closure.run_with_value(value) {
|
||||
Ok(data) => {
|
||||
let value = data.into_value(head);
|
||||
(!value.is_nothing()).then_some(value)
|
||||
}
|
||||
Err(_) => None,
|
||||
})
|
||||
.fuse()
|
||||
.into_pipeline_data(head, engine_state.ctrlc.clone()))
|
||||
}
|
||||
PipelineData::ExternalStream { stdout: None, .. } => Ok(PipelineData::empty()),
|
||||
PipelineData::ExternalStream {
|
||||
stdout: Some(stream),
|
||||
..
|
||||
} => {
|
||||
let mut closure = ClosureEval::new(engine_state, stack, closure);
|
||||
Ok(stream
|
||||
.into_iter()
|
||||
.map_while(move |value| {
|
||||
let value = value.ok()?;
|
||||
match closure.run_with_value(value) {
|
||||
Ok(data) => {
|
||||
let value = data.into_value(head);
|
||||
(!value.is_nothing()).then_some(value)
|
||||
}
|
||||
match closure
|
||||
.run_with_value(value)
|
||||
.and_then(|data| data.into_value(head))
|
||||
{
|
||||
Ok(value) => (!value.is_nothing()).then_some(value),
|
||||
Err(_) => None,
|
||||
}
|
||||
})
|
||||
.fuse()
|
||||
.into_pipeline_data(head, engine_state.ctrlc.clone()))
|
||||
}
|
||||
PipelineData::ByteStream(stream, ..) => {
|
||||
let span = stream.span();
|
||||
if let Some(chunks) = stream.chunks() {
|
||||
let mut closure = ClosureEval::new(engine_state, stack, closure);
|
||||
Ok(chunks
|
||||
.map_while(move |value| {
|
||||
let value = value.ok()?;
|
||||
match closure
|
||||
.run_with_value(value)
|
||||
.and_then(|data| data.into_value(span))
|
||||
{
|
||||
Ok(value) => (!value.is_nothing()).then_some(value),
|
||||
Err(_) => None,
|
||||
}
|
||||
})
|
||||
.fuse()
|
||||
.into_pipeline_data(head, engine_state.ctrlc.clone()))
|
||||
} else {
|
||||
Ok(PipelineData::Empty)
|
||||
}
|
||||
}
|
||||
// This match allows non-iterables to be accepted,
|
||||
// which is currently considered undesirable (Nov 2022).
|
||||
PipelineData::Value(value, ..) => {
|
||||
|
@ -56,7 +56,7 @@ impl Command for RollDown {
|
||||
let by: Option<usize> = call.get_flag(engine_state, stack, "by")?;
|
||||
let metadata = input.metadata();
|
||||
|
||||
let value = input.into_value(call.head);
|
||||
let value = input.into_value(call.head)?;
|
||||
let rotated_value = vertical_rotate_value(value, by, VerticalDirection::Down)?;
|
||||
|
||||
Ok(rotated_value.into_pipeline_data().set_metadata(metadata))
|
||||
|
@ -94,7 +94,7 @@ impl Command for RollLeft {
|
||||
let metadata = input.metadata();
|
||||
|
||||
let cells_only = call.has_flag(engine_state, stack, "cells-only")?;
|
||||
let value = input.into_value(call.head);
|
||||
let value = input.into_value(call.head)?;
|
||||
let rotated_value =
|
||||
horizontal_rotate_value(value, by, cells_only, &HorizontalDirection::Left)?;
|
||||
|
||||
|
@ -94,7 +94,7 @@ impl Command for RollRight {
|
||||
let metadata = input.metadata();
|
||||
|
||||
let cells_only = call.has_flag(engine_state, stack, "cells-only")?;
|
||||
let value = input.into_value(call.head);
|
||||
let value = input.into_value(call.head)?;
|
||||
let rotated_value =
|
||||
horizontal_rotate_value(value, by, cells_only, &HorizontalDirection::Right)?;
|
||||
|
||||
|
@ -56,7 +56,7 @@ impl Command for RollUp {
|
||||
let by: Option<usize> = call.get_flag(engine_state, stack, "by")?;
|
||||
let metadata = input.metadata();
|
||||
|
||||
let value = input.into_value(call.head);
|
||||
let value = input.into_value(call.head)?;
|
||||
let rotated_value = vertical_rotate_value(value, by, VerticalDirection::Up)?;
|
||||
|
||||
Ok(rotated_value.into_pipeline_data().set_metadata(metadata))
|
||||
|
@ -152,7 +152,7 @@ impl Iterator for UpdateCellIterator {
|
||||
fn eval_value(closure: &mut ClosureEval, span: Span, value: Value) -> Value {
|
||||
closure
|
||||
.run_with_value(value)
|
||||
.map(|data| data.into_value(span))
|
||||
.and_then(|data| data.into_value(span))
|
||||
.unwrap_or_else(|err| Value::error(err, span))
|
||||
}
|
||||
|
||||
|
@ -39,7 +39,7 @@ impl Command for FormatPattern {
|
||||
let mut working_set = StateWorkingSet::new(engine_state);
|
||||
|
||||
let specified_pattern: Result<Value, ShellError> = call.req(engine_state, stack, 0);
|
||||
let input_val = input.into_value(call.head);
|
||||
let input_val = input.into_value(call.head)?;
|
||||
// add '$it' variable to support format like this: $it.column1.column2.
|
||||
let it_id = working_set.add_variable(b"$it".to_vec(), call.head, Type::Any, false);
|
||||
stack.add_var(it_id, input_val.clone());
|
||||
|
@ -19,102 +19,102 @@ fn basic_string_fails() {
|
||||
assert_eq!(actual.out, "");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn short_stream_binary() {
|
||||
let actual = nu!(r#"
|
||||
nu --testbin repeater (0x[01]) 5 | bytes starts-with 0x[010101]
|
||||
"#);
|
||||
// #[test]
|
||||
// fn short_stream_binary() {
|
||||
// let actual = nu!(r#"
|
||||
// nu --testbin repeater (0x[01]) 5 | bytes starts-with 0x[010101]
|
||||
// "#);
|
||||
|
||||
assert_eq!(actual.out, "true");
|
||||
}
|
||||
// assert_eq!(actual.out, "true");
|
||||
// }
|
||||
|
||||
#[test]
|
||||
fn short_stream_mismatch() {
|
||||
let actual = nu!(r#"
|
||||
nu --testbin repeater (0x[010203]) 5 | bytes starts-with 0x[010204]
|
||||
"#);
|
||||
// #[test]
|
||||
// fn short_stream_mismatch() {
|
||||
// let actual = nu!(r#"
|
||||
// nu --testbin repeater (0x[010203]) 5 | bytes starts-with 0x[010204]
|
||||
// "#);
|
||||
|
||||
assert_eq!(actual.out, "false");
|
||||
}
|
||||
// assert_eq!(actual.out, "false");
|
||||
// }
|
||||
|
||||
#[test]
|
||||
fn short_stream_binary_overflow() {
|
||||
let actual = nu!(r#"
|
||||
nu --testbin repeater (0x[01]) 5 | bytes starts-with 0x[010101010101]
|
||||
"#);
|
||||
// #[test]
|
||||
// fn short_stream_binary_overflow() {
|
||||
// let actual = nu!(r#"
|
||||
// nu --testbin repeater (0x[01]) 5 | bytes starts-with 0x[010101010101]
|
||||
// "#);
|
||||
|
||||
assert_eq!(actual.out, "false");
|
||||
}
|
||||
// assert_eq!(actual.out, "false");
|
||||
// }
|
||||
|
||||
#[test]
|
||||
fn long_stream_binary() {
|
||||
let actual = nu!(r#"
|
||||
nu --testbin repeater (0x[01]) 32768 | bytes starts-with 0x[010101]
|
||||
"#);
|
||||
// #[test]
|
||||
// fn long_stream_binary() {
|
||||
// let actual = nu!(r#"
|
||||
// nu --testbin repeater (0x[01]) 32768 | bytes starts-with 0x[010101]
|
||||
// "#);
|
||||
|
||||
assert_eq!(actual.out, "true");
|
||||
}
|
||||
// assert_eq!(actual.out, "true");
|
||||
// }
|
||||
|
||||
#[test]
|
||||
fn long_stream_binary_overflow() {
|
||||
// .. ranges are inclusive..inclusive, so we don't need to +1 to check for an overflow
|
||||
let actual = nu!(r#"
|
||||
nu --testbin repeater (0x[01]) 32768 | bytes starts-with (0..32768 | each {|| 0x[01] } | bytes collect)
|
||||
"#);
|
||||
// #[test]
|
||||
// fn long_stream_binary_overflow() {
|
||||
// // .. ranges are inclusive..inclusive, so we don't need to +1 to check for an overflow
|
||||
// let actual = nu!(r#"
|
||||
// nu --testbin repeater (0x[01]) 32768 | bytes starts-with (0..32768 | each {|| 0x[01] } | bytes collect)
|
||||
// "#);
|
||||
|
||||
assert_eq!(actual.out, "false");
|
||||
}
|
||||
// assert_eq!(actual.out, "false");
|
||||
// }
|
||||
|
||||
#[test]
|
||||
fn long_stream_binary_exact() {
|
||||
// ranges are inclusive..inclusive, so we don't need to +1 to check for an overflow
|
||||
let actual = nu!(r#"
|
||||
nu --testbin repeater (0x[01020304]) 8192 | bytes starts-with (0..<8192 | each {|| 0x[01020304] } | bytes collect)
|
||||
"#);
|
||||
// #[test]
|
||||
// fn long_stream_binary_exact() {
|
||||
// // ranges are inclusive..inclusive, so we don't need to +1 to check for an overflow
|
||||
// let actual = nu!(r#"
|
||||
// nu --testbin repeater (0x[01020304]) 8192 | bytes starts-with (0..<8192 | each {|| 0x[01020304] } | bytes collect)
|
||||
// "#);
|
||||
|
||||
assert_eq!(actual.out, "true");
|
||||
}
|
||||
// assert_eq!(actual.out, "true");
|
||||
// }
|
||||
|
||||
#[test]
|
||||
fn long_stream_string_exact() {
|
||||
// ranges are inclusive..inclusive, so we don't need to +1 to check for an overflow
|
||||
let actual = nu!(r#"
|
||||
nu --testbin repeater hell 8192 | bytes starts-with (0..<8192 | each {|| "hell" | into binary } | bytes collect)
|
||||
"#);
|
||||
// #[test]
|
||||
// fn long_stream_string_exact() {
|
||||
// // ranges are inclusive..inclusive, so we don't need to +1 to check for an overflow
|
||||
// let actual = nu!(r#"
|
||||
// nu --testbin repeater hell 8192 | bytes starts-with (0..<8192 | each {|| "hell" | into binary } | bytes collect)
|
||||
// "#);
|
||||
|
||||
assert_eq!(actual.out, "true");
|
||||
}
|
||||
// assert_eq!(actual.out, "true");
|
||||
// }
|
||||
|
||||
#[test]
|
||||
fn long_stream_mixed_exact() {
|
||||
// ranges are inclusive..inclusive, so we don't need to +1 to check for an overflow
|
||||
let actual = nu!(r#"
|
||||
let binseg = (0..<2048 | each {|| 0x[003d9fbf] } | bytes collect)
|
||||
let strseg = (0..<2048 | each {|| "hell" | into binary } | bytes collect)
|
||||
// #[test]
|
||||
// fn long_stream_mixed_exact() {
|
||||
// // ranges are inclusive..inclusive, so we don't need to +1 to check for an overflow
|
||||
// let actual = nu!(r#"
|
||||
// let binseg = (0..<2048 | each {|| 0x[003d9fbf] } | bytes collect)
|
||||
// let strseg = (0..<2048 | each {|| "hell" | into binary } | bytes collect)
|
||||
|
||||
nu --testbin repeat_bytes 003d9fbf 2048 68656c6c 2048 | bytes starts-with (bytes build $binseg $strseg)
|
||||
"#);
|
||||
// nu --testbin repeat_bytes 003d9fbf 2048 68656c6c 2048 | bytes starts-with (bytes build $binseg $strseg)
|
||||
// "#);
|
||||
|
||||
assert_eq!(
|
||||
actual.err, "",
|
||||
"invocation failed. command line limit likely reached"
|
||||
);
|
||||
assert_eq!(actual.out, "true");
|
||||
}
|
||||
// assert_eq!(
|
||||
// actual.err, "",
|
||||
// "invocation failed. command line limit likely reached"
|
||||
// );
|
||||
// assert_eq!(actual.out, "true");
|
||||
// }
|
||||
|
||||
#[test]
|
||||
fn long_stream_mixed_overflow() {
|
||||
// ranges are inclusive..inclusive, so we don't need to +1 to check for an overflow
|
||||
let actual = nu!(r#"
|
||||
let binseg = (0..<2048 | each {|| 0x[003d9fbf] } | bytes collect)
|
||||
let strseg = (0..<2048 | each {|| "hell" | into binary } | bytes collect)
|
||||
// #[test]
|
||||
// fn long_stream_mixed_overflow() {
|
||||
// // ranges are inclusive..inclusive, so we don't need to +1 to check for an overflow
|
||||
// let actual = nu!(r#"
|
||||
// let binseg = (0..<2048 | each {|| 0x[003d9fbf] } | bytes collect)
|
||||
// let strseg = (0..<2048 | each {|| "hell" | into binary } | bytes collect)
|
||||
|
||||
nu --testbin repeat_bytes 003d9fbf 2048 68656c6c 2048 | bytes starts-with (bytes build $binseg $strseg 0x[01])
|
||||
"#);
|
||||
// nu --testbin repeat_bytes 003d9fbf 2048 68656c6c 2048 | bytes starts-with (bytes build $binseg $strseg 0x[01])
|
||||
// "#);
|
||||
|
||||
assert_eq!(
|
||||
actual.err, "",
|
||||
"invocation failed. command line limit likely reached"
|
||||
);
|
||||
assert_eq!(actual.out, "false");
|
||||
}
|
||||
// assert_eq!(
|
||||
// actual.err, "",
|
||||
// "invocation failed. command line limit likely reached"
|
||||
// );
|
||||
// assert_eq!(actual.out, "false");
|
||||
// }
|
||||
|
@ -43,7 +43,7 @@ impl Command for Collect {
|
||||
stack.captures_to_stack_preserve_out_dest(closure.captures.clone());
|
||||
|
||||
let metadata = input.metadata();
|
||||
let input = input.into_value(call.head);
|
||||
let input = input.into_value(call.head)?;
|
||||
|
||||
let mut saved_positional = None;
|
||||
if let Some(var) = block.signature.get_positional(0) {
|
||||
|
@ -1,5 +1,5 @@
|
||||
use nu_engine::command_prelude::*;
|
||||
use nu_protocol::{engine::StateWorkingSet, PipelineMetadata};
|
||||
use nu_protocol::{engine::StateWorkingSet, ByteStreamSource, PipelineMetadata};
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct Describe;
|
||||
@ -162,73 +162,38 @@ fn run(
|
||||
let metadata = input.metadata();
|
||||
|
||||
let description = match input {
|
||||
PipelineData::ExternalStream {
|
||||
ref stdout,
|
||||
ref stderr,
|
||||
ref exit_code,
|
||||
..
|
||||
} => {
|
||||
if options.detailed {
|
||||
let stdout = if stdout.is_some() {
|
||||
Value::record(
|
||||
record! {
|
||||
"type" => Value::string("stream", head),
|
||||
"origin" => Value::string("external", head),
|
||||
"subtype" => Value::string("any", head),
|
||||
},
|
||||
head,
|
||||
)
|
||||
} else {
|
||||
Value::nothing(head)
|
||||
};
|
||||
|
||||
let stderr = if stderr.is_some() {
|
||||
Value::record(
|
||||
record! {
|
||||
"type" => Value::string("stream", head),
|
||||
"origin" => Value::string("external", head),
|
||||
"subtype" => Value::string("any", head),
|
||||
},
|
||||
head,
|
||||
)
|
||||
} else {
|
||||
Value::nothing(head)
|
||||
};
|
||||
|
||||
let exit_code = if exit_code.is_some() {
|
||||
Value::record(
|
||||
record! {
|
||||
"type" => Value::string("stream", head),
|
||||
"origin" => Value::string("external", head),
|
||||
"subtype" => Value::string("int", head),
|
||||
},
|
||||
head,
|
||||
)
|
||||
} else {
|
||||
Value::nothing(head)
|
||||
PipelineData::ByteStream(stream, ..) => {
|
||||
let description = if options.detailed {
|
||||
let origin = match stream.source() {
|
||||
ByteStreamSource::Read(_) => "unknown",
|
||||
ByteStreamSource::File(_) => "file",
|
||||
ByteStreamSource::Child(_) => "external",
|
||||
};
|
||||
|
||||
Value::record(
|
||||
record! {
|
||||
"type" => Value::string("stream", head),
|
||||
"origin" => Value::string("external", head),
|
||||
"stdout" => stdout,
|
||||
"stderr" => stderr,
|
||||
"exit_code" => exit_code,
|
||||
"type" => Value::string("byte stream", head),
|
||||
"origin" => Value::string(origin, head),
|
||||
"metadata" => metadata_to_value(metadata, head),
|
||||
},
|
||||
head,
|
||||
)
|
||||
} else {
|
||||
Value::string("raw input", head)
|
||||
Value::string("byte stream", head)
|
||||
};
|
||||
|
||||
if !options.no_collect {
|
||||
stream.drain()?;
|
||||
}
|
||||
|
||||
description
|
||||
}
|
||||
PipelineData::ListStream(_, _) => {
|
||||
PipelineData::ListStream(stream, ..) => {
|
||||
if options.detailed {
|
||||
let subtype = if options.no_collect {
|
||||
Value::string("any", head)
|
||||
} else {
|
||||
describe_value(input.into_value(head), head, engine_state)
|
||||
describe_value(stream.into_value(), head, engine_state)
|
||||
};
|
||||
Value::record(
|
||||
record! {
|
||||
@ -242,19 +207,19 @@ fn run(
|
||||
} else if options.no_collect {
|
||||
Value::string("stream", head)
|
||||
} else {
|
||||
let value = input.into_value(head);
|
||||
let value = stream.into_value();
|
||||
let base_description = value.get_type().to_string();
|
||||
Value::string(format!("{} (stream)", base_description), head)
|
||||
}
|
||||
}
|
||||
_ => {
|
||||
let value = input.into_value(head);
|
||||
PipelineData::Value(value, ..) => {
|
||||
if !options.detailed {
|
||||
Value::string(value.get_type().to_string(), head)
|
||||
} else {
|
||||
describe_value(value, head, engine_state)
|
||||
}
|
||||
}
|
||||
PipelineData::Empty => Value::string(Type::Nothing.to_string(), head),
|
||||
};
|
||||
|
||||
Ok(description.into_pipeline_data())
|
||||
|
@ -1,6 +1,13 @@
|
||||
use nu_engine::{command_prelude::*, get_eval_block_with_early_return, redirect_env};
|
||||
use nu_protocol::{engine::Closure, ListStream, OutDest, RawStream};
|
||||
use std::thread;
|
||||
use nu_protocol::{
|
||||
engine::Closure,
|
||||
process::{ChildPipe, ChildProcess, ExitStatus},
|
||||
ByteStream, ByteStreamSource, OutDest,
|
||||
};
|
||||
use std::{
|
||||
io::{Cursor, Read},
|
||||
thread,
|
||||
};
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct Do;
|
||||
@ -86,115 +93,91 @@ impl Command for Do {
|
||||
}
|
||||
|
||||
match result {
|
||||
Ok(PipelineData::ExternalStream {
|
||||
stdout,
|
||||
stderr,
|
||||
exit_code,
|
||||
span,
|
||||
metadata,
|
||||
trim_end_newline,
|
||||
}) if capture_errors => {
|
||||
// Use a thread to receive stdout message.
|
||||
// Or we may get a deadlock if child process sends out too much bytes to stderr.
|
||||
//
|
||||
// For example: in normal linux system, stderr pipe's limit is 65535 bytes.
|
||||
// if child process sends out 65536 bytes, the process will be hanged because no consumer
|
||||
// consumes the first 65535 bytes
|
||||
// So we need a thread to receive stdout message, then the current thread can continue to consume
|
||||
// stderr messages.
|
||||
let stdout_handler = stdout
|
||||
.map(|stdout_stream| {
|
||||
thread::Builder::new()
|
||||
.name("stderr redirector".to_string())
|
||||
.spawn(move || {
|
||||
let ctrlc = stdout_stream.ctrlc.clone();
|
||||
let span = stdout_stream.span;
|
||||
RawStream::new(
|
||||
Box::new(std::iter::once(
|
||||
stdout_stream.into_bytes().map(|s| s.item),
|
||||
)),
|
||||
ctrlc,
|
||||
span,
|
||||
None,
|
||||
)
|
||||
Ok(PipelineData::ByteStream(stream, metadata)) if capture_errors => {
|
||||
let span = stream.span();
|
||||
match stream.into_child() {
|
||||
Ok(mut child) => {
|
||||
// Use a thread to receive stdout message.
|
||||
// Or we may get a deadlock if child process sends out too much bytes to stderr.
|
||||
//
|
||||
// For example: in normal linux system, stderr pipe's limit is 65535 bytes.
|
||||
// if child process sends out 65536 bytes, the process will be hanged because no consumer
|
||||
// consumes the first 65535 bytes
|
||||
// So we need a thread to receive stdout message, then the current thread can continue to consume
|
||||
// stderr messages.
|
||||
let stdout_handler = child
|
||||
.stdout
|
||||
.take()
|
||||
.map(|mut stdout| {
|
||||
thread::Builder::new()
|
||||
.name("stdout consumer".to_string())
|
||||
.spawn(move || {
|
||||
let mut buf = Vec::new();
|
||||
stdout.read_to_end(&mut buf)?;
|
||||
Ok::<_, ShellError>(buf)
|
||||
})
|
||||
.err_span(head)
|
||||
})
|
||||
.err_span(head)
|
||||
})
|
||||
.transpose()?;
|
||||
.transpose()?;
|
||||
|
||||
// Intercept stderr so we can return it in the error if the exit code is non-zero.
|
||||
// The threading issues mentioned above dictate why we also need to intercept stdout.
|
||||
let mut stderr_ctrlc = None;
|
||||
let stderr_msg = match stderr {
|
||||
None => "".to_string(),
|
||||
Some(stderr_stream) => {
|
||||
stderr_ctrlc.clone_from(&stderr_stream.ctrlc);
|
||||
stderr_stream.into_string().map(|s| s.item)?
|
||||
}
|
||||
};
|
||||
// Intercept stderr so we can return it in the error if the exit code is non-zero.
|
||||
// The threading issues mentioned above dictate why we also need to intercept stdout.
|
||||
let stderr_msg = match child.stderr.take() {
|
||||
None => String::new(),
|
||||
Some(mut stderr) => {
|
||||
let mut buf = String::new();
|
||||
stderr.read_to_string(&mut buf).err_span(span)?;
|
||||
buf
|
||||
}
|
||||
};
|
||||
|
||||
let stdout = if let Some(handle) = stdout_handler {
|
||||
match handle.join() {
|
||||
Err(err) => {
|
||||
let stdout = if let Some(handle) = stdout_handler {
|
||||
match handle.join() {
|
||||
Err(err) => {
|
||||
return Err(ShellError::ExternalCommand {
|
||||
label: "Fail to receive external commands stdout message"
|
||||
.to_string(),
|
||||
help: format!("{err:?}"),
|
||||
span,
|
||||
});
|
||||
}
|
||||
Ok(res) => Some(res?),
|
||||
}
|
||||
} else {
|
||||
None
|
||||
};
|
||||
|
||||
if child.wait()? != ExitStatus::Exited(0) {
|
||||
return Err(ShellError::ExternalCommand {
|
||||
label: "Fail to receive external commands stdout message"
|
||||
.to_string(),
|
||||
help: format!("{err:?}"),
|
||||
label: "External command failed".to_string(),
|
||||
help: stderr_msg,
|
||||
span,
|
||||
});
|
||||
}
|
||||
Ok(res) => Some(res),
|
||||
}
|
||||
} else {
|
||||
None
|
||||
};
|
||||
|
||||
let exit_code: Vec<Value> = match exit_code {
|
||||
None => vec![],
|
||||
Some(exit_code_stream) => exit_code_stream.into_iter().collect(),
|
||||
};
|
||||
if let Some(Value::Int { val: code, .. }) = exit_code.last() {
|
||||
if *code != 0 {
|
||||
return Err(ShellError::ExternalCommand {
|
||||
label: "External command failed".to_string(),
|
||||
help: stderr_msg,
|
||||
span,
|
||||
});
|
||||
let mut child = ChildProcess::from_raw(None, None, None, span);
|
||||
if let Some(stdout) = stdout {
|
||||
child.stdout = Some(ChildPipe::Tee(Box::new(Cursor::new(stdout))));
|
||||
}
|
||||
if !stderr_msg.is_empty() {
|
||||
child.stderr = Some(ChildPipe::Tee(Box::new(Cursor::new(stderr_msg))));
|
||||
}
|
||||
Ok(PipelineData::ByteStream(
|
||||
ByteStream::child(child, span),
|
||||
metadata,
|
||||
))
|
||||
}
|
||||
Err(stream) => Ok(PipelineData::ByteStream(stream, metadata)),
|
||||
}
|
||||
|
||||
Ok(PipelineData::ExternalStream {
|
||||
stdout,
|
||||
stderr: Some(RawStream::new(
|
||||
Box::new(std::iter::once(Ok(stderr_msg.into_bytes()))),
|
||||
stderr_ctrlc,
|
||||
span,
|
||||
None,
|
||||
)),
|
||||
exit_code: Some(ListStream::new(exit_code.into_iter(), span, None)),
|
||||
span,
|
||||
metadata,
|
||||
trim_end_newline,
|
||||
})
|
||||
}
|
||||
Ok(PipelineData::ExternalStream {
|
||||
stdout,
|
||||
stderr,
|
||||
exit_code: _,
|
||||
span,
|
||||
metadata,
|
||||
trim_end_newline,
|
||||
}) if ignore_program_errors
|
||||
&& !matches!(caller_stack.stdout(), OutDest::Pipe | OutDest::Capture) =>
|
||||
Ok(PipelineData::ByteStream(mut stream, metadata))
|
||||
if ignore_program_errors
|
||||
&& !matches!(caller_stack.stdout(), OutDest::Pipe | OutDest::Capture) =>
|
||||
{
|
||||
Ok(PipelineData::ExternalStream {
|
||||
stdout,
|
||||
stderr,
|
||||
exit_code: None,
|
||||
span,
|
||||
metadata,
|
||||
trim_end_newline,
|
||||
})
|
||||
if let ByteStreamSource::Child(child) = stream.source_mut() {
|
||||
child.set_exit_code(0)
|
||||
}
|
||||
Ok(PipelineData::ByteStream(stream, metadata))
|
||||
}
|
||||
Ok(PipelineData::Value(Value::Error { .. }, ..)) | Err(_) if ignore_shell_errors => {
|
||||
Ok(PipelineData::empty())
|
||||
|
@ -121,12 +121,14 @@ impl Command for For {
|
||||
Err(err) => {
|
||||
return Err(err);
|
||||
}
|
||||
Ok(pipeline) => {
|
||||
let exit_code = pipeline.drain_with_exit_code()?;
|
||||
if exit_code != 0 {
|
||||
return Ok(PipelineData::new_external_stream_with_only_exit_code(
|
||||
exit_code,
|
||||
));
|
||||
Ok(data) => {
|
||||
if let Some(status) = data.drain()? {
|
||||
let code = status.code();
|
||||
if code != 0 {
|
||||
return Ok(
|
||||
PipelineData::new_external_stream_with_only_exit_code(code),
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -159,12 +161,14 @@ impl Command for For {
|
||||
Err(err) => {
|
||||
return Err(err);
|
||||
}
|
||||
Ok(pipeline) => {
|
||||
let exit_code = pipeline.drain_with_exit_code()?;
|
||||
if exit_code != 0 {
|
||||
return Ok(PipelineData::new_external_stream_with_only_exit_code(
|
||||
exit_code,
|
||||
));
|
||||
Ok(data) => {
|
||||
if let Some(status) = data.drain()? {
|
||||
let code = status.code();
|
||||
if code != 0 {
|
||||
return Ok(
|
||||
PipelineData::new_external_stream_with_only_exit_code(code),
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -173,7 +177,7 @@ impl Command for For {
|
||||
x => {
|
||||
stack.add_var(var_id, x);
|
||||
|
||||
eval_block(&engine_state, stack, block, PipelineData::empty())?.into_value(head);
|
||||
eval_block(&engine_state, stack, block, PipelineData::empty())?.into_value(head)?;
|
||||
}
|
||||
}
|
||||
Ok(PipelineData::empty())
|
||||
|
@ -61,7 +61,7 @@ impl Command for Let {
|
||||
let eval_block = get_eval_block(engine_state);
|
||||
let stack = &mut stack.start_capture();
|
||||
let pipeline_data = eval_block(engine_state, stack, block, input)?;
|
||||
let value = pipeline_data.into_value(call.head);
|
||||
let value = pipeline_data.into_value(call.head)?;
|
||||
|
||||
// if given variable type is Glob, and our result is string
|
||||
// then nushell need to convert from Value::String to Value::Glob
|
||||
|
@ -53,12 +53,12 @@ impl Command for Loop {
|
||||
Err(err) => {
|
||||
return Err(err);
|
||||
}
|
||||
Ok(pipeline) => {
|
||||
let exit_code = pipeline.drain_with_exit_code()?;
|
||||
if exit_code != 0 {
|
||||
return Ok(PipelineData::new_external_stream_with_only_exit_code(
|
||||
exit_code,
|
||||
));
|
||||
Ok(data) => {
|
||||
if let Some(status) = data.drain()? {
|
||||
let code = status.code();
|
||||
if code != 0 {
|
||||
return Ok(PipelineData::new_external_stream_with_only_exit_code(code));
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -61,7 +61,7 @@ impl Command for Mut {
|
||||
let eval_block = get_eval_block(engine_state);
|
||||
let stack = &mut stack.start_capture();
|
||||
let pipeline_data = eval_block(engine_state, stack, block, input)?;
|
||||
let value = pipeline_data.into_value(call.head);
|
||||
let value = pipeline_data.into_value(call.head)?;
|
||||
|
||||
// if given variable type is Glob, and our result is string
|
||||
// then nushell need to convert from Value::String to Value::Glob
|
||||
|
@ -62,10 +62,11 @@ impl Command for Try {
|
||||
}
|
||||
// external command may fail to run
|
||||
Ok(pipeline) => {
|
||||
let (pipeline, external_failed) = pipeline.check_external_failed();
|
||||
let (pipeline, external_failed) = pipeline.check_external_failed()?;
|
||||
if external_failed {
|
||||
let exit_code = pipeline.drain_with_exit_code()?;
|
||||
stack.add_env_var("LAST_EXIT_CODE".into(), Value::int(exit_code, call.head));
|
||||
let status = pipeline.drain()?;
|
||||
let code = status.map(|status| status.code()).unwrap_or(0);
|
||||
stack.add_env_var("LAST_EXIT_CODE".into(), Value::int(code.into(), call.head));
|
||||
let err_value = Value::nothing(call.head);
|
||||
handle_catch(err_value, catch_block, engine_state, stack, eval_block)
|
||||
} else {
|
||||
|
@ -70,14 +70,16 @@ impl Command for While {
|
||||
Err(err) => {
|
||||
return Err(err);
|
||||
}
|
||||
Ok(pipeline) => {
|
||||
let exit_code = pipeline.drain_with_exit_code()?;
|
||||
if exit_code != 0 {
|
||||
return Ok(
|
||||
PipelineData::new_external_stream_with_only_exit_code(
|
||||
exit_code,
|
||||
),
|
||||
);
|
||||
Ok(data) => {
|
||||
if let Some(status) = data.drain()? {
|
||||
let code = status.code();
|
||||
if code != 0 {
|
||||
return Ok(
|
||||
PipelineData::new_external_stream_with_only_exit_code(
|
||||
code,
|
||||
),
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -122,10 +122,9 @@ pub fn eval_block(
|
||||
|
||||
stack.add_env_var("PWD".to_string(), Value::test_string(cwd.to_string_lossy()));
|
||||
|
||||
match nu_engine::eval_block::<WithoutDebug>(engine_state, &mut stack, &block, input) {
|
||||
Err(err) => panic!("test eval error in `{}`: {:?}", "TODO", err),
|
||||
Ok(result) => result.into_value(Span::test_data()),
|
||||
}
|
||||
nu_engine::eval_block::<WithoutDebug>(engine_state, &mut stack, &block, input)
|
||||
.and_then(|data| data.into_value(Span::test_data()))
|
||||
.unwrap_or_else(|err| panic!("test eval error in `{}`: {:?}", "TODO", err))
|
||||
}
|
||||
|
||||
pub fn check_example_evaluates_to_expected_output(
|
||||
|
@ -58,11 +58,11 @@ impl<'a> StyleComputer<'a> {
|
||||
Some(ComputableStyle::Closure(closure, span)) => {
|
||||
let result = ClosureEvalOnce::new(self.engine_state, self.stack, closure.clone())
|
||||
.debug(false)
|
||||
.run_with_value(value.clone());
|
||||
.run_with_value(value.clone())
|
||||
.and_then(|data| data.into_value(*span));
|
||||
|
||||
match result {
|
||||
Ok(v) => {
|
||||
let value = v.into_value(*span);
|
||||
Ok(value) => {
|
||||
// These should be the same color data forms supported by color_config.
|
||||
match value {
|
||||
Value::Record { .. } => color_record_to_nustyle(&value),
|
||||
|
@ -60,63 +60,13 @@ impl Command for BytesStartsWith {
|
||||
pattern,
|
||||
cell_paths,
|
||||
};
|
||||
|
||||
match input {
|
||||
PipelineData::ExternalStream {
|
||||
stdout: Some(stream),
|
||||
span,
|
||||
..
|
||||
} => {
|
||||
let mut i = 0;
|
||||
|
||||
for item in stream {
|
||||
let byte_slice = match &item {
|
||||
// String and binary data are valid byte patterns
|
||||
Ok(Value::String { val, .. }) => val.as_bytes(),
|
||||
Ok(Value::Binary { val, .. }) => val,
|
||||
// If any Error value is output, echo it back
|
||||
Ok(v @ Value::Error { .. }) => return Ok(v.clone().into_pipeline_data()),
|
||||
// Unsupported data
|
||||
Ok(other) => {
|
||||
return Ok(Value::error(
|
||||
ShellError::OnlySupportsThisInputType {
|
||||
exp_input_type: "string and binary".into(),
|
||||
wrong_type: other.get_type().to_string(),
|
||||
dst_span: span,
|
||||
src_span: other.span(),
|
||||
},
|
||||
span,
|
||||
)
|
||||
.into_pipeline_data());
|
||||
}
|
||||
Err(err) => return Err(err.to_owned()),
|
||||
};
|
||||
|
||||
let max = byte_slice.len().min(arg.pattern.len() - i);
|
||||
|
||||
if byte_slice[..max] == arg.pattern[i..i + max] {
|
||||
i += max;
|
||||
|
||||
if i >= arg.pattern.len() {
|
||||
return Ok(Value::bool(true, span).into_pipeline_data());
|
||||
}
|
||||
} else {
|
||||
return Ok(Value::bool(false, span).into_pipeline_data());
|
||||
}
|
||||
}
|
||||
|
||||
// We reached the end of the stream and never returned,
|
||||
// the pattern wasn't exhausted so it probably doesn't match
|
||||
Ok(Value::bool(false, span).into_pipeline_data())
|
||||
}
|
||||
_ => operate(
|
||||
starts_with,
|
||||
arg,
|
||||
input,
|
||||
call.head,
|
||||
engine_state.ctrlc.clone(),
|
||||
),
|
||||
}
|
||||
operate(
|
||||
starts_with,
|
||||
arg,
|
||||
input,
|
||||
call.head,
|
||||
engine_state.ctrlc.clone(),
|
||||
)
|
||||
}
|
||||
|
||||
fn examples(&self) -> Vec<Example> {
|
||||
|
@ -121,7 +121,7 @@ impl Command for Histogram {
|
||||
};
|
||||
|
||||
let span = call.head;
|
||||
let data_as_value = input.into_value(span);
|
||||
let data_as_value = input.into_value(span)?;
|
||||
let value_span = data_as_value.span();
|
||||
// `input` is not a list, here we can return an error.
|
||||
run_histogram(
|
||||
|
@@ -127,25 +127,15 @@ fn into_binary(
let cell_paths = call.rest(engine_state, stack, 0)?;
let cell_paths = (!cell_paths.is_empty()).then_some(cell_paths);

match input {
PipelineData::ExternalStream { stdout: None, .. } => {
Ok(Value::binary(vec![], head).into_pipeline_data())
}
PipelineData::ExternalStream {
stdout: Some(stream),
..
} => {
// TODO: in the future, we may want this to stream out, converting each to bytes
let output = stream.into_bytes()?;
Ok(Value::binary(output.item, head).into_pipeline_data())
}
_ => {
let args = Arguments {
cell_paths,
compact: call.has_flag(engine_state, stack, "compact")?,
};
operate(action, args, input, call.head, engine_state.ctrlc.clone())
}
if let PipelineData::ByteStream(stream, ..) = input {
// TODO: in the future, we may want this to stream out, converting each to bytes
Ok(Value::binary(stream.into_bytes()?, head).into_pipeline_data())
} else {
let args = Arguments {
cell_paths,
compact: call.has_flag(engine_state, stack, "compact")?,
};
operate(action, args, input, call.head, engine_state.ctrlc.clone())
}
}

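`into binary` (and `into glob`/`into string` below) collapse their old three-arm `ExternalStream` match into a single `if let` on `PipelineData::ByteStream`, draining the stream with `into_bytes()`/`into_string()`. A hedged sketch of the byte-stream branch only; the fall-through to the usual `operate` cell-path machinery is elided:

```rust
// Sketch: assumes the PipelineData/ByteStream API shown in this diff.
fn binary_from_input(input: PipelineData, head: Span) -> Result<PipelineData, ShellError> {
    if let PipelineData::ByteStream(stream, ..) = input {
        // Drain the stream (reader, file, or child stdout) into a single buffer.
        Ok(Value::binary(stream.into_bytes()?, head).into_pipeline_data())
    } else {
        // In the real command this falls through to `operate(action, args, input, ...)`.
        Ok(input)
    }
}
```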
@@ -101,11 +101,11 @@ fn into_cell_path(call: &Call, input: PipelineData) -> Result<PipelineData, Shel
let list: Vec<_> = stream.into_iter().collect();
Ok(list_to_cell_path(&list, head)?.into_pipeline_data())
}
PipelineData::ExternalStream { span, .. } => Err(ShellError::OnlySupportsThisInputType {
PipelineData::ByteStream(stream, ..) => Err(ShellError::OnlySupportsThisInputType {
exp_input_type: "list, int".into(),
wrong_type: "raw data".into(),
wrong_type: "byte stream".into(),
dst_span: head,
src_span: span,
src_span: stream.span(),
}),
PipelineData::Empty => Err(ShellError::PipelineEmpty { dst_span: head }),
}
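The other recurring change is in error arms: a `ByteStream` carries its own span, so commands that reject raw byte input call `stream.span()` instead of destructuring a `span` field out of the old `ExternalStream` variant. A small sketch of that error shape, with field values taken from the hunk above:

```rust
// Sketch: the ByteStream carries its own span, so no `span` field needs to
// be captured from the pipeline variant itself.
fn reject_byte_stream(stream: ByteStream, head: Span) -> ShellError {
    ShellError::OnlySupportsThisInputType {
        exp_input_type: "list, int".into(),
        wrong_type: "byte stream".into(),
        dst_span: head,
        src_span: stream.span(),
    }
}
```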
@@ -82,20 +82,12 @@ fn glob_helper(
let head = call.head;
let cell_paths = call.rest(engine_state, stack, 0)?;
let cell_paths = (!cell_paths.is_empty()).then_some(cell_paths);
let args = Arguments { cell_paths };
match input {
PipelineData::ExternalStream { stdout: None, .. } => {
Ok(Value::glob(String::new(), false, head).into_pipeline_data())
}
PipelineData::ExternalStream {
stdout: Some(stream),
..
} => {
// TODO: in the future, we may want this to stream out, converting each to bytes
let output = stream.into_string()?;
Ok(Value::glob(output.item, false, head).into_pipeline_data())
}
_ => operate(action, args, input, head, engine_state.ctrlc.clone()),
if let PipelineData::ByteStream(stream, ..) = input {
// TODO: in the future, we may want this to stream out, converting each to bytes
Ok(Value::glob(stream.into_string()?, false, head).into_pipeline_data())
} else {
let args = Arguments { cell_paths };
operate(action, args, input, head, engine_state.ctrlc.clone())
}
}

@@ -108,7 +108,7 @@ fn into_record(
call: &Call,
input: PipelineData,
) -> Result<PipelineData, ShellError> {
let input = input.into_value(call.head);
let input = input.into_value(call.head)?;
let input_type = input.get_type();
let span = input.span();
let res = match input {
@@ -155,26 +155,18 @@ fn string_helper(
}
let cell_paths = call.rest(engine_state, stack, 0)?;
let cell_paths = (!cell_paths.is_empty()).then_some(cell_paths);
let config = engine_state.get_config().clone();
let args = Arguments {
decimals_value,
cell_paths,
config,
};

match input {
PipelineData::ExternalStream { stdout: None, .. } => {
Ok(Value::string(String::new(), head).into_pipeline_data())
}
PipelineData::ExternalStream {
stdout: Some(stream),
..
} => {
// TODO: in the future, we may want this to stream out, converting each to bytes
let output = stream.into_string()?;
Ok(Value::string(output.item, head).into_pipeline_data())
}
_ => operate(action, args, input, head, engine_state.ctrlc.clone()),
if let PipelineData::ByteStream(stream, ..) = input {
// TODO: in the future, we may want this to stream out, converting each to bytes
Ok(Value::string(stream.into_string()?, head).into_pipeline_data())
} else {
let config = engine_state.get_config().clone();
let args = Arguments {
decimals_value,
cell_paths,
config,
};
operate(action, args, input, head, engine_state.ctrlc.clone())
}
}

@@ -91,7 +91,7 @@ impl SQLiteDatabase {
}

pub fn try_from_pipeline(input: PipelineData, span: Span) -> Result<Self, ShellError> {
let value = input.into_value(span);
let value = input.into_value(span)?;
Self::try_from_value(value)
}

@@ -29,7 +29,7 @@ impl Command for Inspect {
input: PipelineData,
) -> Result<PipelineData, ShellError> {
let input_metadata = input.metadata();
let input_val = input.into_value(call.head);
let input_val = input.into_value(call.head)?;
if input_val.is_nothing() {
return Err(ShellError::PipelineEmpty {
dst_span: call.head,
@@ -53,13 +53,12 @@ impl Command for TimeIt {
eval_block(engine_state, stack, block, input)?
} else {
let eval_expression_with_input = get_eval_expression_with_input(engine_state);
eval_expression_with_input(engine_state, stack, command_to_run, input)
.map(|res| res.0)?
eval_expression_with_input(engine_state, stack, command_to_run, input)?.0
}
} else {
PipelineData::empty()
}
.into_value(call.head);
.into_value(call.head)?;

let end_time = Instant::now();

@@ -1,8 +1,8 @@
use super::util::get_rest_for_glob_pattern;
#[allow(deprecated)]
use nu_engine::{command_prelude::*, current_dir, get_eval_block};
use nu_protocol::{BufferedReader, DataSource, NuGlob, PipelineMetadata, RawStream};
use std::{io::BufReader, path::Path};
use nu_protocol::{ByteStream, DataSource, NuGlob, PipelineMetadata};
use std::path::Path;

#[cfg(feature = "sqlite")]
use crate::database::SQLiteDatabase;
@@ -143,23 +143,13 @@ impl Command for Open {
}
};

let buf_reader = BufReader::new(file);

let file_contents = PipelineData::ExternalStream {
stdout: Some(RawStream::new(
Box::new(BufferedReader::new(buf_reader)),
ctrlc.clone(),
call_span,
None,
)),
stderr: None,
exit_code: None,
span: call_span,
metadata: Some(PipelineMetadata {
let stream = PipelineData::ByteStream(
ByteStream::file(file, call_span, ctrlc.clone()),
Some(PipelineMetadata {
data_source: DataSource::FilePath(path.to_path_buf()),
}),
trim_end_newline: false,
};
);

let exts_opt: Option<Vec<String>> = if raw {
None
} else {
@@ -184,9 +174,9 @@ impl Command for Open {
let decl = engine_state.get_decl(converter_id);
let command_output = if let Some(block_id) = decl.get_block_id() {
let block = engine_state.get_block(block_id);
eval_block(engine_state, stack, block, file_contents)
eval_block(engine_state, stack, block, stream)
} else {
decl.run(engine_state, stack, &Call::new(call_span), file_contents)
decl.run(engine_state, stack, &Call::new(call_span), stream)
};
output.push(command_output.map_err(|inner| {
ShellError::GenericError{
@@ -198,7 +188,7 @@ impl Command for Open {
}
})?);
}
None => output.push(file_contents),
None => output.push(stream),
}
}
}
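With `ByteStream::file`, `open` hands the `File` straight to the new stream type instead of wrapping a `BufReader` in a `RawStream`, and the metadata travels in the second field of `PipelineData::ByteStream`. A condensed sketch of just that construction (the converter lookup and error handling around it are omitted; the parameter types are assumptions based on the hunk above):

```rust
use std::{fs::File, path::Path, sync::{atomic::AtomicBool, Arc}};

// Sketch: `file` is an already-opened handle and `ctrlc` is the engine's
// interrupt flag; both come from the surrounding command code.
fn open_as_byte_stream(
    file: File,
    path: &Path,
    call_span: Span,
    ctrlc: Option<Arc<AtomicBool>>,
) -> PipelineData {
    PipelineData::ByteStream(
        ByteStream::file(file, call_span, ctrlc),
        Some(PipelineMetadata {
            data_source: DataSource::FilePath(path.to_path_buf()),
        }),
    )
}
```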
@@ -5,12 +5,15 @@ use nu_engine::{command_prelude::*, current_dir};
use nu_path::expand_path_with;
use nu_protocol::{
ast::{Expr, Expression},
DataSource, OutDest, PipelineMetadata, RawStream,
byte_stream::copy_with_interrupt,
process::ChildPipe,
ByteStreamSource, DataSource, OutDest, PipelineMetadata,
};
use std::{
fs::File,
io::Write,
io::{self, BufRead, BufReader, Read, Write},
path::{Path, PathBuf},
sync::{atomic::AtomicBool, Arc},
thread,
};

@ -104,12 +107,7 @@ impl Command for Save {
|
||||
});
|
||||
|
||||
match input {
|
||||
PipelineData::ExternalStream {
|
||||
stdout,
|
||||
stderr,
|
||||
metadata,
|
||||
..
|
||||
} => {
|
||||
PipelineData::ByteStream(stream, metadata) => {
|
||||
check_saving_to_source_file(metadata.as_ref(), &path, stderr_path.as_ref())?;
|
||||
|
||||
let (file, stderr_file) = get_files(
|
||||
@ -121,40 +119,97 @@ impl Command for Save {
|
||||
force,
|
||||
)?;
|
||||
|
||||
match (stdout, stderr) {
|
||||
(Some(stdout), stderr) => {
|
||||
// delegate a thread to redirect stderr to result.
|
||||
let handler = stderr
|
||||
.map(|stderr| match stderr_file {
|
||||
Some(stderr_file) => thread::Builder::new()
|
||||
.name("stderr redirector".to_string())
|
||||
.spawn(move || {
|
||||
stream_to_file(stderr, stderr_file, span, progress)
|
||||
}),
|
||||
None => thread::Builder::new()
|
||||
.name("stderr redirector".to_string())
|
||||
.spawn(move || stderr.drain()),
|
||||
})
|
||||
.transpose()
|
||||
.err_span(span)?;
|
||||
let size = stream.known_size();
|
||||
let ctrlc = engine_state.ctrlc.clone();
|
||||
|
||||
let res = stream_to_file(stdout, file, span, progress);
|
||||
if let Some(h) = handler {
|
||||
h.join().map_err(|err| ShellError::ExternalCommand {
|
||||
label: "Fail to receive external commands stderr message"
|
||||
.to_string(),
|
||||
help: format!("{err:?}"),
|
||||
span,
|
||||
})??;
|
||||
}
|
||||
res?;
|
||||
match stream.into_source() {
|
||||
ByteStreamSource::Read(read) => {
|
||||
stream_to_file(read, size, ctrlc, file, span, progress)?;
|
||||
}
|
||||
(None, Some(stderr)) => match stderr_file {
|
||||
Some(stderr_file) => stream_to_file(stderr, stderr_file, span, progress)?,
|
||||
None => stderr.drain()?,
|
||||
},
|
||||
(None, None) => {}
|
||||
};
|
||||
ByteStreamSource::File(source) => {
|
||||
stream_to_file(source, size, ctrlc, file, span, progress)?;
|
||||
}
|
||||
ByteStreamSource::Child(mut child) => {
|
||||
fn write_or_consume_stderr(
|
||||
stderr: ChildPipe,
|
||||
file: Option<File>,
|
||||
span: Span,
|
||||
ctrlc: Option<Arc<AtomicBool>>,
|
||||
progress: bool,
|
||||
) -> Result<(), ShellError> {
|
||||
if let Some(file) = file {
|
||||
match stderr {
|
||||
ChildPipe::Pipe(pipe) => {
|
||||
stream_to_file(pipe, None, ctrlc, file, span, progress)
|
||||
}
|
||||
ChildPipe::Tee(tee) => {
|
||||
stream_to_file(tee, None, ctrlc, file, span, progress)
|
||||
}
|
||||
}?
|
||||
} else {
|
||||
match stderr {
|
||||
ChildPipe::Pipe(mut pipe) => {
|
||||
io::copy(&mut pipe, &mut io::sink())
|
||||
}
|
||||
ChildPipe::Tee(mut tee) => io::copy(&mut tee, &mut io::sink()),
|
||||
}
|
||||
.err_span(span)?;
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
match (child.stdout.take(), child.stderr.take()) {
|
||||
(Some(stdout), stderr) => {
|
||||
// delegate a thread to redirect stderr to result.
|
||||
let handler = stderr
|
||||
.map(|stderr| {
|
||||
let ctrlc = ctrlc.clone();
|
||||
thread::Builder::new().name("stderr saver".into()).spawn(
|
||||
move || {
|
||||
write_or_consume_stderr(
|
||||
stderr,
|
||||
stderr_file,
|
||||
span,
|
||||
ctrlc,
|
||||
progress,
|
||||
)
|
||||
},
|
||||
)
|
||||
})
|
||||
.transpose()
|
||||
.err_span(span)?;
|
||||
|
||||
let res = match stdout {
|
||||
ChildPipe::Pipe(pipe) => {
|
||||
stream_to_file(pipe, None, ctrlc, file, span, progress)
|
||||
}
|
||||
ChildPipe::Tee(tee) => {
|
||||
stream_to_file(tee, None, ctrlc, file, span, progress)
|
||||
}
|
||||
};
|
||||
if let Some(h) = handler {
|
||||
h.join().map_err(|err| ShellError::ExternalCommand {
|
||||
label: "Fail to receive external commands stderr message"
|
||||
.to_string(),
|
||||
help: format!("{err:?}"),
|
||||
span,
|
||||
})??;
|
||||
}
|
||||
res?;
|
||||
}
|
||||
(None, Some(stderr)) => {
|
||||
write_or_consume_stderr(
|
||||
stderr,
|
||||
stderr_file,
|
||||
span,
|
||||
ctrlc,
|
||||
progress,
|
||||
)?;
|
||||
}
|
||||
(None, None) => {}
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
Ok(PipelineData::Empty)
|
||||
}
|
||||
@@ -302,8 +357,7 @@ fn input_to_bytes(
) -> Result<Vec<u8>, ShellError> {
let ext = if raw {
None
// if is extern stream , in other words , not value
} else if let PipelineData::ExternalStream { .. } = input {
} else if let PipelineData::ByteStream(..) = input {
None
} else if let PipelineData::Value(Value::String { .. }, ..) = input {
None
@@ -318,7 +372,7 @@ fn input_to_bytes(
input
};

value_to_bytes(input.into_value(span))
value_to_bytes(input.into_value(span)?)
}

/// Convert given data into content of file of specified extension if
@ -448,84 +502,54 @@ fn get_files(
|
||||
}
|
||||
|
||||
fn stream_to_file(
|
||||
mut stream: RawStream,
|
||||
mut source: impl Read,
|
||||
known_size: Option<u64>,
|
||||
ctrlc: Option<Arc<AtomicBool>>,
|
||||
mut file: File,
|
||||
span: Span,
|
||||
progress: bool,
|
||||
) -> Result<(), ShellError> {
|
||||
// https://github.com/nushell/nushell/pull/9377 contains the reason
|
||||
// for not using BufWriter<File>
|
||||
let writer = &mut file;
|
||||
// https://github.com/nushell/nushell/pull/9377 contains the reason for not using `BufWriter`
|
||||
if progress {
|
||||
let mut bytes_processed = 0;
|
||||
|
||||
let mut bytes_processed: u64 = 0;
|
||||
let bytes_processed_p = &mut bytes_processed;
|
||||
let file_total_size = stream.known_size;
|
||||
let mut process_failed = false;
|
||||
let process_failed_p = &mut process_failed;
|
||||
let mut bar = progress_bar::NuProgressBar::new(known_size);
|
||||
|
||||
// Create the progress bar
|
||||
// It looks a bit messy but I am doing it this way to avoid
|
||||
// creating the bar when is not needed
|
||||
let (mut bar_opt, bar_opt_clone) = if progress {
|
||||
let tmp_bar = progress_bar::NuProgressBar::new(file_total_size);
|
||||
let tmp_bar_clone = tmp_bar.clone();
|
||||
// TODO: reduce the number of progress bar updates?
|
||||
|
||||
(Some(tmp_bar), Some(tmp_bar_clone))
|
||||
} else {
|
||||
(None, None)
|
||||
};
|
||||
let mut reader = BufReader::new(source);
|
||||
|
||||
stream.try_for_each(move |result| {
|
||||
let buf = match result {
|
||||
Ok(v) => match v {
|
||||
Value::String { val, .. } => val.into_bytes(),
|
||||
Value::Binary { val, .. } => val,
|
||||
// Propagate errors by explicitly matching them before the final case.
|
||||
Value::Error { error, .. } => return Err(*error),
|
||||
other => {
|
||||
return Err(ShellError::OnlySupportsThisInputType {
|
||||
exp_input_type: "string or binary".into(),
|
||||
wrong_type: other.get_type().to_string(),
|
||||
dst_span: span,
|
||||
src_span: other.span(),
|
||||
});
|
||||
let res = loop {
|
||||
if nu_utils::ctrl_c::was_pressed(&ctrlc) {
|
||||
bar.abandoned_msg("# Cancelled #".to_owned());
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
match reader.fill_buf() {
|
||||
Ok(&[]) => break Ok(()),
|
||||
Ok(buf) => {
|
||||
file.write_all(buf).err_span(span)?;
|
||||
let len = buf.len();
|
||||
reader.consume(len);
|
||||
bytes_processed += len as u64;
|
||||
bar.update_bar(bytes_processed);
|
||||
}
|
||||
},
|
||||
Err(err) => {
|
||||
*process_failed_p = true;
|
||||
return Err(err);
|
||||
Err(e) if e.kind() == io::ErrorKind::Interrupted => continue,
|
||||
Err(e) => break Err(e),
|
||||
}
|
||||
};
|
||||
|
||||
// If the `progress` flag is set then
|
||||
if progress {
|
||||
// Update the total amount of bytes that has been saved and then print the progress bar
|
||||
*bytes_processed_p += buf.len() as u64;
|
||||
if let Some(bar) = &mut bar_opt {
|
||||
bar.update_bar(*bytes_processed_p);
|
||||
}
|
||||
}
|
||||
|
||||
if let Err(err) = writer.write_all(&buf) {
|
||||
*process_failed_p = true;
|
||||
return Err(ShellError::IOError {
|
||||
msg: err.to_string(),
|
||||
});
|
||||
}
|
||||
Ok(())
|
||||
})?;
|
||||
|
||||
// If the `progress` flag is set then
|
||||
if progress {
|
||||
// If the process failed, stop the progress bar with an error message.
|
||||
if process_failed {
|
||||
if let Some(bar) = bar_opt_clone {
|
||||
bar.abandoned_msg("# Error while saving #".to_owned());
|
||||
}
|
||||
if let Err(err) = res {
|
||||
let _ = file.flush();
|
||||
bar.abandoned_msg("# Error while saving #".to_owned());
|
||||
Err(err.into_spanned(span).into())
|
||||
} else {
|
||||
file.flush().err_span(span)?;
|
||||
Ok(())
|
||||
}
|
||||
} else {
|
||||
copy_with_interrupt(&mut source, &mut file, span, ctrlc.as_deref())?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
file.flush()?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
@@ -125,13 +125,11 @@ fn getcol(head: Span, input: PipelineData) -> Result<PipelineData, ShellError> {
.into_pipeline_data()
.set_metadata(metadata))
}
PipelineData::ExternalStream { .. } => Err(ShellError::OnlySupportsThisInputType {
PipelineData::ByteStream(stream, ..) => Err(ShellError::OnlySupportsThisInputType {
exp_input_type: "record or table".into(),
wrong_type: "raw data".into(),
wrong_type: "byte stream".into(),
dst_span: head,
src_span: input
.span()
.expect("PipelineData::ExternalStream had no span"),
src_span: stream.span(),
}),
}
}
@@ -133,11 +133,11 @@ fn drop_cols(
}
}
PipelineData::Empty => Ok(PipelineData::Empty),
PipelineData::ExternalStream { span, .. } => Err(ShellError::OnlySupportsThisInputType {
PipelineData::ByteStream(stream, ..) => Err(ShellError::OnlySupportsThisInputType {
exp_input_type: "table or record".into(),
wrong_type: "raw data".into(),
wrong_type: "byte stream".into(),
dst_span: head,
src_span: span,
src_span: stream.span(),
}),
}
}
@ -129,7 +129,9 @@ with 'transpose' first."#
|
||||
}
|
||||
Some(Value::list(vals, span))
|
||||
}
|
||||
Ok(data) => Some(data.into_value(head)),
|
||||
Ok(data) => Some(data.into_value(head).unwrap_or_else(|err| {
|
||||
Value::error(chain_error_with_input(err, is_error, span), span)
|
||||
})),
|
||||
Err(ShellError::Continue { span }) => Some(Value::nothing(span)),
|
||||
Err(ShellError::Break { .. }) => None,
|
||||
Err(error) => {
|
||||
@ -140,37 +142,39 @@ with 'transpose' first."#
|
||||
})
|
||||
.into_pipeline_data(head, engine_state.ctrlc.clone()))
|
||||
}
|
||||
PipelineData::ExternalStream { stdout: None, .. } => Ok(PipelineData::empty()),
|
||||
PipelineData::ExternalStream {
|
||||
stdout: Some(stream),
|
||||
..
|
||||
} => {
|
||||
let mut closure = ClosureEval::new(engine_state, stack, closure);
|
||||
Ok(stream
|
||||
.into_iter()
|
||||
.map_while(move |value| {
|
||||
let value = match value {
|
||||
Ok(value) => value,
|
||||
Err(ShellError::Continue { span }) => {
|
||||
return Some(Value::nothing(span))
|
||||
}
|
||||
Err(ShellError::Break { .. }) => return None,
|
||||
Err(err) => return Some(Value::error(err, head)),
|
||||
};
|
||||
PipelineData::ByteStream(stream, ..) => {
|
||||
if let Some(chunks) = stream.chunks() {
|
||||
let mut closure = ClosureEval::new(engine_state, stack, closure);
|
||||
Ok(chunks
|
||||
.map_while(move |value| {
|
||||
let value = match value {
|
||||
Ok(value) => value,
|
||||
Err(ShellError::Continue { span }) => {
|
||||
return Some(Value::nothing(span))
|
||||
}
|
||||
Err(ShellError::Break { .. }) => return None,
|
||||
Err(err) => return Some(Value::error(err, head)),
|
||||
};
|
||||
|
||||
let span = value.span();
|
||||
let is_error = value.is_error();
|
||||
match closure.run_with_value(value) {
|
||||
Ok(data) => Some(data.into_value(head)),
|
||||
Err(ShellError::Continue { span }) => Some(Value::nothing(span)),
|
||||
Err(ShellError::Break { .. }) => None,
|
||||
Err(error) => {
|
||||
let error = chain_error_with_input(error, is_error, span);
|
||||
Some(Value::error(error, span))
|
||||
let span = value.span();
|
||||
let is_error = value.is_error();
|
||||
match closure
|
||||
.run_with_value(value)
|
||||
.and_then(|data| data.into_value(head))
|
||||
{
|
||||
Ok(value) => Some(value),
|
||||
Err(ShellError::Continue { span }) => Some(Value::nothing(span)),
|
||||
Err(ShellError::Break { .. }) => None,
|
||||
Err(error) => {
|
||||
let error = chain_error_with_input(error, is_error, span);
|
||||
Some(Value::error(error, span))
|
||||
}
|
||||
}
|
||||
}
|
||||
})
|
||||
.into_pipeline_data(head, engine_state.ctrlc.clone()))
|
||||
})
|
||||
.into_pipeline_data(head, engine_state.ctrlc.clone()))
|
||||
} else {
|
||||
Ok(PipelineData::Empty)
|
||||
}
|
||||
}
|
||||
// This match allows non-iterables to be accepted,
|
||||
// which is currently considered undesirable (Nov 2022).
|
||||
|
@ -1,4 +1,5 @@
|
||||
use nu_engine::command_prelude::*;
|
||||
use std::io::Read;
|
||||
|
||||
pub fn empty(
|
||||
engine_state: &EngineState,
|
||||
@ -36,29 +37,26 @@ pub fn empty(
|
||||
} else {
|
||||
match input {
|
||||
PipelineData::Empty => Ok(PipelineData::Empty),
|
||||
PipelineData::ExternalStream { stdout, .. } => match stdout {
|
||||
Some(s) => {
|
||||
let bytes = s.into_bytes();
|
||||
|
||||
match bytes {
|
||||
Ok(s) => {
|
||||
if negate {
|
||||
Ok(Value::bool(!s.item.is_empty(), head).into_pipeline_data())
|
||||
} else {
|
||||
Ok(Value::bool(s.item.is_empty(), head).into_pipeline_data())
|
||||
}
|
||||
PipelineData::ByteStream(stream, ..) => {
|
||||
let span = stream.span();
|
||||
match stream.reader() {
|
||||
Some(reader) => {
|
||||
let is_empty = reader.bytes().next().transpose().err_span(span)?.is_none();
|
||||
if negate {
|
||||
Ok(Value::bool(!is_empty, head).into_pipeline_data())
|
||||
} else {
|
||||
Ok(Value::bool(is_empty, head).into_pipeline_data())
|
||||
}
|
||||
}
|
||||
None => {
|
||||
if negate {
|
||||
Ok(Value::bool(false, head).into_pipeline_data())
|
||||
} else {
|
||||
Ok(Value::bool(true, head).into_pipeline_data())
|
||||
}
|
||||
Err(err) => Err(err),
|
||||
}
|
||||
}
|
||||
None => {
|
||||
if negate {
|
||||
Ok(Value::bool(false, head).into_pipeline_data())
|
||||
} else {
|
||||
Ok(Value::bool(true, head).into_pipeline_data())
|
||||
}
|
||||
}
|
||||
},
|
||||
}
|
||||
PipelineData::ListStream(s, ..) => {
|
||||
let empty = s.into_iter().next().is_none();
|
||||
if negate {
|
||||
|
@ -58,33 +58,13 @@ a variable. On the other hand, the "row condition" syntax is not supported."#
|
||||
| PipelineData::ListStream(..) => {
|
||||
let mut closure = ClosureEval::new(engine_state, stack, closure);
|
||||
Ok(input
|
||||
.into_iter()
|
||||
.filter_map(move |value| match closure.run_with_value(value.clone()) {
|
||||
Ok(pred) => pred.into_value(head).is_true().then_some(value),
|
||||
Err(err) => {
|
||||
let span = value.span();
|
||||
let err = chain_error_with_input(err, value.is_error(), span);
|
||||
Some(Value::error(err, span))
|
||||
}
|
||||
})
|
||||
.into_pipeline_data(head, engine_state.ctrlc.clone()))
|
||||
}
|
||||
PipelineData::ExternalStream { stdout: None, .. } => Ok(PipelineData::empty()),
|
||||
PipelineData::ExternalStream {
|
||||
stdout: Some(stream),
|
||||
..
|
||||
} => {
|
||||
let mut closure = ClosureEval::new(engine_state, stack, closure);
|
||||
Ok(stream
|
||||
.into_iter()
|
||||
.filter_map(move |value| {
|
||||
let value = match value {
|
||||
Ok(value) => value,
|
||||
Err(err) => return Some(Value::error(err, head)),
|
||||
};
|
||||
|
||||
match closure.run_with_value(value.clone()) {
|
||||
Ok(pred) => pred.into_value(head).is_true().then_some(value),
|
||||
match closure
|
||||
.run_with_value(value.clone())
|
||||
.and_then(|data| data.into_value(head))
|
||||
{
|
||||
Ok(cond) => cond.is_true().then_some(value),
|
||||
Err(err) => {
|
||||
let span = value.span();
|
||||
let err = chain_error_with_input(err, value.is_error(), span);
|
||||
@ -94,14 +74,43 @@ a variable. On the other hand, the "row condition" syntax is not supported."#
|
||||
})
|
||||
.into_pipeline_data(head, engine_state.ctrlc.clone()))
|
||||
}
|
||||
PipelineData::ByteStream(stream, ..) => {
|
||||
if let Some(chunks) = stream.chunks() {
|
||||
let mut closure = ClosureEval::new(engine_state, stack, closure);
|
||||
Ok(chunks
|
||||
.into_iter()
|
||||
.filter_map(move |value| {
|
||||
let value = match value {
|
||||
Ok(value) => value,
|
||||
Err(err) => return Some(Value::error(err, head)),
|
||||
};
|
||||
|
||||
match closure
|
||||
.run_with_value(value.clone())
|
||||
.and_then(|data| data.into_value(head))
|
||||
{
|
||||
Ok(cond) => cond.is_true().then_some(value),
|
||||
Err(err) => {
|
||||
let span = value.span();
|
||||
let err = chain_error_with_input(err, value.is_error(), span);
|
||||
Some(Value::error(err, span))
|
||||
}
|
||||
}
|
||||
})
|
||||
.into_pipeline_data(head, engine_state.ctrlc.clone()))
|
||||
} else {
|
||||
Ok(PipelineData::Empty)
|
||||
}
|
||||
}
|
||||
// This match allows non-iterables to be accepted,
|
||||
// which is currently considered undesirable (Nov 2022).
|
||||
PipelineData::Value(value, ..) => {
|
||||
let result = ClosureEvalOnce::new(engine_state, stack, closure)
|
||||
.run_with_value(value.clone());
|
||||
.run_with_value(value.clone())
|
||||
.and_then(|data| data.into_value(head));
|
||||
|
||||
Ok(match result {
|
||||
Ok(pred) => pred.into_value(head).is_true().then_some(value),
|
||||
Ok(cond) => cond.is_true().then_some(value),
|
||||
Err(err) => {
|
||||
let span = value.span();
|
||||
let err = chain_error_with_input(err, value.is_error(), span);
|
||||
|
@ -447,57 +447,35 @@ fn find_with_rest_and_highlight(
|
||||
|
||||
Ok(PipelineData::ListStream(stream, metadata))
|
||||
}
|
||||
PipelineData::ExternalStream { stdout: None, .. } => Ok(PipelineData::empty()),
|
||||
PipelineData::ExternalStream {
|
||||
stdout: Some(stream),
|
||||
..
|
||||
} => {
|
||||
let mut output: Vec<Value> = vec![];
|
||||
for filter_val in stream {
|
||||
match filter_val {
|
||||
Ok(value) => {
|
||||
let span = value.span();
|
||||
match value {
|
||||
Value::String { val, .. } => {
|
||||
let split_char = if val.contains("\r\n") { "\r\n" } else { "\n" };
|
||||
PipelineData::ByteStream(stream, ..) => {
|
||||
let span = stream.span();
|
||||
if let Some(lines) = stream.lines() {
|
||||
let terms = lower_terms
|
||||
.into_iter()
|
||||
.map(|term| term.to_expanded_string("", &filter_config).to_lowercase())
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
for line in val.split(split_char) {
|
||||
for term in lower_terms.iter() {
|
||||
let term_str = term.to_expanded_string("", &filter_config);
|
||||
let lower_val = line.to_lowercase();
|
||||
if lower_val.contains(
|
||||
&term.to_expanded_string("", &config).to_lowercase(),
|
||||
) {
|
||||
output.push(Value::string(
|
||||
highlight_search_string(
|
||||
line,
|
||||
&term_str,
|
||||
&string_style,
|
||||
&highlight_style,
|
||||
)?,
|
||||
span,
|
||||
))
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
// Propagate errors by explicitly matching them before the final case.
|
||||
Value::Error { error, .. } => return Err(*error),
|
||||
other => {
|
||||
return Err(ShellError::UnsupportedInput {
|
||||
msg: "unsupported type from raw stream".into(),
|
||||
input: format!("input: {:?}", other.get_type()),
|
||||
msg_span: span,
|
||||
input_span: other.span(),
|
||||
});
|
||||
}
|
||||
let mut output: Vec<Value> = vec![];
|
||||
for line in lines {
|
||||
let line = line?.to_lowercase();
|
||||
for term in &terms {
|
||||
if line.contains(term) {
|
||||
output.push(Value::string(
|
||||
highlight_search_string(
|
||||
&line,
|
||||
term,
|
||||
&string_style,
|
||||
&highlight_style,
|
||||
)?,
|
||||
span,
|
||||
))
|
||||
}
|
||||
}
|
||||
// Propagate any errors that were in the stream
|
||||
Err(e) => return Err(e),
|
||||
};
|
||||
}
|
||||
Ok(Value::list(output, span).into_pipeline_data())
|
||||
} else {
|
||||
Ok(PipelineData::Empty)
|
||||
}
|
||||
Ok(output.into_pipeline_data(span, ctrlc))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@@ -170,11 +170,11 @@ fn first_helper(
))
}
}
PipelineData::ExternalStream { span, .. } => Err(ShellError::OnlySupportsThisInputType {
PipelineData::ByteStream(stream, ..) => Err(ShellError::OnlySupportsThisInputType {
exp_input_type: "list, binary or range".into(),
wrong_type: "raw data".into(),
wrong_type: "byte stream".into(),
dst_span: head,
src_span: span,
src_span: stream.span(),
}),
PipelineData::Empty => Err(ShellError::OnlySupportsThisInputType {
exp_input_type: "list, binary or range".into(),
@@ -81,7 +81,7 @@ If multiple cell paths are given, this will produce a list of values."#

let paths = std::iter::once(cell_path).chain(rest);

let input = input.into_value(span);
let input = input.into_value(span)?;

for path in paths {
let val = input.clone().follow_cell_path(&path.members, !sensitive);
@@ -207,7 +207,7 @@ fn group_closure(
for value in values {
let key = closure
.run_with_value(value.clone())?
.into_value(span)
.into_value(span)?
.coerce_into_string()?;

groups.entry(key).or_default().push(value);
@@ -66,7 +66,7 @@ impl Command for Headers {
let config = engine_state.get_config();
let metadata = input.metadata();
let span = input.span().unwrap_or(call.head);
let value = input.into_value(span);
let value = input.into_value(span)?;
let Value::List { vals: table, .. } = value else {
return Err(ShellError::TypeMismatch {
err_message: "not a table".to_string(),
@ -190,7 +190,7 @@ fn insert(
|
||||
let value = value.unwrap_or(Value::nothing(head));
|
||||
let new_value = ClosureEvalOnce::new(engine_state, stack, *val)
|
||||
.run_with_value(value.clone())?
|
||||
.into_value(head);
|
||||
.into_value(head)?;
|
||||
|
||||
pre_elems.push(new_value);
|
||||
if !end_of_stream {
|
||||
@ -261,8 +261,8 @@ fn insert(
|
||||
type_name: "empty pipeline".to_string(),
|
||||
span: head,
|
||||
}),
|
||||
PipelineData::ExternalStream { .. } => Err(ShellError::IncompatiblePathAccess {
|
||||
type_name: "external stream".to_string(),
|
||||
PipelineData::ByteStream(..) => Err(ShellError::IncompatiblePathAccess {
|
||||
type_name: "byte stream".to_string(),
|
||||
span: head,
|
||||
}),
|
||||
}
|
||||
@ -284,7 +284,7 @@ fn insert_value_by_closure(
|
||||
value.clone()
|
||||
};
|
||||
|
||||
let new_value = closure.run_with_value(value_at_path)?.into_value(span);
|
||||
let new_value = closure.run_with_value(value_at_path)?.into_value(span)?;
|
||||
value.insert_data_at_cell_path(cell_path, new_value, span)
|
||||
}
|
||||
|
||||
@ -304,7 +304,7 @@ fn insert_single_value_by_closure(
|
||||
value.clone()
|
||||
};
|
||||
|
||||
let new_value = closure.run_with_value(value_at_path)?.into_value(span);
|
||||
let new_value = closure.run_with_value(value_at_path)?.into_value(span)?;
|
||||
value.insert_data_at_cell_path(cell_path, new_value, span)
|
||||
}
|
||||
|
||||
|
@ -55,10 +55,11 @@ impl Command for Items {
|
||||
let result = closure
|
||||
.add_arg(Value::string(col, span))
|
||||
.add_arg(val)
|
||||
.run_with_input(PipelineData::Empty);
|
||||
.run_with_input(PipelineData::Empty)
|
||||
.and_then(|data| data.into_value(head));
|
||||
|
||||
match result {
|
||||
Ok(data) => Some(data.into_value(head)),
|
||||
Ok(value) => Some(value),
|
||||
Err(ShellError::Break { .. }) => None,
|
||||
Err(err) => {
|
||||
let err = chain_error_with_input(err, false, span);
|
||||
@ -77,20 +78,18 @@ impl Command for Items {
|
||||
}),
|
||||
}
|
||||
}
|
||||
PipelineData::ListStream(..) => Err(ShellError::OnlySupportsThisInputType {
|
||||
PipelineData::ListStream(stream, ..) => Err(ShellError::OnlySupportsThisInputType {
|
||||
exp_input_type: "record".into(),
|
||||
wrong_type: "stream".into(),
|
||||
dst_span: head,
|
||||
src_span: head,
|
||||
dst_span: call.head,
|
||||
src_span: stream.span(),
|
||||
}),
|
||||
PipelineData::ByteStream(stream, ..) => Err(ShellError::OnlySupportsThisInputType {
|
||||
exp_input_type: "record".into(),
|
||||
wrong_type: "byte stream".into(),
|
||||
dst_span: call.head,
|
||||
src_span: stream.span(),
|
||||
}),
|
||||
PipelineData::ExternalStream { span, .. } => {
|
||||
Err(ShellError::OnlySupportsThisInputType {
|
||||
exp_input_type: "record".into(),
|
||||
wrong_type: "raw data".into(),
|
||||
dst_span: head,
|
||||
src_span: span,
|
||||
})
|
||||
}
|
||||
}
|
||||
.map(|data| data.set_metadata(metadata))
|
||||
}
|
||||
|
@@ -75,7 +75,7 @@ impl Command for Join {
let join_type = join_type(engine_state, stack, call)?;

// FIXME: we should handle ListStreams properly instead of collecting
let collected_input = input.into_value(span);
let collected_input = input.into_value(span)?;

match (&collected_input, &table_2, &l_on, &r_on) {
(
@ -160,14 +160,12 @@ impl Command for Last {
|
||||
}),
|
||||
}
|
||||
}
|
||||
PipelineData::ExternalStream { span, .. } => {
|
||||
Err(ShellError::OnlySupportsThisInputType {
|
||||
exp_input_type: "list, binary or range".into(),
|
||||
wrong_type: "raw data".into(),
|
||||
dst_span: head,
|
||||
src_span: span,
|
||||
})
|
||||
}
|
||||
PipelineData::ByteStream(stream, ..) => Err(ShellError::OnlySupportsThisInputType {
|
||||
exp_input_type: "list, binary or range".into(),
|
||||
wrong_type: "byte stream".into(),
|
||||
dst_span: head,
|
||||
src_span: stream.span(),
|
||||
}),
|
||||
PipelineData::Empty => Err(ShellError::OnlySupportsThisInputType {
|
||||
exp_input_type: "list, binary or range".into(),
|
||||
wrong_type: "null".into(),
|
||||
|
@ -1,6 +1,4 @@
|
||||
use nu_engine::command_prelude::*;
|
||||
use nu_protocol::RawStream;
|
||||
use std::collections::VecDeque;
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct Lines;
|
||||
@ -33,23 +31,33 @@ impl Command for Lines {
|
||||
|
||||
let span = input.span().unwrap_or(call.head);
|
||||
match input {
|
||||
PipelineData::Value(Value::String { val, .. }, ..) => {
|
||||
let lines = if skip_empty {
|
||||
val.lines()
|
||||
.filter_map(|s| {
|
||||
if s.trim().is_empty() {
|
||||
None
|
||||
} else {
|
||||
Some(Value::string(s, span))
|
||||
}
|
||||
})
|
||||
.collect()
|
||||
} else {
|
||||
val.lines().map(|s| Value::string(s, span)).collect()
|
||||
};
|
||||
PipelineData::Value(value, ..) => match value {
|
||||
Value::String { val, .. } => {
|
||||
let lines = if skip_empty {
|
||||
val.lines()
|
||||
.filter_map(|s| {
|
||||
if s.trim().is_empty() {
|
||||
None
|
||||
} else {
|
||||
Some(Value::string(s, span))
|
||||
}
|
||||
})
|
||||
.collect()
|
||||
} else {
|
||||
val.lines().map(|s| Value::string(s, span)).collect()
|
||||
};
|
||||
|
||||
Ok(Value::list(lines, span).into_pipeline_data())
|
||||
}
|
||||
Ok(Value::list(lines, span).into_pipeline_data())
|
||||
}
|
||||
// Propagate existing errors
|
||||
Value::Error { error, .. } => Err(*error),
|
||||
value => Err(ShellError::OnlySupportsThisInputType {
|
||||
exp_input_type: "string or byte stream".into(),
|
||||
wrong_type: value.get_type().to_string(),
|
||||
dst_span: head,
|
||||
src_span: value.span(),
|
||||
}),
|
||||
},
|
||||
PipelineData::Empty => Ok(PipelineData::Empty),
|
||||
PipelineData::ListStream(stream, metadata) => {
|
||||
let stream = stream.modify(|iter| {
|
||||
@ -76,27 +84,18 @@ impl Command for Lines {
|
||||
|
||||
Ok(PipelineData::ListStream(stream, metadata))
|
||||
}
|
||||
PipelineData::Value(val, ..) => {
|
||||
match val {
|
||||
// Propagate existing errors
|
||||
Value::Error { error, .. } => Err(*error),
|
||||
_ => Err(ShellError::OnlySupportsThisInputType {
|
||||
exp_input_type: "string or raw data".into(),
|
||||
wrong_type: val.get_type().to_string(),
|
||||
dst_span: head,
|
||||
src_span: val.span(),
|
||||
}),
|
||||
PipelineData::ByteStream(stream, ..) => {
|
||||
if let Some(lines) = stream.lines() {
|
||||
Ok(lines
|
||||
.map(move |line| match line {
|
||||
Ok(line) => Value::string(line, head),
|
||||
Err(err) => Value::error(err, head),
|
||||
})
|
||||
.into_pipeline_data(head, ctrlc))
|
||||
} else {
|
||||
Ok(PipelineData::empty())
|
||||
}
|
||||
}
|
||||
PipelineData::ExternalStream { stdout: None, .. } => Ok(PipelineData::empty()),
|
||||
PipelineData::ExternalStream {
|
||||
stdout: Some(stream),
|
||||
metadata,
|
||||
..
|
||||
} => Ok(RawStreamLinesAdapter::new(stream, head, skip_empty)
|
||||
.map(move |x| x.unwrap_or_else(|err| Value::error(err, head)))
|
||||
.into_pipeline_data(head, ctrlc)
|
||||
.set_metadata(metadata)),
|
||||
}
|
||||
}
|
||||
|
||||
@ -112,108 +111,6 @@ impl Command for Lines {
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
struct RawStreamLinesAdapter {
|
||||
inner: RawStream,
|
||||
inner_complete: bool,
|
||||
skip_empty: bool,
|
||||
span: Span,
|
||||
incomplete_line: String,
|
||||
queue: VecDeque<String>,
|
||||
}
|
||||
|
||||
impl Iterator for RawStreamLinesAdapter {
|
||||
type Item = Result<Value, ShellError>;
|
||||
|
||||
fn next(&mut self) -> Option<Self::Item> {
|
||||
loop {
|
||||
if let Some(s) = self.queue.pop_front() {
|
||||
if self.skip_empty && s.trim().is_empty() {
|
||||
continue;
|
||||
}
|
||||
return Some(Ok(Value::string(s, self.span)));
|
||||
} else {
|
||||
// inner is complete, feed out remaining state
|
||||
if self.inner_complete {
|
||||
return if self.incomplete_line.is_empty() {
|
||||
None
|
||||
} else {
|
||||
Some(Ok(Value::string(
|
||||
std::mem::take(&mut self.incomplete_line),
|
||||
self.span,
|
||||
)))
|
||||
};
|
||||
}
|
||||
|
||||
// pull more data from inner
|
||||
if let Some(result) = self.inner.next() {
|
||||
match result {
|
||||
Ok(v) => {
|
||||
let span = v.span();
|
||||
match v {
|
||||
// TODO: Value::Binary support required?
|
||||
Value::String { val, .. } => {
|
||||
self.span = span;
|
||||
|
||||
let mut lines = val.lines();
|
||||
|
||||
// handle incomplete line from previous
|
||||
if !self.incomplete_line.is_empty() {
|
||||
if let Some(first) = lines.next() {
|
||||
self.incomplete_line.push_str(first);
|
||||
self.queue.push_back(std::mem::take(
|
||||
&mut self.incomplete_line,
|
||||
));
|
||||
}
|
||||
}
|
||||
|
||||
// save completed lines
|
||||
self.queue.extend(lines.map(String::from));
|
||||
|
||||
if !val.ends_with('\n') {
|
||||
// incomplete line, save for next time
|
||||
// if `val` and `incomplete_line` were empty,
|
||||
// then pop will return none
|
||||
if let Some(s) = self.queue.pop_back() {
|
||||
self.incomplete_line = s;
|
||||
}
|
||||
}
|
||||
}
|
||||
// Propagate errors by explicitly matching them before the final case.
|
||||
Value::Error { error, .. } => return Some(Err(*error)),
|
||||
other => {
|
||||
return Some(Err(ShellError::OnlySupportsThisInputType {
|
||||
exp_input_type: "string".into(),
|
||||
wrong_type: other.get_type().to_string(),
|
||||
dst_span: self.span,
|
||||
src_span: other.span(),
|
||||
}));
|
||||
}
|
||||
}
|
||||
}
|
||||
Err(err) => return Some(Err(err)),
|
||||
}
|
||||
} else {
|
||||
self.inner_complete = true;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl RawStreamLinesAdapter {
|
||||
pub fn new(inner: RawStream, span: Span, skip_empty: bool) -> Self {
|
||||
Self {
|
||||
inner,
|
||||
span,
|
||||
skip_empty,
|
||||
incomplete_line: String::new(),
|
||||
queue: VecDeque::new(),
|
||||
inner_complete: false,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod test {
|
||||
use super::*;
|
||||
|
@ -143,17 +143,16 @@ impl Command for ParEach {
|
||||
.map(move |(index, value)| {
|
||||
let span = value.span();
|
||||
let is_error = value.is_error();
|
||||
let result =
|
||||
let value =
|
||||
ClosureEvalOnce::new(engine_state, stack, closure.clone())
|
||||
.run_with_value(value);
|
||||
|
||||
let value = match result {
|
||||
Ok(data) => data.into_value(span),
|
||||
Err(err) => Value::error(
|
||||
chain_error_with_input(err, is_error, span),
|
||||
span,
|
||||
),
|
||||
};
|
||||
.run_with_value(value)
|
||||
.and_then(|data| data.into_value(span))
|
||||
.unwrap_or_else(|err| {
|
||||
Value::error(
|
||||
chain_error_with_input(err, is_error, span),
|
||||
span,
|
||||
)
|
||||
});
|
||||
|
||||
(index, value)
|
||||
})
|
||||
@ -170,17 +169,16 @@ impl Command for ParEach {
|
||||
.map(move |(index, value)| {
|
||||
let span = value.span();
|
||||
let is_error = value.is_error();
|
||||
let result =
|
||||
let value =
|
||||
ClosureEvalOnce::new(engine_state, stack, closure.clone())
|
||||
.run_with_value(value);
|
||||
|
||||
let value = match result {
|
||||
Ok(data) => data.into_value(span),
|
||||
Err(err) => Value::error(
|
||||
chain_error_with_input(err, is_error, span),
|
||||
span,
|
||||
),
|
||||
};
|
||||
.run_with_value(value)
|
||||
.and_then(|data| data.into_value(span))
|
||||
.unwrap_or_else(|err| {
|
||||
Value::error(
|
||||
chain_error_with_input(err, is_error, span),
|
||||
span,
|
||||
)
|
||||
});
|
||||
|
||||
(index, value)
|
||||
})
|
||||
@ -203,40 +201,12 @@ impl Command for ParEach {
|
||||
.map(move |(index, value)| {
|
||||
let span = value.span();
|
||||
let is_error = value.is_error();
|
||||
let result = ClosureEvalOnce::new(engine_state, stack, closure.clone())
|
||||
.run_with_value(value);
|
||||
|
||||
let value = match result {
|
||||
Ok(data) => data.into_value(head),
|
||||
Err(err) => {
|
||||
Value::error(chain_error_with_input(err, is_error, span), span)
|
||||
}
|
||||
};
|
||||
|
||||
(index, value)
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
apply_order(vec).into_pipeline_data(head, engine_state.ctrlc.clone())
|
||||
})),
|
||||
PipelineData::ExternalStream { stdout: None, .. } => Ok(PipelineData::empty()),
|
||||
PipelineData::ExternalStream {
|
||||
stdout: Some(stream),
|
||||
..
|
||||
} => Ok(create_pool(max_threads)?.install(|| {
|
||||
let vec = stream
|
||||
.enumerate()
|
||||
.par_bridge()
|
||||
.map(move |(index, value)| {
|
||||
let value = match value {
|
||||
Ok(value) => value,
|
||||
Err(err) => return (index, Value::error(err, head)),
|
||||
};
|
||||
|
||||
let value = ClosureEvalOnce::new(engine_state, stack, closure.clone())
|
||||
.run_with_value(value)
|
||||
.map(|data| data.into_value(head))
|
||||
.unwrap_or_else(|err| Value::error(err, head));
|
||||
.and_then(|data| data.into_value(head))
|
||||
.unwrap_or_else(|err| {
|
||||
Value::error(chain_error_with_input(err, is_error, span), span)
|
||||
});
|
||||
|
||||
(index, value)
|
||||
})
|
||||
@ -244,6 +214,34 @@ impl Command for ParEach {
|
||||
|
||||
apply_order(vec).into_pipeline_data(head, engine_state.ctrlc.clone())
|
||||
})),
|
||||
PipelineData::ByteStream(stream, ..) => {
|
||||
if let Some(chunks) = stream.chunks() {
|
||||
Ok(create_pool(max_threads)?.install(|| {
|
||||
let vec = chunks
|
||||
.enumerate()
|
||||
.par_bridge()
|
||||
.map(move |(index, value)| {
|
||||
let value = match value {
|
||||
Ok(value) => value,
|
||||
Err(err) => return (index, Value::error(err, head)),
|
||||
};
|
||||
|
||||
let value =
|
||||
ClosureEvalOnce::new(engine_state, stack, closure.clone())
|
||||
.run_with_value(value)
|
||||
.and_then(|data| data.into_value(head))
|
||||
.unwrap_or_else(|err| Value::error(err, head));
|
||||
|
||||
(index, value)
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
apply_order(vec).into_pipeline_data(head, engine_state.ctrlc.clone())
|
||||
}))
|
||||
} else {
|
||||
Ok(PipelineData::empty())
|
||||
}
|
||||
}
|
||||
}
|
||||
.and_then(|x| x.filter(|v| !v.is_nothing(), engine_state.ctrlc.clone()))
|
||||
.map(|data| data.set_metadata(metadata))
|
||||
|
@@ -115,7 +115,7 @@ impl Command for Reduce {
.add_arg(value)
.add_arg(acc)
.run_with_input(PipelineData::Empty)?
.into_value(head);
.into_value(head)?;
}

Ok(acc.with_span(head).into_pipeline_data())
@@ -173,7 +173,7 @@ fn reject(
) -> Result<PipelineData, ShellError> {
let mut unique_rows: HashSet<usize> = HashSet::new();
let metadata = input.metadata();
let val = input.into_value(span);
let val = input.into_value(span)?;
let mut val = val;
let mut new_columns = vec![];
let mut new_rows = vec![];
@ -87,15 +87,14 @@ impl Command for Skip {
|
||||
let ctrlc = engine_state.ctrlc.clone();
|
||||
let input_span = input.span().unwrap_or(call.head);
|
||||
match input {
|
||||
PipelineData::ExternalStream { .. } => Err(ShellError::OnlySupportsThisInputType {
|
||||
PipelineData::ByteStream(stream, ..) => Err(ShellError::OnlySupportsThisInputType {
|
||||
exp_input_type: "list, binary or range".into(),
|
||||
wrong_type: "raw data".into(),
|
||||
wrong_type: "byte stream".into(),
|
||||
dst_span: call.head,
|
||||
src_span: input_span,
|
||||
src_span: stream.span(),
|
||||
}),
|
||||
PipelineData::Value(Value::Binary { val, .. }, metadata) => {
|
||||
let bytes = val.into_iter().skip(n).collect::<Vec<_>>();
|
||||
|
||||
Ok(Value::binary(bytes, input_span).into_pipeline_data_with_metadata(metadata))
|
||||
}
|
||||
_ => Ok(input
|
||||
|
@@ -85,7 +85,8 @@ impl Command for SkipUntil {
.skip_while(move |value| {
closure
.run_with_value(value.clone())
.map(|data| data.into_value(head).is_false())
.and_then(|data| data.into_value(head))
.map(|cond| cond.is_false())
.unwrap_or(false)
})
.into_pipeline_data_with_metadata(head, engine_state.ctrlc.clone(), metadata))
@@ -90,7 +90,8 @@ impl Command for SkipWhile {
.skip_while(move |value| {
closure
.run_with_value(value.clone())
.map(|data| data.into_value(head).is_true())
.and_then(|data| data.into_value(head))
.map(|cond| cond.is_true())
.unwrap_or(false)
})
.into_pipeline_data_with_metadata(head, engine_state.ctrlc.clone(), metadata))
@ -78,14 +78,12 @@ impl Command for Take {
|
||||
stream.modify(|iter| iter.take(rows_desired)),
|
||||
metadata,
|
||||
)),
|
||||
PipelineData::ExternalStream { span, .. } => {
|
||||
Err(ShellError::OnlySupportsThisInputType {
|
||||
exp_input_type: "list, binary or range".into(),
|
||||
wrong_type: "raw data".into(),
|
||||
dst_span: head,
|
||||
src_span: span,
|
||||
})
|
||||
}
|
||||
PipelineData::ByteStream(stream, ..) => Err(ShellError::OnlySupportsThisInputType {
|
||||
exp_input_type: "list, binary or range".into(),
|
||||
wrong_type: "byte stream".into(),
|
||||
dst_span: head,
|
||||
src_span: stream.span(),
|
||||
}),
|
||||
PipelineData::Empty => Err(ShellError::OnlySupportsThisInputType {
|
||||
exp_input_type: "list, binary or range".into(),
|
||||
wrong_type: "null".into(),
|
||||
|
@@ -81,7 +81,8 @@ impl Command for TakeUntil {
.take_while(move |value| {
closure
.run_with_value(value.clone())
.map(|data| data.into_value(head).is_false())
.and_then(|data| data.into_value(head))
.map(|cond| cond.is_false())
.unwrap_or(false)
})
.into_pipeline_data_with_metadata(head, engine_state.ctrlc.clone(), metadata))
@@ -81,7 +81,8 @@ impl Command for TakeWhile {
.take_while(move |value| {
closure
.run_with_value(value.clone())
.map(|data| data.into_value(head).is_true())
.and_then(|data| data.into_value(head))
.map(|cond| cond.is_true())
.unwrap_or(false)
})
.into_pipeline_data_with_metadata(head, engine_state.ctrlc.clone(), metadata))
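The `skip until/while` and `take until/while` predicates above all change the same way: the closure's `PipelineData` result goes through the now-fallible `into_value`, and any evaluation or conversion error makes the predicate default to `false`. A sketch of that predicate shape, using the `ClosureEval` API as it appears in these hunks:

```rust
// Sketch: `closure` is the command's ClosureEval, as in the hunks above.
fn keep_taking(closure: &mut ClosureEval, value: &Value, head: Span) -> bool {
    closure
        .run_with_value(value.clone())
        .and_then(|data| data.into_value(head))
        .map(|cond| cond.is_true())
        // Any evaluation or conversion error falls back to false.
        .unwrap_or(false)
}
```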
@ -1,6 +1,17 @@
|
||||
use nu_engine::{command_prelude::*, get_eval_block_with_early_return};
|
||||
use nu_protocol::{engine::Closure, OutDest, RawStream};
|
||||
use std::{sync::mpsc, thread};
|
||||
use nu_protocol::{
|
||||
byte_stream::copy_with_interrupt, engine::Closure, process::ChildPipe, ByteStream,
|
||||
ByteStreamSource, OutDest,
|
||||
};
|
||||
use std::{
|
||||
io::{self, Read, Write},
|
||||
sync::{
|
||||
atomic::AtomicBool,
|
||||
mpsc::{self, Sender},
|
||||
Arc,
|
||||
},
|
||||
thread::{self, JoinHandle},
|
||||
};
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct Tee;
|
||||
@ -67,138 +78,205 @@ use it in your pipeline."#
|
||||
let head = call.head;
|
||||
let use_stderr = call.has_flag(engine_state, stack, "stderr")?;
|
||||
|
||||
let Spanned {
|
||||
item: Closure { block_id, captures },
|
||||
span: closure_span,
|
||||
} = call.req(engine_state, stack, 0)?;
|
||||
let closure: Spanned<Closure> = call.req(engine_state, stack, 0)?;
|
||||
let closure_span = closure.span;
|
||||
let closure = closure.item;
|
||||
|
||||
let closure_engine_state = engine_state.clone();
|
||||
let mut closure_stack = stack
|
||||
.captures_to_stack_preserve_out_dest(captures)
|
||||
.reset_pipes();
|
||||
let mut eval_block = {
|
||||
let closure_engine_state = engine_state.clone();
|
||||
let mut closure_stack = stack
|
||||
.captures_to_stack_preserve_out_dest(closure.captures)
|
||||
.reset_pipes();
|
||||
let eval_block_with_early_return = get_eval_block_with_early_return(engine_state);
|
||||
|
||||
let metadata = input.metadata();
|
||||
let metadata_clone = metadata.clone();
|
||||
move |input| {
|
||||
let result = eval_block_with_early_return(
|
||||
&closure_engine_state,
|
||||
&mut closure_stack,
|
||||
closure_engine_state.get_block(closure.block_id),
|
||||
input,
|
||||
);
|
||||
// Make sure to drain any iterator produced to avoid unexpected behavior
|
||||
result.and_then(|data| data.drain().map(|_| ()))
|
||||
}
|
||||
};
|
||||
|
||||
let eval_block_with_early_return = get_eval_block_with_early_return(engine_state);
|
||||
if let PipelineData::ByteStream(stream, metadata) = input {
|
||||
let span = stream.span();
|
||||
let ctrlc = engine_state.ctrlc.clone();
|
||||
let eval_block = {
|
||||
let metadata = metadata.clone();
|
||||
move |stream| eval_block(PipelineData::ByteStream(stream, metadata))
|
||||
};
|
||||
|
||||
match input {
|
||||
// Handle external streams specially, to make sure they pass through
|
||||
PipelineData::ExternalStream {
|
||||
stdout,
|
||||
stderr,
|
||||
exit_code,
|
||||
span,
|
||||
metadata,
|
||||
trim_end_newline,
|
||||
} => {
|
||||
let known_size = if use_stderr {
|
||||
stderr.as_ref().and_then(|s| s.known_size)
|
||||
} else {
|
||||
stdout.as_ref().and_then(|s| s.known_size)
|
||||
};
|
||||
match stream.into_source() {
|
||||
ByteStreamSource::Read(read) => {
|
||||
if use_stderr {
|
||||
return stderr_misuse(span, head);
|
||||
}
|
||||
|
||||
let with_stream = move |rx: mpsc::Receiver<Result<Vec<u8>, ShellError>>| {
|
||||
let iter = rx.into_iter();
|
||||
let input_from_channel = PipelineData::ExternalStream {
|
||||
stdout: Some(RawStream::new(
|
||||
Box::new(iter),
|
||||
closure_engine_state.ctrlc.clone(),
|
||||
span,
|
||||
known_size,
|
||||
)),
|
||||
stderr: None,
|
||||
exit_code: None,
|
||||
span,
|
||||
metadata: metadata_clone,
|
||||
trim_end_newline,
|
||||
let tee = IoTee::new(read, span, eval_block)?;
|
||||
|
||||
Ok(PipelineData::ByteStream(
|
||||
ByteStream::read(tee, span, ctrlc),
|
||||
metadata,
|
||||
))
|
||||
}
|
||||
ByteStreamSource::File(file) => {
|
||||
if use_stderr {
|
||||
return stderr_misuse(span, head);
|
||||
}
|
||||
|
||||
let tee = IoTee::new(file, span, eval_block)?;
|
||||
|
||||
Ok(PipelineData::ByteStream(
|
||||
ByteStream::read(tee, span, ctrlc),
|
||||
metadata,
|
||||
))
|
||||
}
|
||||
ByteStreamSource::Child(mut child) => {
|
||||
let stderr_thread = if use_stderr {
|
||||
let stderr_thread = if let Some(stderr) = child.stderr.take() {
|
||||
match stack.stderr() {
|
||||
OutDest::Pipe | OutDest::Capture => {
|
||||
let tee = IoTee::new(stderr, span, eval_block)?;
|
||||
child.stderr = Some(ChildPipe::Tee(Box::new(tee)));
|
||||
None
|
||||
}
|
||||
OutDest::Null => Some(tee_pipe_on_thread(
|
||||
stderr,
|
||||
io::sink(),
|
||||
span,
|
||||
ctrlc.as_ref(),
|
||||
eval_block,
|
||||
)?),
|
||||
OutDest::Inherit => Some(tee_pipe_on_thread(
|
||||
stderr,
|
||||
io::stderr(),
|
||||
span,
|
||||
ctrlc.as_ref(),
|
||||
eval_block,
|
||||
)?),
|
||||
OutDest::File(file) => Some(tee_pipe_on_thread(
|
||||
stderr,
|
||||
file.clone(),
|
||||
span,
|
||||
ctrlc.as_ref(),
|
||||
eval_block,
|
||||
)?),
|
||||
}
|
||||
} else {
|
||||
None
|
||||
};
|
||||
|
||||
if let Some(stdout) = child.stdout.take() {
|
||||
match stack.stdout() {
|
||||
OutDest::Pipe | OutDest::Capture => {
|
||||
child.stdout = Some(stdout);
|
||||
Ok(())
|
||||
}
|
||||
OutDest::Null => {
|
||||
copy_pipe(stdout, io::sink(), span, ctrlc.as_deref())
|
||||
}
|
||||
OutDest::Inherit => {
|
||||
copy_pipe(stdout, io::stdout(), span, ctrlc.as_deref())
|
||||
}
|
||||
OutDest::File(file) => {
|
||||
                        copy_pipe(stdout, file.as_ref(), span, ctrlc.as_deref())
                    }
                }?;
            }

            stderr_thread
        } else {
            let stderr_thread = if let Some(stderr) = child.stderr.take() {
                match stack.stderr() {
                    OutDest::Pipe | OutDest::Capture => {
                        child.stderr = Some(stderr);
                        Ok(None)
                    }
                    OutDest::Null => {
                        copy_pipe_on_thread(stderr, io::sink(), span, ctrlc.as_ref())
                            .map(Some)
                    }
                    OutDest::Inherit => {
                        copy_pipe_on_thread(stderr, io::stderr(), span, ctrlc.as_ref())
                            .map(Some)
                    }
                    OutDest::File(file) => {
                        copy_pipe_on_thread(stderr, file.clone(), span, ctrlc.as_ref())
                            .map(Some)
                    }
                }?
            } else {
                None
            };

            if let Some(stdout) = child.stdout.take() {
                match stack.stdout() {
                    OutDest::Pipe | OutDest::Capture => {
                        let tee = IoTee::new(stdout, span, eval_block)?;
                        child.stdout = Some(ChildPipe::Tee(Box::new(tee)));
                        Ok(())
                    }
                    OutDest::Null => {
                        tee_pipe(stdout, io::sink(), span, ctrlc.as_deref(), eval_block)
                    }
                    OutDest::Inherit => tee_pipe(
                        stdout,
                        io::stdout(),
                        span,
                        ctrlc.as_deref(),
                        eval_block,
                    ),
                    OutDest::File(file) => tee_pipe(
                        stdout,
                        file.as_ref(),
                        span,
                        ctrlc.as_deref(),
                        eval_block,
                    ),
                }?;
            }

            stderr_thread
        };
            let result = eval_block_with_early_return(
                &closure_engine_state,
                &mut closure_stack,
                closure_engine_state.get_block(block_id),
                input_from_channel,
            );
            // Make sure to drain any iterator produced to avoid unexpected behavior
            result.and_then(|data| data.drain())
        };

        if use_stderr {
            let stderr = stderr
                .map(|stderr| {
                    let iter = tee(stderr.stream, with_stream).err_span(head)?;
                    Ok::<_, ShellError>(RawStream::new(
                        Box::new(iter.map(flatten_result)),
                        stderr.ctrlc,
                        stderr.span,
                        stderr.known_size,
                    ))
                })
                .transpose()?;
            Ok(PipelineData::ExternalStream {
                stdout,
                stderr,
                exit_code,
                span,
                metadata,
                trim_end_newline,
            })
        } else {
            let stdout = stdout
                .map(|stdout| {
                    let iter = tee(stdout.stream, with_stream).err_span(head)?;
                    Ok::<_, ShellError>(RawStream::new(
                        Box::new(iter.map(flatten_result)),
                        stdout.ctrlc,
                        stdout.span,
                        stdout.known_size,
                    ))
                })
                .transpose()?;
            Ok(PipelineData::ExternalStream {
                stdout,
                stderr,
                exit_code,
                span,
                metadata,
                trim_end_newline,
            })
        if child.stdout.is_some() || child.stderr.is_some() {
            Ok(PipelineData::ByteStream(
                ByteStream::child(*child, span),
                metadata,
            ))
        } else {
            if let Some(thread) = stderr_thread {
                thread.join().unwrap_or_else(|_| Err(panic_error()))?;
            }
            child.wait()?;
            Ok(PipelineData::Empty)
        }
    }
    }
    // --stderr is not allowed if the input is not an external stream
    _ if use_stderr => Err(ShellError::UnsupportedInput {
        msg: "--stderr can only be used on external streams".into(),
        input: "the input to `tee` is not an external stream".into(),
        msg_span: head,
        input_span: input.span().unwrap_or(head),
    }),
    // Handle others with the plain iterator
    _ => {
        let teed = tee(input.into_iter(), move |rx| {
            let input_from_channel = rx.into_pipeline_data_with_metadata(
                head,
                closure_engine_state.ctrlc.clone(),
                metadata_clone,
            );
            let result = eval_block_with_early_return(
                &closure_engine_state,
                &mut closure_stack,
                closure_engine_state.get_block(block_id),
                input_from_channel,
            );
            // Make sure to drain any iterator produced to avoid unexpected behavior
            result.and_then(|data| data.drain())
        })
        .err_span(head)?
        .map(move |result| result.unwrap_or_else(|err| Value::error(err, closure_span)))
        .into_pipeline_data_with_metadata(
            head,
            engine_state.ctrlc.clone(),
            metadata,
        );

        Ok(teed)
    } else {
        if use_stderr {
            return stderr_misuse(input.span().unwrap_or(head), head);
        }

        let span = input.span().unwrap_or(head);
        let ctrlc = engine_state.ctrlc.clone();
        let metadata = input.metadata();
        let metadata_clone = metadata.clone();

        Ok(tee(input.into_iter(), move |rx| {
            let input = rx.into_pipeline_data_with_metadata(span, ctrlc, metadata_clone);
            eval_block(input)
        })
        .err_span(call.head)?
        .map(move |result| result.unwrap_or_else(|err| Value::error(err, closure_span)))
        .into_pipeline_data_with_metadata(
            span,
            engine_state.ctrlc.clone(),
            metadata,
        ))
    }
}
@ -213,10 +291,6 @@ fn panic_error() -> ShellError {
    }
}

fn flatten_result<T, E>(result: Result<Result<T, E>, E>) -> Result<T, E> {
    result.unwrap_or_else(Err)
}

/// Copies the iterator to a channel on another thread. If an error is produced on that thread,
/// it is embedded in the resulting iterator as an `Err` as soon as possible. When the iterator
/// finishes, it waits for the other thread to finish, also handling any error produced at that
@ -233,7 +307,7 @@ where

    let mut thread = Some(
        thread::Builder::new()
            .name("stderr consumer".into())
            .name("tee".into())
            .spawn(move || with_cloned_stream(rx))?,
    );

@ -273,6 +347,134 @@ where
    }))
}
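The doc comment above describes the iterator-level `tee` helper: every item is forwarded to a channel that a closure consumes on another thread, while the item is also passed through to the caller, and any error from that thread is surfaced through the resulting iterator. A minimal, std-only sketch of that pattern follows; the names, the `Clone` bound, and the plain `String` errors are simplifications for illustration, not nushell's actual helper:

```rust
use std::sync::mpsc;
use std::thread;

/// Yield every item of `input` while also sending a copy to a worker thread.
/// When the input is exhausted, the sender is dropped and the worker's result
/// is surfaced as a final `Err` item if it failed.
fn tee_iter<T, F>(
    input: impl IntoIterator<Item = T>,
    worker: F,
) -> impl Iterator<Item = Result<T, String>>
where
    T: Clone + Send + 'static,
    F: FnOnce(mpsc::Receiver<T>) -> Result<(), String> + Send + 'static,
{
    let (tx, rx) = mpsc::channel();
    let mut tx = Some(tx);
    let mut handle = Some(thread::spawn(move || worker(rx)));
    let mut items = input.into_iter();

    std::iter::from_fn(move || match items.next() {
        Some(item) => {
            if let Some(tx) = &tx {
                // Ignore send errors: the worker may have stopped early.
                let _ = tx.send(item.clone());
            }
            Some(Ok(item))
        }
        None => {
            // Dropping the sender lets the worker's receive loop finish.
            tx.take();
            match handle.take() {
                Some(handle) => {
                    let result = handle
                        .join()
                        .unwrap_or_else(|_| Err("worker panicked".to_string()));
                    match result {
                        Ok(()) => None,
                        Err(err) => Some(Err(err)),
                    }
                }
                None => None,
            }
        }
    })
}

fn main() {
    let passed: Result<Vec<_>, _> = tee_iter(1..=3, |rx| {
        // Side consumer: just observe what flows through the pipeline.
        for n in rx {
            println!("side copy saw {n}");
        }
        Ok(())
    })
    .collect();
    println!("passed through: {passed:?}");
}
```

Per the doc comment, nushell's version also reports a worker error as soon as possible rather than only once the input ends; this sketch only checks at the end.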

fn stderr_misuse<T>(span: Span, head: Span) -> Result<T, ShellError> {
    Err(ShellError::UnsupportedInput {
        msg: "--stderr can only be used on external commands".into(),
        input: "the input to `tee` is not an external command".into(),
        msg_span: head,
        input_span: span,
    })
}

struct IoTee<R: Read> {
    reader: R,
    sender: Option<Sender<Vec<u8>>>,
    thread: Option<JoinHandle<Result<(), ShellError>>>,
}

impl<R: Read> IoTee<R> {
    fn new(
        reader: R,
        span: Span,
        eval_block: impl FnOnce(ByteStream) -> Result<(), ShellError> + Send + 'static,
    ) -> Result<Self, ShellError> {
        let (sender, receiver) = mpsc::channel();

        let thread = thread::Builder::new()
            .name("tee".into())
            .spawn(move || eval_block(ByteStream::from_iter(receiver, span, None)))
            .err_span(span)?;

        Ok(Self {
            reader,
            sender: Some(sender),
            thread: Some(thread),
        })
    }
}

impl<R: Read> Read for IoTee<R> {
    fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> {
        if let Some(thread) = self.thread.take() {
            if thread.is_finished() {
                if let Err(err) = thread.join().unwrap_or_else(|_| Err(panic_error())) {
                    return Err(io::Error::new(io::ErrorKind::Other, err));
                }
            } else {
                self.thread = Some(thread)
            }
        }
        let len = self.reader.read(buf)?;
        if len == 0 {
            self.sender = None;
            if let Some(thread) = self.thread.take() {
                if let Err(err) = thread.join().unwrap_or_else(|_| Err(panic_error())) {
                    return Err(io::Error::new(io::ErrorKind::Other, err));
                }
            }
        } else if let Some(sender) = self.sender.as_mut() {
            if sender.send(buf[..len].to_vec()).is_err() {
                self.sender = None;
            }
        }
        Ok(len)
    }
}
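`IoTee` applies the same idea at the byte level: it wraps a `Read`, forwards every chunk it reads to a channel feeding the tee closure on another thread, and joins that thread (propagating its `ShellError`) at end of stream or as soon as it finishes. A stripped-down, std-only sketch of the pattern, with the nushell-specific pieces (`ByteStream`, `Span`, `ShellError`) left out and illustrative names like `TeeReader`:

```rust
use std::io::{self, Read};
use std::sync::mpsc::{self, Sender};
use std::thread::{self, JoinHandle};

/// Stripped-down illustration of the tee-reader pattern: every chunk read
/// from the inner reader is also sent to a consumer on another thread.
struct TeeReader<R: Read> {
    inner: R,
    side: Option<Sender<Vec<u8>>>,
    consumer: Option<JoinHandle<()>>,
}

impl<R: Read> TeeReader<R> {
    fn new(inner: R, consumer: impl FnOnce(mpsc::Receiver<Vec<u8>>) + Send + 'static) -> Self {
        let (side, rx) = mpsc::channel();
        let consumer = thread::spawn(move || consumer(rx));
        Self {
            inner,
            side: Some(side),
            consumer: Some(consumer),
        }
    }
}

impl<R: Read> Read for TeeReader<R> {
    fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> {
        let len = self.inner.read(buf)?;
        if len == 0 {
            // EOF: close the channel and wait for the consumer to finish.
            self.side = None;
            if let Some(handle) = self.consumer.take() {
                handle
                    .join()
                    .map_err(|_| io::Error::new(io::ErrorKind::Other, "consumer panicked"))?;
            }
        } else if let Some(side) = &self.side {
            // A closed channel just means the consumer stopped early.
            let _ = side.send(buf[..len].to_vec());
        }
        Ok(len)
    }
}

fn main() -> io::Result<()> {
    let data = io::Cursor::new(b"hello byte stream".to_vec());
    let mut reader = TeeReader::new(data, |rx| {
        let copied: Vec<u8> = rx.into_iter().flatten().collect();
        println!("side copy: {}", String::from_utf8_lossy(&copied));
    });
    let mut out = String::new();
    reader.read_to_string(&mut out)?;
    println!("passed through: {out}");
    Ok(())
}
```

Unlike this sketch, the real `IoTee` also polls the worker thread between reads (the `is_finished` check above) so an error from the closure can abort the copy mid-stream.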

fn tee_pipe(
    pipe: ChildPipe,
    mut dest: impl Write,
    span: Span,
    ctrlc: Option<&AtomicBool>,
    eval_block: impl FnOnce(ByteStream) -> Result<(), ShellError> + Send + 'static,
) -> Result<(), ShellError> {
    match pipe {
        ChildPipe::Pipe(pipe) => {
            let mut tee = IoTee::new(pipe, span, eval_block)?;
            copy_with_interrupt(&mut tee, &mut dest, span, ctrlc)?;
        }
        ChildPipe::Tee(tee) => {
            let mut tee = IoTee::new(tee, span, eval_block)?;
            copy_with_interrupt(&mut tee, &mut dest, span, ctrlc)?;
        }
    }
    Ok(())
}

fn tee_pipe_on_thread(
    pipe: ChildPipe,
    dest: impl Write + Send + 'static,
    span: Span,
    ctrlc: Option<&Arc<AtomicBool>>,
    eval_block: impl FnOnce(ByteStream) -> Result<(), ShellError> + Send + 'static,
) -> Result<JoinHandle<Result<(), ShellError>>, ShellError> {
    let ctrlc = ctrlc.cloned();
    thread::Builder::new()
        .name("stderr tee".into())
        .spawn(move || tee_pipe(pipe, dest, span, ctrlc.as_deref(), eval_block))
        .map_err(|e| e.into_spanned(span).into())
}

fn copy_pipe(
    pipe: ChildPipe,
    mut dest: impl Write,
    span: Span,
    ctrlc: Option<&AtomicBool>,
) -> Result<(), ShellError> {
    match pipe {
        ChildPipe::Pipe(mut pipe) => {
            copy_with_interrupt(&mut pipe, &mut dest, span, ctrlc)?;
        }
        ChildPipe::Tee(mut tee) => {
            copy_with_interrupt(&mut tee, &mut dest, span, ctrlc)?;
        }
    }
    Ok(())
}

fn copy_pipe_on_thread(
    pipe: ChildPipe,
    dest: impl Write + Send + 'static,
    span: Span,
    ctrlc: Option<&Arc<AtomicBool>>,
) -> Result<JoinHandle<Result<(), ShellError>>, ShellError> {
    let ctrlc = ctrlc.cloned();
    thread::Builder::new()
        .name("stderr copier".into())
        .spawn(move || copy_pipe(pipe, dest, span, ctrlc.as_deref()))
        .map_err(|e| e.into_spanned(span).into())
}
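These helpers all funnel into `copy_with_interrupt`, whose definition sits outside the hunks shown here. As a rough idea of what such a helper does, here is a hedged, std-only sketch: a chunked copy loop that checks an interrupt flag (ctrl-c) between reads. The buffer size, return type, and the missing `Span` parameter are guesses for illustration, not nushell's actual signature:

```rust
use std::io::{self, Read, Write};
use std::sync::atomic::{AtomicBool, Ordering};

/// Copy `reader` into `writer` in chunks, stopping early if `interrupt`
/// (e.g. a ctrl-c flag) is set. Returns the number of bytes copied.
fn copy_with_interrupt(
    reader: &mut impl Read,
    writer: &mut impl Write,
    interrupt: Option<&AtomicBool>,
) -> io::Result<u64> {
    let mut buf = [0u8; 8192];
    let mut total = 0u64;
    loop {
        if interrupt.map_or(false, |flag| flag.load(Ordering::Relaxed)) {
            break;
        }
        let len = reader.read(&mut buf)?;
        if len == 0 {
            break;
        }
        writer.write_all(&buf[..len])?;
        total += len as u64;
    }
    writer.flush()?;
    Ok(total)
}

fn main() -> io::Result<()> {
    let mut src = io::Cursor::new(vec![b'x'; 100_000]);
    let mut dst = Vec::new();
    let copied = copy_with_interrupt(&mut src, &mut dst, None)?;
    println!("copied {copied} bytes");
    Ok(())
}
```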

#[test]
fn tee_copies_values_to_other_thread_and_passes_them_through() {
    let (tx, rx) = mpsc::channel();

@ -225,8 +225,8 @@ fn update(
            type_name: "empty pipeline".to_string(),
            span: head,
        }),
        PipelineData::ExternalStream { .. } => Err(ShellError::IncompatiblePathAccess {
            type_name: "external stream".to_string(),
        PipelineData::ByteStream(..) => Err(ShellError::IncompatiblePathAccess {
            type_name: "byte stream".to_string(),
            span: head,
        }),
    }
@ -250,7 +250,7 @@ fn update_value_by_closure(
    let new_value = closure
        .add_arg(arg.clone())
        .run_with_input(value_at_path.into_pipeline_data())?
        .into_value(span);
        .into_value(span)?;

    value.update_data_at_cell_path(cell_path, new_value)
}
@ -273,7 +273,7 @@ fn update_single_value_by_closure(
    let new_value = closure
        .add_arg(arg.clone())
        .run_with_input(value_at_path.into_pipeline_data())?
        .into_value(span);
        .into_value(span)?;

    value.update_data_at_cell_path(cell_path, new_value)
}

@ -218,7 +218,7 @@ fn upsert(
    if let Value::Closure { val, .. } = replacement {
        ClosureEvalOnce::new(engine_state, stack, *val)
            .run_with_value(value)?
            .into_value(head)
            .into_value(head)?
    } else {
        replacement
    }
@ -285,8 +285,8 @@ fn upsert(
            type_name: "empty pipeline".to_string(),
            span: head,
        }),
        PipelineData::ExternalStream { .. } => Err(ShellError::IncompatiblePathAccess {
            type_name: "external stream".to_string(),
        PipelineData::ByteStream(..) => Err(ShellError::IncompatiblePathAccess {
            type_name: "byte stream".to_string(),
            span: head,
        }),
    }
@ -311,7 +311,11 @@ fn upsert_value_by_closure(
        .map(IntoPipelineData::into_pipeline_data)
        .unwrap_or(PipelineData::Empty);

    let new_value = closure.add_arg(arg).run_with_input(input)?.into_value(span);
    let new_value = closure
        .add_arg(arg)
        .run_with_input(input)?
        .into_value(span)?;

    value.upsert_data_at_cell_path(cell_path, new_value)
}

@ -334,7 +338,11 @@ fn upsert_single_value_by_closure(
        .map(IntoPipelineData::into_pipeline_data)
        .unwrap_or(PipelineData::Empty);

    let new_value = closure.add_arg(arg).run_with_input(input)?.into_value(span);
    let new_value = closure
        .add_arg(arg)
        .run_with_input(input)?
        .into_value(span)?;

    value.upsert_data_at_cell_path(cell_path, new_value)
}

@ -36,7 +36,7 @@ pub fn boolean_fold(
            break;
        }

        let pred = closure.run_with_value(value)?.into_value(head).is_true();
        let pred = closure.run_with_value(value)?.into_value(head)?.is_true();

        if pred == accumulator {
            return Ok(Value::bool(accumulator, head).into_pipeline_data());
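A change repeated across these hunks: converting pipeline data into a `Value` is now fallible, because collecting a byte stream (or a failed external command) can produce an error, so call sites append `?` or chain with `and_then`. The mock types below only illustrate that call-site pattern; they are not nushell's real `PipelineData`, `Value`, or `ShellError`:

```rust
// Schematic mock, not nushell's real types: `into_value` is now fallible,
// so call sites either propagate with `?` or chain with `and_then`.
#[derive(Debug)]
struct ShellError(String);

#[derive(Debug)]
enum Value {
    Int(i64),
}

enum PipelineData {
    Value(Value),
    Failed(ShellError),
}

impl PipelineData {
    // Previously this kind of conversion returned `Value` directly; collecting
    // a byte stream can fail, so the conversion now surfaces that error.
    fn into_value(self) -> Result<Value, ShellError> {
        match self {
            PipelineData::Value(value) => Ok(value),
            PipelineData::Failed(err) => Err(err),
        }
    }
}

fn run_closure(ok: bool) -> Result<PipelineData, ShellError> {
    if ok {
        Ok(PipelineData::Value(Value::Int(42)))
    } else {
        Ok(PipelineData::Failed(ShellError("stream error".into())))
    }
}

fn main() -> Result<(), ShellError> {
    // Call-site shape from the hunks above: `run_with_input(...)?.into_value(span)?`
    let new_value = run_closure(true)?.into_value()?;
    println!("{new_value:?}");

    // Or the `and_then` form used elsewhere in this PR.
    let chained = run_closure(false).and_then(|data| data.into_value());
    println!("{chained:?}");
    Ok(())
}
```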

@ -180,13 +180,11 @@ fn values(
                Err(err) => Err(err),
            }
        }
        PipelineData::ExternalStream { .. } => Err(ShellError::OnlySupportsThisInputType {
        PipelineData::ByteStream(stream, ..) => Err(ShellError::OnlySupportsThisInputType {
            exp_input_type: "record or table".into(),
            wrong_type: "raw data".into(),
            wrong_type: "byte stream".into(),
            dst_span: head,
            src_span: input
                .span()
                .expect("PipelineData::ExternalStream had no span"),
            src_span: stream.span(),
        }),
    }
}

@ -57,9 +57,14 @@ not supported."#
    let metadata = input.metadata();
    Ok(input
        .into_iter_strict(head)?
        .filter_map(move |value| match closure.run_with_value(value.clone()) {
            Ok(data) => data.into_value(head).is_true().then_some(value),
            Err(err) => Some(Value::error(err, head)),
        .filter_map(move |value| {
            match closure
                .run_with_value(value.clone())
                .and_then(|data| data.into_value(head))
            {
                Ok(cond) => cond.is_true().then_some(value),
                Err(err) => Some(Value::error(err, head)),
            }
        })
        .into_pipeline_data_with_metadata(head, engine_state.ctrlc.clone(), metadata))
}

@ -43,8 +43,8 @@ impl Command for Wrap {
                .into_iter()
                .map(move |x| Value::record(record! { name.clone() => x }, span))
                .into_pipeline_data_with_metadata(span, engine_state.ctrlc.clone(), metadata)),
            PipelineData::ExternalStream { .. } => Ok(Value::record(
                record! { name => input.into_value(span) },
            PipelineData::ByteStream(stream, ..) => Ok(Value::record(
                record! { name => stream.into_value()? },
                span,
            )
            .into_pipeline_data_with_metadata(metadata)),

@ -59,7 +59,7 @@ impl Command for FromJson {
        let (string_input, span, metadata) = input.collect_string_strict(span)?;

        if string_input.is_empty() {
            return Ok(PipelineData::new_with_metadata(metadata, span));
            return Ok(Value::nothing(span).into_pipeline_data());
        }

        let strict = call.has_flag(engine_state, stack, "strict")?;

@ -2,9 +2,8 @@
// implementation here is unique.

use std::{
    collections::VecDeque,
    error::Error,
    io::{self, Cursor, ErrorKind, Write},
    io::{self, Cursor, ErrorKind},
    string::FromUtf8Error,
    sync::{atomic::AtomicBool, Arc},
};
@ -12,7 +11,6 @@ use std::{
use byteorder::{BigEndian, ReadBytesExt};
use chrono::{TimeZone, Utc};
use nu_engine::command_prelude::*;
use nu_protocol::RawStream;
use rmp::decode::{self as mp, ValueReadError};

/// Max recursion depth
@ -121,12 +119,20 @@ MessagePack: https://msgpack.org/
                read_msgpack(Cursor::new(bytes), opts)
            }
            // Deserialize from a raw stream directly without having to collect it
            PipelineData::ExternalStream {
                stdout: Some(raw_stream),
                ..
            } => read_msgpack(ReadRawStream::new(raw_stream), opts),
            PipelineData::ByteStream(stream, ..) => {
                let span = stream.span();
                if let Some(reader) = stream.reader() {
                    read_msgpack(reader, opts)
                } else {
                    Err(ShellError::PipelineMismatch {
                        exp_input_type: "binary or byte stream".into(),
                        dst_span: call.head,
                        src_span: span,
                    })
                }
            }
            input => Err(ShellError::PipelineMismatch {
                exp_input_type: "binary".into(),
                exp_input_type: "binary or byte stream".into(),
                dst_span: call.head,
                src_span: input.span().unwrap_or(call.head),
            }),
@ -483,57 +489,6 @@ where
        .map_err(|err| ReadError::Io(err, span))
}

/// Adapter to read MessagePack from a `RawStream`
///
/// TODO: contribute this back to `RawStream` in general, with more polish, if it works
pub(crate) struct ReadRawStream {
    pub stream: RawStream,
    // Use a `VecDeque` for read efficiency
    pub leftover: VecDeque<u8>,
}

impl ReadRawStream {
    pub(crate) fn new(mut stream: RawStream) -> ReadRawStream {
        ReadRawStream {
            leftover: std::mem::take(&mut stream.leftover).into(),
            stream,
        }
    }
}

impl io::Read for ReadRawStream {
    fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> {
        if buf.is_empty() {
            Ok(0)
        } else if !self.leftover.is_empty() {
            // Take as many leftover bytes as possible
            self.leftover.read(buf)
        } else {
            // Try to get data from the RawStream. We have to be careful not to break on a zero-len
            // buffer though, since that would mean EOF
            loop {
                if let Some(result) = self.stream.stream.next() {
                    let bytes = result.map_err(|err| io::Error::new(ErrorKind::Other, err))?;
                    if !bytes.is_empty() {
                        let min_len = bytes.len().min(buf.len());
                        let (source, leftover_bytes) = bytes.split_at(min_len);
                        buf[0..min_len].copy_from_slice(source);
                        // Keep whatever bytes we couldn't use in the leftover vec
                        self.leftover.write_all(leftover_bytes)?;
                        return Ok(min_len);
                    } else {
                        // Zero-length buf, continue
                        continue;
                    }
                } else {
                    // End of input
                    return Ok(0);
                }
            }
        }
    }
}

/// Return an error if this is not the end of file.
///
/// This can help detect if parsing succeeded incorrectly, perhaps due to corruption.
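The removed `ReadRawStream` adapter existed only to make a `RawStream` usable as `std::io::Read`. A `ByteStream` already hands out a reader (`stream.reader()` in the earlier hunk), so decoders can consume it directly, optionally behind `brotli::Decompressor`. A small, std-only sketch of that shape, with `decode_from` standing in for a real decoder such as `read_msgpack`:

```rust
use std::io::{self, Read};

/// Stand-in for a decoder like `read_msgpack`: anything that implements
/// `Read` can be consumed directly, whether it is a collected buffer,
/// a file, or a byte stream's reader.
fn decode_from(mut reader: impl Read) -> io::Result<usize> {
    let mut buf = Vec::new();
    reader.read_to_end(&mut buf)?;
    // A real decoder would parse `buf` incrementally instead of collecting it.
    Ok(buf.len())
}

fn main() -> io::Result<()> {
    // Collected-value path (like the `Cursor::new(bytes)` arm above).
    let from_buffer = decode_from(io::Cursor::new(vec![1, 2, 3]))?;

    // Streaming path: any reader works, no adapter type needed.
    let from_stream = decode_from(io::repeat(0).take(1024))?;

    println!("decoded {from_buffer} and {from_stream} bytes");
    Ok(())
}
```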

@ -2,7 +2,7 @@ use std::io::Cursor;

use nu_engine::command_prelude::*;

use super::msgpack::{read_msgpack, Opts, ReadRawStream};
use super::msgpack::{read_msgpack, Opts};

const BUFFER_SIZE: usize = 65536;

@ -50,15 +50,21 @@ impl Command for FromMsgpackz {
                read_msgpack(reader, opts)
            }
            // Deserialize from a raw stream directly without having to collect it
            PipelineData::ExternalStream {
                stdout: Some(raw_stream),
                ..
            } => {
                let reader = brotli::Decompressor::new(ReadRawStream::new(raw_stream), BUFFER_SIZE);
                read_msgpack(reader, opts)
            PipelineData::ByteStream(stream, ..) => {
                let span = stream.span();
                if let Some(reader) = stream.reader() {
                    let reader = brotli::Decompressor::new(reader, BUFFER_SIZE);
                    read_msgpack(reader, opts)
                } else {
                    Err(ShellError::PipelineMismatch {
                        exp_input_type: "binary or byte stream".into(),
                        dst_span: call.head,
                        src_span: span,
                    })
                }
            }
            _ => Err(ShellError::PipelineMismatch {
                exp_input_type: "binary".into(),
                exp_input_type: "binary or byte stream".into(),
                dst_span: call.head,
                src_span: span,
            }),

Some files were not shown because too many files have changed in this diff.