Refactor: introduce 2 associated functions to PipelineData (#16233)

# Description
As the title says: this PR introduces 2 associated functions to `PipelineData`:
1. `PipelineData::list_stream` --> creates a `PipelineData::ListStream`
2. `PipelineData::byte_stream` --> creates a `PipelineData::ByteStream`
These functions are then used everywhere in place of constructing the variants directly (see the sketch below).
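
For context, here is a minimal sketch of what the two new constructors presumably boil down to, assuming (based on the call sites in the diff below) that their signatures simply mirror the existing enum variants, i.e. a stream plus optional metadata:

```rust
// Sketch only; the real definitions live in nu-protocol's `PipelineData` impl,
// alongside the existing `PipelineData::value` and `PipelineData::empty` helpers.
impl PipelineData {
    /// Wrap a `ListStream` and its optional metadata into `PipelineData`.
    pub fn list_stream(stream: ListStream, metadata: Option<PipelineMetadata>) -> Self {
        PipelineData::ListStream(stream, metadata)
    }

    /// Wrap a `ByteStream` and its optional metadata into `PipelineData`.
    pub fn byte_stream(stream: ByteStream, metadata: Option<PipelineMetadata>) -> Self {
        PipelineData::ByteStream(stream, metadata)
    }
}
```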

### Reason behind this change
I tried to implement `pipefail` feature, but this would required to
change `PipelineData` from enum to struct. So use these functions can
reduce diff if I finally change to struct. [Discord message
here](https://discord.com/channels/601130461678272522/615962413203718156/1396999539000479784)
is my plan.
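
To illustrate why this helps (hypothetical types below, not part of this PR or of nu-protocol today): if `PipelineData` later becomes a struct that carries extra state for `pipefail`, only the constructor bodies change, while the roughly 300 call sites converted in this PR keep compiling unchanged:

```rust
// Hypothetical future shape; `PipelineSource` and `failed` are illustrative only.
pub struct PipelineData {
    source: PipelineSource,
    metadata: Option<PipelineMetadata>,
    failed: bool, // state a pipefail-like feature might need
}

impl PipelineData {
    // Same signature as before, so `PipelineData::list_stream(stream, metadata)`
    // call sites need no edits when the representation changes.
    pub fn list_stream(stream: ListStream, metadata: Option<PipelineMetadata>) -> Self {
        PipelineData {
            source: PipelineSource::ListStream(stream),
            metadata,
            failed: false,
        }
    }
}
```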

# User-Facing Changes
N/A

# Tests + Formatting
N/A

# After Submitting
N/A
Author: Wind
Date: 2025-08-02 09:30:30 +08:00 (committed by GitHub)
Parent: ee5b5bd39e
Commit: eb8d2d3206
126 changed files with 299 additions and 304 deletions

View File

@ -278,7 +278,7 @@ pub fn migrate_old_plugin_file(engine_state: &EngineState) -> bool {
&mut stack,
&old_contents,
&old_plugin_file_path.to_string_lossy(),
PipelineData::Empty,
PipelineData::empty(),
false,
) != 0
{

View File

@ -61,7 +61,7 @@ fn get_prompt_string(
.and_then(|v| match v {
Value::Closure { val, .. } => {
let result = ClosureEvalOnce::new(engine_state, stack, val.as_ref().clone())
.run_with_input(PipelineData::Empty);
.run_with_input(PipelineData::empty());
trace!(
"get_prompt_string (block) {}:{}:{}",
@ -76,7 +76,7 @@ fn get_prompt_string(
})
.ok()
}
Value::String { .. } => Some(PipelineData::Value(v.clone(), None)),
Value::String { .. } => Some(PipelineData::value(v.clone(), None)),
_ => None,
})
.and_then(|pipeline_data| {

View File

@ -159,7 +159,7 @@ pub(crate) fn add_menus(
engine_state.merge_delta(delta)?;
let mut temp_stack = Stack::new().collect_value();
let input = PipelineData::Empty;
let input = PipelineData::empty();
menu_eval_results.push(eval_block::<WithoutDebug>(
&engine_state,
&mut temp_stack,

View File

@ -2140,7 +2140,8 @@ fn run_external_completion_within_pwd(
assert!(engine_state.merge_delta(delta).is_ok());
assert!(
eval_block::<WithoutDebug>(&engine_state, &mut stack, &block, PipelineData::Empty).is_ok()
eval_block::<WithoutDebug>(&engine_state, &mut stack, &block, PipelineData::empty())
.is_ok()
);
// Merge environment into the permanent state

View File

@ -199,7 +199,7 @@ pub fn merge_input(
engine_state,
stack,
&block,
PipelineData::Value(Value::nothing(Span::unknown()), None),
PipelineData::value(Value::nothing(Span::unknown()), None),
)
.is_ok()
);

View File

@ -12,7 +12,7 @@ use nu_protocol::{
/// ```rust
/// # use nu_engine::command_prelude::*;
/// # use nu_cmd_base::WrapCall;
/// # fn do_command_logic(call: WrapCall) -> Result<PipelineData, ShellError> { Ok(PipelineData::Empty) }
/// # fn do_command_logic(call: WrapCall) -> Result<PipelineData, ShellError> { Ok(PipelineData::empty()) }
///
/// # struct Command {}
/// # impl Command {

View File

@ -72,7 +72,7 @@ impl Command for EachWhile {
let metadata = input.metadata();
match input {
PipelineData::Empty => Ok(PipelineData::Empty),
PipelineData::Empty => Ok(PipelineData::empty()),
PipelineData::Value(Value::Range { .. }, ..)
| PipelineData::Value(Value::List { .. }, ..)
| PipelineData::ListStream(..) => {
@ -109,7 +109,7 @@ impl Command for EachWhile {
.fuse()
.into_pipeline_data(head, engine_state.signals().clone()))
} else {
Ok(PipelineData::Empty)
Ok(PipelineData::empty())
}
}
// This match allows non-iterables to be accepted,

View File

@ -55,7 +55,7 @@ fn from_url(input: PipelineData, head: Span) -> Result<PipelineData, ShellError>
.map(|(k, v)| (k, Value::string(v, head)))
.collect();
Ok(PipelineData::Value(Value::record(record, head), metadata))
Ok(PipelineData::value(Value::record(record, head), metadata))
}
_ => Err(ShellError::UnsupportedInput {
msg: "String not compatible with URL encoding".to_string(),

View File

@ -113,7 +113,7 @@ fn format_bits(
let cell_paths = (!cell_paths.is_empty()).then_some(cell_paths);
if let PipelineData::ByteStream(stream, metadata) = input {
Ok(PipelineData::ByteStream(
Ok(PipelineData::byte_stream(
byte_stream_to_bits(stream, head),
metadata,
))

View File

@ -191,7 +191,7 @@ fn format(
// We can only handle a Record or a List of Records
match data_as_value {
Value::Record { .. } => match format_record(format_operations, &data_as_value, config) {
Ok(value) => Ok(PipelineData::Value(Value::string(value, head_span), None)),
Ok(value) => Ok(PipelineData::value(Value::string(value, head_span), None)),
Err(value) => Err(value),
},

View File

@ -157,12 +157,12 @@ impl Command for Do {
if !stderr_msg.is_empty() {
child.stderr = Some(ChildPipe::Tee(Box::new(Cursor::new(stderr_msg))));
}
Ok(PipelineData::ByteStream(
Ok(PipelineData::byte_stream(
ByteStream::child(child, span),
metadata,
))
}
Err(stream) => Ok(PipelineData::ByteStream(stream, metadata)),
Err(stream) => Ok(PipelineData::byte_stream(stream, metadata)),
}
}
Ok(PipelineData::ByteStream(mut stream, metadata))
@ -176,7 +176,7 @@ impl Command for Do {
if let ByteStreamSource::Child(child) = stream.source_mut() {
child.ignore_error(true);
}
Ok(PipelineData::ByteStream(stream, metadata))
Ok(PipelineData::byte_stream(stream, metadata))
}
Ok(PipelineData::Value(Value::Error { .. }, ..)) | Err(_) if ignore_all_errors => {
Ok(PipelineData::empty())
@ -189,7 +189,7 @@ impl Command for Do {
value
}
});
Ok(PipelineData::ListStream(stream, metadata))
Ok(PipelineData::list_stream(stream, metadata))
}
r => r,
}

View File

@ -66,7 +66,7 @@ impl Command for PluginStop {
}
if found {
Ok(PipelineData::Empty)
Ok(PipelineData::empty())
} else {
Err(ShellError::GenericError {
error: format!("Failed to stop the `{}` plugin", name.item),

View File

@ -76,7 +76,7 @@ impl Command for BytesAt {
if let PipelineData::ByteStream(stream, metadata) = input {
let stream = stream.slice(call.head, call.arguments_span(), range)?;
Ok(PipelineData::ByteStream(stream, metadata))
Ok(PipelineData::byte_stream(stream, metadata))
} else {
operate(
map_value,

View File

@ -67,7 +67,7 @@ impl Command for BytesCollect {
ByteStreamType::Binary,
);
Ok(PipelineData::ByteStream(output, metadata))
Ok(PipelineData::byte_stream(output, metadata))
}
fn examples(&self) -> Vec<Example> {

View File

@ -129,7 +129,7 @@ fn into_binary(
if let PipelineData::ByteStream(stream, metadata) = input {
// Just set the type - that should be good enough
Ok(PipelineData::ByteStream(
Ok(PipelineData::byte_stream(
stream.with_type(ByteStreamType::Binary),
metadata,
))

View File

@ -170,7 +170,7 @@ fn string_helper(
// within a string stream is actually valid UTF-8. But refuse to do it if it was already set
// to binary
if stream.type_().is_string_coercible() {
Ok(PipelineData::ByteStream(
Ok(PipelineData::byte_stream(
stream.with_type(ByteStreamType::String),
metadata,
))

View File

@ -79,7 +79,7 @@ impl Command for SchemaDb {
// TODO: add views and triggers
Ok(PipelineData::Value(Value::record(record, span), None))
Ok(PipelineData::value(Value::record(record, span), None))
}
}

View File

@ -42,7 +42,7 @@ impl Command for Debug {
let raw = call.has_flag(engine_state, stack, "raw")?;
let raw_value = call.has_flag(engine_state, stack, "raw-value")?;
// Should PipelineData::Empty result in an error here?
// Should PipelineData::empty() result in an error here?
input.map(
move |x| {

View File

@ -25,7 +25,7 @@ impl Command for DebugEnv {
call: &Call,
_input: PipelineData,
) -> Result<PipelineData, ShellError> {
Ok(PipelineData::Value(
Ok(PipelineData::value(
env_to_strings(engine_state, stack)?.into_value(call.head),
None,
))

View File

@ -35,7 +35,7 @@ impl Command for DebugExperimentalOptions {
call: &Call,
_input: PipelineData,
) -> Result<PipelineData, ShellError> {
Ok(PipelineData::Value(
Ok(PipelineData::value(
Value::list(
nu_experimental::ALL
.iter()

View File

@ -124,7 +124,7 @@ pub(super) fn start_editor(
let post_wait_callback = PostWaitCallback::for_job_control(engine_state, None, None);
// Wrap the output into a `PipelineData::ByteStream`.
// Wrap the output into a `PipelineData::byte_stream`.
let child = nu_protocol::process::ChildProcess::new(
child,
None,
@ -133,7 +133,7 @@ pub(super) fn start_editor(
Some(post_wait_callback),
)?;
Ok(PipelineData::ByteStream(
Ok(PipelineData::byte_stream(
ByteStream::child(child, call.head),
None,
))

View File

@ -33,7 +33,7 @@ impl Command for ConfigUseColors {
.get_config()
.use_ansi_coloring
.get(engine_state);
Ok(PipelineData::Value(
Ok(PipelineData::value(
Value::bool(use_ansi_coloring, call.head),
None,
))

View File

@ -120,6 +120,6 @@ impl Command for Mktemp {
});
}
};
Ok(PipelineData::Value(Value::string(res, span), None))
Ok(PipelineData::value(Value::string(res, span), None))
}
}

View File

@ -176,7 +176,7 @@ impl Command for Open {
.map_err(|err| IoError::new(err, arg_span, PathBuf::from(path)))?;
// No content_type by default - Is added later if no converter is found
let stream = PipelineData::ByteStream(
let stream = PipelineData::byte_stream(
ByteStream::file(file, call_span, engine_state.signals().clone()),
Some(PipelineMetadata {
data_source: DataSource::FilePath(path.to_path_buf()),
@ -246,7 +246,7 @@ impl Command for Open {
}
if output.is_empty() {
Ok(PipelineData::Empty)
Ok(PipelineData::empty())
} else if output.len() == 1 {
Ok(output.remove(0))
} else {

View File

@ -339,7 +339,7 @@ fn rm(
inner: vec![],
});
} else if !confirmed {
return Ok(PipelineData::Empty);
return Ok(PipelineData::empty());
}
}

View File

@ -191,7 +191,7 @@ impl Command for Save {
}
}
Ok(PipelineData::Empty)
Ok(PipelineData::empty())
}
PipelineData::ListStream(ls, pipeline_metadata)
if raw || prepare_path(&path, append, force)?.0.extension().is_none() =>

View File

@ -45,7 +45,7 @@ impl Command for Start {
// Attempt to parse the input as a URL
if let Ok(url) = url::Url::parse(path_no_whitespace) {
open_path(url.as_str(), engine_state, stack, path.span)?;
return Ok(PipelineData::Empty);
return Ok(PipelineData::empty());
}
// If it's not a URL, treat it as a file path
let cwd = engine_state.cwd(Some(stack))?;
@ -54,7 +54,7 @@ impl Command for Start {
// Check if the path exists or if it's a valid file/directory
if full_path.exists() {
open_path(full_path, engine_state, stack, path.span)?;
return Ok(PipelineData::Empty);
return Ok(PipelineData::empty());
}
// If neither file nor URL, return an error
Err(ShellError::GenericError {

View File

@ -241,7 +241,7 @@ impl Command for Watch {
new_path.unwrap_or_else(|| "".into()).to_string_lossy(),
head,
))
.run_with_input(PipelineData::Empty);
.run_with_input(PipelineData::empty());
match result {
Ok(val) => val.print_table(engine_state, stack, false, false)?,

View File

@ -199,7 +199,7 @@ pub fn chunk_by(
let metadata = input.metadata();
match input {
PipelineData::Empty => Ok(PipelineData::Empty),
PipelineData::Empty => Ok(PipelineData::empty()),
PipelineData::Value(Value::Range { .. }, ..)
| PipelineData::Value(Value::List { .. }, ..)
| PipelineData::ListStream(..) => {

View File

@ -124,11 +124,11 @@ pub fn chunks(
PipelineData::Value(Value::List { vals, .. }, metadata) => {
let chunks = ChunksIter::new(vals, chunk_size, span);
let stream = ListStream::new(chunks, span, engine_state.signals().clone());
Ok(PipelineData::ListStream(stream, metadata))
Ok(PipelineData::list_stream(stream, metadata))
}
PipelineData::ListStream(stream, metadata) => {
let stream = stream.modify(|iter| ChunksIter::new(iter, chunk_size, span));
Ok(PipelineData::ListStream(stream, metadata))
Ok(PipelineData::list_stream(stream, metadata))
}
PipelineData::Value(Value::Binary { val, .. }, metadata) => {
let chunk_read = ChunkRead {
@ -148,7 +148,7 @@ pub fn chunks(
}
PipelineData::ByteStream(stream, metadata) => {
let pipeline_data = match stream.reader() {
None => PipelineData::Empty,
None => PipelineData::empty(),
Some(reader) => {
let chunk_read = ChunkRead {
reader,

View File

@ -74,7 +74,7 @@ impl Command for Columns {
fn getcol(head: Span, input: PipelineData) -> Result<PipelineData, ShellError> {
let metadata = input.metadata();
match input {
PipelineData::Empty => Ok(PipelineData::Empty),
PipelineData::Empty => Ok(PipelineData::empty()),
PipelineData::Value(v, ..) => {
let span = v.span();
let cols = match v {

View File

@ -227,7 +227,7 @@ fn default(
// stream's internal state already preserves the original signals config, so if this
// Signals::empty list stream gets interrupted it will be caught by the underlying iterator
let ls = ListStream::new(stream, span, Signals::empty());
Ok(PipelineData::ListStream(ls, metadata))
Ok(PipelineData::list_stream(ls, metadata))
// Otherwise, return the input as is
} else {
Ok(input)
@ -269,7 +269,7 @@ impl DefaultValue {
DefaultValue::Uncalculated(closure) => {
let value = closure
.item
.run_with_input(PipelineData::Empty)?
.run_with_input(PipelineData::empty())?
.into_value(closure.span)?;
*self = DefaultValue::Calculated(value.clone());
Ok(value)
@ -282,7 +282,7 @@ impl DefaultValue {
fn single_run_pipeline_data(self) -> Result<PipelineData, ShellError> {
match self {
DefaultValue::Uncalculated(mut closure) => {
closure.item.run_with_input(PipelineData::Empty)
closure.item.run_with_input(PipelineData::empty())
}
DefaultValue::Calculated(val) => Ok(val.into_pipeline_data()),
}

View File

@ -108,7 +108,7 @@ fn drop_cols(
metadata,
))
} else {
Ok(PipelineData::Empty)
Ok(PipelineData::empty())
}
}
PipelineData::Value(mut v, ..) => {
@ -136,7 +136,7 @@ fn drop_cols(
val => Err(unsupported_value_error(&val, head)),
}
}
PipelineData::Empty => Ok(PipelineData::Empty),
PipelineData::Empty => Ok(PipelineData::empty()),
PipelineData::ByteStream(stream, ..) => Err(ShellError::OnlySupportsThisInputType {
exp_input_type: "table or record".into(),
wrong_type: stream.type_().describe().into(),

View File

@ -106,7 +106,7 @@ with 'transpose' first."#
let metadata = input.metadata();
match input {
PipelineData::Empty => Ok(PipelineData::Empty),
PipelineData::Empty => Ok(PipelineData::empty()),
PipelineData::Value(Value::Range { .. }, ..)
| PipelineData::Value(Value::List { .. }, ..)
| PipelineData::ListStream(..) => {
@ -164,7 +164,7 @@ with 'transpose' first."#
})
.into_pipeline_data(head, engine_state.signals().clone()))
} else {
Ok(PipelineData::Empty)
Ok(PipelineData::empty())
}
}
// This match allows non-iterables to be accepted,

View File

@ -28,7 +28,7 @@ pub fn empty(
}
} else {
match input {
PipelineData::Empty => Ok(PipelineData::Empty),
PipelineData::Empty => Ok(PipelineData::empty()),
PipelineData::ByteStream(stream, ..) => {
let span = stream.span();
match stream.reader() {

View File

@ -448,7 +448,7 @@ fn find_in_pipelinedata(
let map_columns_to_search = columns_to_search.clone();
match input {
PipelineData::Empty => Ok(PipelineData::Empty),
PipelineData::Empty => Ok(PipelineData::empty()),
PipelineData::Value(_, _) => input
.filter(
move |value| {
@ -470,7 +470,7 @@ fn find_in_pipelinedata(
.map(move |x| highlight_matches_in_value(&map_pattern, x, &map_columns_to_search))
});
Ok(PipelineData::ListStream(stream, metadata))
Ok(PipelineData::list_stream(stream, metadata))
}
PipelineData::ByteStream(stream, ..) => {
let span = stream.span();
@ -489,7 +489,7 @@ fn find_in_pipelinedata(
}
Ok(Value::list(output, span).into_pipeline_data())
} else {
Ok(PipelineData::Empty)
Ok(PipelineData::empty())
}
}
}

View File

@ -167,7 +167,7 @@ fn first_helper(
Err(ShellError::AccessEmptyContent { span: head })
}
} else {
Ok(PipelineData::ListStream(
Ok(PipelineData::list_stream(
stream.modify(|iter| iter.take(rows)),
metadata,
))
@ -191,7 +191,7 @@ fn first_helper(
}
} else {
// Just take 'rows' bytes off the stream, mimicking the binary behavior
Ok(PipelineData::ByteStream(
Ok(PipelineData::byte_stream(
ByteStream::read(
reader.take(rows as u64),
head,
@ -202,7 +202,7 @@ fn first_helper(
))
}
} else {
Ok(PipelineData::Empty)
Ok(PipelineData::empty())
}
} else {
Err(ShellError::OnlySupportsThisInputType {

View File

@ -264,7 +264,7 @@ fn insert(
value
}
});
Ok(PipelineData::ListStream(stream, metadata))
Ok(PipelineData::list_stream(stream, metadata))
} else {
let stream = stream.map(move |mut value| {
if let Err(e) = value.insert_data_at_cell_path(
@ -278,7 +278,7 @@ fn insert(
}
});
Ok(PipelineData::ListStream(stream, metadata))
Ok(PipelineData::list_stream(stream, metadata))
}
}
PipelineData::Empty => Err(ShellError::IncompatiblePathAccess {

View File

@ -120,7 +120,7 @@ interleave
.into_iter()
.chain(closures.into_iter().map(|closure| {
ClosureEvalOnce::new(engine_state, stack, closure)
.run_with_input(PipelineData::Empty)
.run_with_input(PipelineData::empty())
}))
.try_for_each(|stream| {
stream.and_then(|stream| {

View File

@ -42,7 +42,7 @@ impl Command for Items {
let metadata = input.metadata();
match input {
PipelineData::Empty => Ok(PipelineData::Empty),
PipelineData::Empty => Ok(PipelineData::empty()),
PipelineData::Value(value, ..) => {
let span = value.span();
match value {
@ -55,7 +55,7 @@ impl Command for Items {
let result = closure
.add_arg(Value::string(col, span))
.add_arg(val)
.run_with_input(PipelineData::Empty)
.run_with_input(PipelineData::empty())
.and_then(|data| data.into_value(head));
match result {

View File

@ -85,7 +85,7 @@ impl Command for Join {
Value::String { val: r_on, .. },
) => {
let result = join(rows_1, rows_2, l_on, r_on, join_type, span);
Ok(PipelineData::Value(result, metadata))
Ok(PipelineData::value(result, metadata))
}
_ => Err(ShellError::UnsupportedInput {
msg: "(PipelineData<table>, table, string, string)".into(),

View File

@ -186,7 +186,7 @@ impl Command for Last {
}
}
} else {
Ok(PipelineData::Empty)
Ok(PipelineData::empty())
}
} else {
Err(ShellError::OnlySupportsThisInputType {

View File

@ -57,7 +57,7 @@ impl Command for Lines {
src_span: value.span(),
}),
},
PipelineData::Empty => Ok(PipelineData::Empty),
PipelineData::Empty => Ok(PipelineData::empty()),
PipelineData::ListStream(stream, metadata) => {
let stream = stream.modify(|iter| {
iter.filter_map(move |value| {
@ -81,7 +81,7 @@ impl Command for Lines {
.flatten()
});
Ok(PipelineData::ListStream(stream, metadata))
Ok(PipelineData::list_stream(stream, metadata))
}
PipelineData::ByteStream(stream, ..) => {
if let Some(lines) = stream.lines() {

View File

@ -130,7 +130,7 @@ impl Command for ParEach {
};
match input {
PipelineData::Empty => Ok(PipelineData::Empty),
PipelineData::Empty => Ok(PipelineData::empty()),
PipelineData::Value(value, ..) => {
let span = value.span();
match value {

View File

@ -123,7 +123,7 @@ impl Command for Reduce {
acc = closure
.add_arg(value)
.add_arg(acc.clone())
.run_with_input(PipelineData::Value(acc, None))?
.run_with_input(PipelineData::value(acc, None))?
.into_value(head)?;
}

View File

@ -94,7 +94,7 @@ impl Command for Skip {
PipelineData::ByteStream(stream, metadata) => {
if stream.type_().is_binary_coercible() {
let span = stream.span();
Ok(PipelineData::ByteStream(
Ok(PipelineData::byte_stream(
stream.skip(span, n as u64)?,
metadata,
))

View File

@ -81,7 +81,7 @@ impl Command for Slice {
};
if count == 0 {
Ok(PipelineData::Value(Value::list(vec![], head), None))
Ok(PipelineData::value(Value::list(vec![], head), None))
} else {
let iter = v.into_iter().skip(from).take(count);
Ok(iter.into_pipeline_data(head, engine_state.signals().clone()))
@ -102,7 +102,7 @@ impl Command for Slice {
};
if count == 0 {
Ok(PipelineData::Value(Value::list(vec![], head), None))
Ok(PipelineData::value(Value::list(vec![], head), None))
} else {
let iter = input.into_iter().skip(from).take(count);
Ok(iter.into_pipeline_data(head, engine_state.signals().clone()))

View File

@ -62,7 +62,7 @@ impl Command for Take {
)),
Value::Binary { val, .. } => {
let slice: Vec<u8> = val.into_iter().take(rows_desired).collect();
Ok(PipelineData::Value(Value::binary(slice, span), metadata))
Ok(PipelineData::value(Value::binary(slice, span), metadata))
}
Value::Range { val, .. } => Ok(val
.into_range_iter(span, Signals::empty())
@ -82,14 +82,14 @@ impl Command for Take {
}),
}
}
PipelineData::ListStream(stream, metadata) => Ok(PipelineData::ListStream(
PipelineData::ListStream(stream, metadata) => Ok(PipelineData::list_stream(
stream.modify(|iter| iter.take(rows_desired)),
metadata,
)),
PipelineData::ByteStream(stream, metadata) => {
if stream.type_().is_binary_coercible() {
let span = stream.span();
Ok(PipelineData::ByteStream(
Ok(PipelineData::byte_stream(
stream.take(span, rows_desired as u64)?,
metadata,
))

View File

@ -138,7 +138,7 @@ use it in your pipeline."#
let tee_thread = spawn_tee(info, eval_block)?;
let tee = IoTee::new(read, tee_thread);
Ok(PipelineData::ByteStream(
Ok(PipelineData::byte_stream(
ByteStream::read(tee, span, engine_state.signals().clone(), type_),
metadata,
))
@ -151,7 +151,7 @@ use it in your pipeline."#
let tee_thread = spawn_tee(info, eval_block)?;
let tee = IoTee::new(file, tee_thread);
Ok(PipelineData::ByteStream(
Ok(PipelineData::byte_stream(
ByteStream::read(tee, span, engine_state.signals().clone(), type_),
metadata,
))
@ -234,7 +234,7 @@ use it in your pipeline."#
};
if child.stdout.is_some() || child.stderr.is_some() {
Ok(PipelineData::ByteStream(
Ok(PipelineData::byte_stream(
ByteStream::child(*child, span),
metadata,
))
@ -243,7 +243,7 @@ use it in your pipeline."#
thread.join().unwrap_or_else(|_| Err(panic_error()))?;
}
child.wait()?;
Ok(PipelineData::Empty)
Ok(PipelineData::empty())
}
}
}
@ -439,7 +439,7 @@ fn spawn_tee(
Signals::empty(),
info.type_,
);
eval_block(PipelineData::ByteStream(stream, info.metadata))
eval_block(PipelineData::byte_stream(stream, info.metadata))
})
.map_err(|err| {
IoError::new_with_additional_context(err, info.span, None, "Could not spawn tee")

View File

@ -293,7 +293,7 @@ pub fn transpose(
})
.collect::<Vec<Value>>();
if result_data.len() == 1 && args.as_record {
Ok(PipelineData::Value(
Ok(PipelineData::value(
result_data
.pop()
.expect("already check result only contains one item"),

View File

@ -210,7 +210,7 @@ fn update(
}
});
Ok(PipelineData::ListStream(stream, metadata))
Ok(PipelineData::list_stream(stream, metadata))
} else {
let stream = stream.map(move |mut value| {
if let Err(e) =
@ -222,7 +222,7 @@ fn update(
}
});
Ok(PipelineData::ListStream(stream, metadata))
Ok(PipelineData::list_stream(stream, metadata))
}
}
PipelineData::Empty => Err(ShellError::IncompatiblePathAccess {

View File

@ -288,7 +288,7 @@ fn upsert(
}
});
Ok(PipelineData::ListStream(stream, metadata))
Ok(PipelineData::list_stream(stream, metadata))
} else {
let stream = stream.map(move |mut value| {
if let Err(e) =
@ -300,7 +300,7 @@ fn upsert(
}
});
Ok(PipelineData::ListStream(stream, metadata))
Ok(PipelineData::list_stream(stream, metadata))
}
}
PipelineData::Empty => Err(ShellError::IncompatiblePathAccess {
@ -335,7 +335,7 @@ fn upsert_value_by_closure(
let input = value_at_path
.map(Cow::into_owned)
.map(IntoPipelineData::into_pipeline_data)
.unwrap_or(PipelineData::Empty);
.unwrap_or(PipelineData::empty());
let new_value = closure
.add_arg(arg)
@ -366,7 +366,7 @@ fn upsert_single_value_by_closure(
let input = value_at_path
.map(Cow::into_owned)
.map(IntoPipelineData::into_pipeline_data)
.unwrap_or(PipelineData::Empty);
.unwrap_or(PipelineData::empty());
let new_value = closure
.add_arg(arg)

View File

@ -137,7 +137,7 @@ fn values(
let signals = engine_state.signals().clone();
let metadata = input.metadata();
match input {
PipelineData::Empty => Ok(PipelineData::Empty),
PipelineData::Empty => Ok(PipelineData::empty()),
PipelineData::Value(v, ..) => {
let span = v.span();
match v {

View File

@ -120,12 +120,12 @@ impl Command for Window {
PipelineData::Value(Value::List { vals, .. }, metadata) => {
let chunks = WindowGapIter::new(vals, size, stride, remainder, head);
let stream = ListStream::new(chunks, head, engine_state.signals().clone());
Ok(PipelineData::ListStream(stream, metadata))
Ok(PipelineData::list_stream(stream, metadata))
}
PipelineData::ListStream(stream, metadata) => {
let stream = stream
.modify(|iter| WindowGapIter::new(iter, size, stride, remainder, head));
Ok(PipelineData::ListStream(stream, metadata))
Ok(PipelineData::list_stream(stream, metadata))
}
input => Err(input.unsupported_input_error("list", head)),
}
@ -134,12 +134,12 @@ impl Command for Window {
PipelineData::Value(Value::List { vals, .. }, metadata) => {
let chunks = WindowOverlapIter::new(vals, size, stride, remainder, head);
let stream = ListStream::new(chunks, head, engine_state.signals().clone());
Ok(PipelineData::ListStream(stream, metadata))
Ok(PipelineData::list_stream(stream, metadata))
}
PipelineData::ListStream(stream, metadata) => {
let stream = stream
.modify(|iter| WindowOverlapIter::new(iter, size, stride, remainder, head));
Ok(PipelineData::ListStream(stream, metadata))
Ok(PipelineData::list_stream(stream, metadata))
}
input => Err(input.unsupported_input_error("list", head)),
}

View File

@ -36,7 +36,7 @@ impl Command for Wrap {
let metadata = input.metadata();
match input {
PipelineData::Empty => Ok(PipelineData::Empty),
PipelineData::Empty => Ok(PipelineData::empty()),
PipelineData::Value(Value::Range { .. }, ..)
| PipelineData::Value(Value::List { .. }, ..)
| PipelineData::ListStream { .. } => Ok(input

View File

@ -103,7 +103,7 @@ impl Command for Zip {
let metadata = input.metadata();
let other = if let Value::Closure { val, .. } = other {
// If a closure was provided, evaluate it and consume its stream output
ClosureEvalOnce::new(engine_state, stack, *val).run_with_input(PipelineData::Empty)?
ClosureEvalOnce::new(engine_state, stack, *val).run_with_input(PipelineData::empty())?
} else {
other.into_pipeline_data()
};

View File

@ -95,11 +95,11 @@ pub(super) fn from_delimited_data(
) -> Result<PipelineData, ShellError> {
let metadata = input.metadata().map(|md| md.with_content_type(None));
match input {
PipelineData::Empty => Ok(PipelineData::Empty),
PipelineData::Empty => Ok(PipelineData::empty()),
PipelineData::Value(value, ..) => {
let string = value.into_string()?;
let byte_stream = ByteStream::read_string(string, name, Signals::empty());
Ok(PipelineData::ListStream(
Ok(PipelineData::list_stream(
from_delimited_stream(config, byte_stream, name)?,
metadata,
))
@ -110,7 +110,7 @@ pub(super) fn from_delimited_data(
dst_span: name,
src_span: list_stream.span(),
}),
PipelineData::ByteStream(byte_stream, ..) => Ok(PipelineData::ListStream(
PipelineData::ByteStream(byte_stream, ..) => Ok(PipelineData::list_stream(
from_delimited_stream(config, byte_stream, name)?,
metadata,
)),

View File

@ -76,25 +76,27 @@ impl Command for FromJson {
if call.has_flag(engine_state, stack, "objects")? {
// Return a stream of JSON values, one for each non-empty line
match input {
PipelineData::Value(Value::String { val, .. }, ..) => Ok(PipelineData::ListStream(
read_json_lines(
Cursor::new(val),
span,
strict,
engine_state.signals().clone(),
),
metadata,
)),
PipelineData::Value(Value::String { val, .. }, ..) => {
Ok(PipelineData::list_stream(
read_json_lines(
Cursor::new(val),
span,
strict,
engine_state.signals().clone(),
),
metadata,
))
}
PipelineData::ByteStream(stream, ..)
if stream.type_() != ByteStreamType::Binary =>
{
if let Some(reader) = stream.reader() {
Ok(PipelineData::ListStream(
Ok(PipelineData::list_stream(
read_json_lines(reader, span, strict, Signals::empty()),
metadata,
))
} else {
Ok(PipelineData::Empty)
Ok(PipelineData::empty())
}
}
_ => Err(ShellError::OnlySupportsThisInputType {

View File

@ -168,7 +168,7 @@ fn from_ods(
}
}
Ok(PipelineData::Value(
Ok(PipelineData::value(
Value::record(dict.into_iter().collect(), head),
None,
))

View File

@ -181,7 +181,7 @@ fn from_xlsx(
}
}
Ok(PipelineData::Value(
Ok(PipelineData::value(
Value::record(dict.into_iter().collect(), head),
None,
))

View File

@ -132,7 +132,7 @@ pub fn to_delimited_data(
Value::Record { val, .. } => val.columns().cloned().collect(),
_ => return Err(make_unsupported_input_error(value.get_type(), head, span)),
};
input = PipelineData::Value(value, metadata.clone());
input = PipelineData::value(value, metadata.clone());
columns
}
};
@ -181,5 +181,5 @@ pub fn to_delimited_data(
},
);
Ok(PipelineData::ByteStream(stream, metadata))
Ok(PipelineData::byte_stream(stream, metadata))
}

View File

@ -76,7 +76,7 @@ impl Command for ToJson {
data_source: nu_protocol::DataSource::None,
content_type: Some(mime::APPLICATION_JSON.to_string()),
};
Ok(PipelineData::Value(res, Some(metadata)))
Ok(PipelineData::value(res, Some(metadata)))
}
_ => Err(ShellError::CantConvert {
to_type: "JSON".into(),

View File

@ -121,10 +121,10 @@ impl Command for ToText {
)
};
Ok(PipelineData::ByteStream(stream, update_metadata(meta)))
Ok(PipelineData::byte_stream(stream, update_metadata(meta)))
}
PipelineData::ByteStream(stream, meta) => {
Ok(PipelineData::ByteStream(stream, update_metadata(meta)))
Ok(PipelineData::byte_stream(stream, update_metadata(meta)))
}
}
}

View File

@ -112,7 +112,7 @@ In this case, generation also stops when the input stream stops."#
let closure_result = closure
.add_arg(state_arg)
.run_with_input(PipelineData::Empty);
.run_with_input(PipelineData::empty());
let (output, next_input) = parse_closure_result(closure_result, head);
// We use `state` to control when to stop, not `output`. By wrapping
@ -135,7 +135,7 @@ In this case, generation also stops when the input stream stops."#
let closure_result = closure
.add_arg(item)
.add_arg(state_arg)
.run_with_input(PipelineData::Empty);
.run_with_input(PipelineData::empty());
let (output, next_input) = parse_closure_result(closure_result, head);
state = next_input;
Some(output)

View File

@ -76,7 +76,7 @@ pub fn calculate(
PipelineData::Value(Value::List { ref vals, .. }, ..) => match &vals[..] {
[Value::Record { .. }, _end @ ..] => helper_for_tables(
vals,
values.span().expect("PipelineData::Value had no span"),
values.span().expect("PipelineData::value had no span"),
name,
mf,
),

View File

@ -143,7 +143,7 @@ pub fn response_to_buffer(
let reader = response.into_reader();
PipelineData::ByteStream(
PipelineData::byte_stream(
ByteStream::read(reader, span, engine_state.signals().clone(), response_type)
.with_known_size(buffer_size),
None,

View File

@ -117,7 +117,7 @@ fn parse(value: Value, head: Span, config: &Config) -> Result<PipelineData, Shel
"params" => params,
};
Ok(PipelineData::Value(Value::record(record, head), None))
Ok(PipelineData::value(Value::record(record, head), None))
}
#[cfg(test)]

View File

@ -89,7 +89,7 @@ impl Command for UrlSplitQuery {
let span = value.span();
let query = value.to_expanded_string("", &stack.get_config(engine_state));
let table = query_string_to_table(&query, call.head, span)?;
Ok(PipelineData::Value(table, None))
Ok(PipelineData::value(table, None))
}
}

View File

@ -168,12 +168,12 @@ fn run(call: &Call, args: &Arguments, input: PipelineData) -> Result<PipelineDat
let metadata = input.metadata();
match input {
PipelineData::Value(val, md) => Ok(PipelineData::Value(handle_value(val, args, head), md)),
PipelineData::ListStream(stream, ..) => Ok(PipelineData::Value(
PipelineData::Value(val, md) => Ok(PipelineData::value(handle_value(val, args, head), md)),
PipelineData::ListStream(stream, ..) => Ok(PipelineData::value(
handle_value(stream.into_value(), args, head),
metadata,
)),
PipelineData::ByteStream(stream, ..) => Ok(PipelineData::Value(
PipelineData::ByteStream(stream, ..) => Ok(PipelineData::value(
handle_value(stream.into_value()?, args, head),
metadata,
)),

View File

@ -66,7 +66,7 @@ impl Command for Clear {
}
};
Ok(PipelineData::Empty)
Ok(PipelineData::empty())
}
fn examples(&self) -> Vec<Example> {

View File

@ -63,7 +63,7 @@ impl Command for IsTerminal {
}
};
Ok(PipelineData::Value(
Ok(PipelineData::value(
Value::bool(is_terminal, call.head),
None,
))

View File

@ -555,7 +555,7 @@ impl Command for ULimit {
set_limits(&limit_value, &res, hard, soft, call.head)?;
}
Ok(PipelineData::Empty)
Ok(PipelineData::empty())
} else {
print_limits(call, engine_state, stack, all, soft, hard)
}

View File

@ -78,7 +78,7 @@ fn bool(
let bool_result: bool = random_bool(probability);
Ok(PipelineData::Value(Value::bool(bool_result, span), None))
Ok(PipelineData::value(Value::bool(bool_result, span), None))
}
#[cfg(test)]

View File

@ -24,7 +24,7 @@ pub(super) fn random_byte_stream(
const OUTPUT_CHUNK_SIZE: usize = 8192;
let mut remaining_bytes = length;
PipelineData::ByteStream(
PipelineData::byte_stream(
ByteStream::from_fn(span, signals.clone(), stream_type, move |out| {
if remaining_bytes == 0 || signals.interrupted() {
return Ok(false);

View File

@ -93,9 +93,9 @@ fn float(
Bound::Unbounded => random_range(range.start()..f64::MAX),
};
Ok(PipelineData::Value(Value::float(value, span), None))
Ok(PipelineData::value(Value::float(value, span), None))
}
None => Ok(PipelineData::Value(
None => Ok(PipelineData::value(
Value::float(random_range(0.0..1.0), span),
None,
)),

View File

@ -97,7 +97,7 @@ fn integer(
Bound::Unbounded => random_range(range.start()..=i64::MAX),
};
Ok(PipelineData::Value(Value::int(value, span), None))
Ok(PipelineData::value(Value::int(value, span), None))
}
Range::FloatRange(_) => Err(ShellError::UnsupportedInput {
msg: "float range".into(),
@ -107,7 +107,7 @@ fn integer(
}),
}
}
None => Ok(PipelineData::Value(
None => Ok(PipelineData::value(
Value::int(random_range(0..=i64::MAX), span),
None,
)),

View File

@ -143,7 +143,7 @@ fn uuid(
}
};
Ok(PipelineData::Value(Value::string(uuid_str, span), None))
Ok(PipelineData::value(Value::string(uuid_str, span), None))
}
fn validate_flags(

View File

@ -270,7 +270,7 @@ pub fn compare_custom_closure(
closure_eval
.add_arg(left.clone())
.add_arg(right.clone())
.run_with_input(PipelineData::Value(
.run_with_input(PipelineData::value(
Value::list(vec![left.clone(), right.clone()], span),
None,
))

View File

@ -160,7 +160,7 @@ fn run(
) -> Result<PipelineData, ShellError> {
let head = call.head;
if list {
return Ok(PipelineData::Value(
return Ok(PipelineData::value(
generate_strftime_list(head, false),
None,
));

View File

@ -195,7 +195,7 @@ fn operate(
.collect::<Vec<_>>();
match input {
PipelineData::Empty => Ok(PipelineData::Empty),
PipelineData::Empty => Ok(PipelineData::empty()),
PipelineData::Value(value, ..) => match value {
Value::String { val, .. } => {
let captures = regex
@ -270,7 +270,7 @@ fn operate(
Ok(ListStream::new(iter, head, Signals::empty()).into())
} else {
Ok(PipelineData::Empty)
Ok(PipelineData::empty())
}
}
}

View File

@ -128,7 +128,7 @@ fn run(
},
);
Ok(PipelineData::ByteStream(output, metadata))
Ok(PipelineData::byte_stream(output, metadata))
}
#[cfg(test)]

View File

@ -234,10 +234,10 @@ fn check_parse(
inner: vec![],
})
} else {
Ok(PipelineData::Value(Value::bool(false, call_head), None))
Ok(PipelineData::value(Value::bool(false, call_head), None))
}
} else {
Ok(PipelineData::Value(Value::bool(true, call_head), None))
Ok(PipelineData::value(Value::bool(true, call_head), None))
}
}
@ -289,10 +289,10 @@ fn parse_file_or_dir_module(
inner: vec![],
})
} else {
Ok(PipelineData::Value(Value::bool(false, call_head), None))
Ok(PipelineData::value(Value::bool(false, call_head), None))
}
} else {
Ok(PipelineData::Value(Value::bool(true, call_head), None))
Ok(PipelineData::value(Value::bool(true, call_head), None))
}
}

View File

@ -246,7 +246,7 @@ impl Command for External {
}
Err(stream) => {
command.stdin(Stdio::piped());
Some(PipelineData::ByteStream(stream, metadata))
Some(PipelineData::byte_stream(stream, metadata))
}
},
PipelineData::Empty => {
@ -313,7 +313,7 @@ impl Command for External {
let child_pid = child.pid();
// Wrap the output into a `PipelineData::ByteStream`.
// Wrap the output into a `PipelineData::byte_stream`.
let mut child = ChildProcess::new(
child,
merged_stream,
@ -336,7 +336,7 @@ impl Command for External {
child.ignore_error(true);
}
Ok(PipelineData::ByteStream(
Ok(PipelineData::byte_stream(
ByteStream::child(child, call.head),
None,
))
@ -478,7 +478,7 @@ fn resolve_globbed_path_to_cwd_relative(
///
/// Note: Avoid using this function when piping data from an external command to
/// another external command, because it copies data unnecessarily. Instead,
/// extract the pipe from the `PipelineData::ByteStream` of the first command
/// extract the pipe from the `PipelineData::byte_stream` of the first command
/// and hand it to the second command directly.
fn write_pipeline_data(
mut engine_state: EngineState,
@ -788,22 +788,22 @@ mod test {
engine_state.add_env_var("PWD".into(), Value::string(cwd, Span::test_data()));
let mut buf = vec![];
let input = PipelineData::Empty;
let input = PipelineData::empty();
write_pipeline_data(engine_state.clone(), stack.clone(), input, &mut buf).unwrap();
assert_eq!(buf, b"");
let mut buf = vec![];
let input = PipelineData::Value(Value::string("foo", Span::unknown()), None);
let input = PipelineData::value(Value::string("foo", Span::unknown()), None);
write_pipeline_data(engine_state.clone(), stack.clone(), input, &mut buf).unwrap();
assert_eq!(buf, b"foo");
let mut buf = vec![];
let input = PipelineData::Value(Value::binary(b"foo", Span::unknown()), None);
let input = PipelineData::value(Value::binary(b"foo", Span::unknown()), None);
write_pipeline_data(engine_state.clone(), stack.clone(), input, &mut buf).unwrap();
assert_eq!(buf, b"foo");
let mut buf = vec![];
let input = PipelineData::ByteStream(
let input = PipelineData::byte_stream(
ByteStream::read(
b"foo".as_slice(),
Span::unknown(),

View File

@ -69,7 +69,7 @@ impl Command for UName {
.to_string())
})
.collect::<Result<Vec<String>, ShellError>>()?;
Ok(PipelineData::Value(
Ok(PipelineData::value(
Value::record(
record! {
"kernel-name" => Value::string(outputs[0].clone(), span),

View File

@ -424,13 +424,13 @@ fn handle_table_command(mut input: CmdInput<'_>) -> ShellResult<PipelineData> {
match input.data {
// Binary streams should behave as if they really are `binary` data, and printed as hex
PipelineData::ByteStream(stream, _) if stream.type_() == ByteStreamType::Binary => Ok(
PipelineData::ByteStream(pretty_hex_stream(stream, input.call.head), None),
PipelineData::byte_stream(pretty_hex_stream(stream, input.call.head), None),
),
PipelineData::ByteStream(..) => Ok(input.data),
PipelineData::Value(Value::Binary { val, .. }, ..) => {
let signals = input.engine_state.signals().clone();
let stream = ByteStream::read_binary(val, input.call.head, signals);
Ok(PipelineData::ByteStream(
Ok(PipelineData::byte_stream(
pretty_hex_stream(stream, input.call.head),
None,
))
@ -439,16 +439,16 @@ fn handle_table_command(mut input: CmdInput<'_>) -> ShellResult<PipelineData> {
PipelineData::Value(Value::List { vals, .. }, metadata) => {
let signals = input.engine_state.signals().clone();
let stream = ListStream::new(vals.into_iter(), span, signals);
input.data = PipelineData::Empty;
input.data = PipelineData::empty();
handle_row_stream(input, stream, metadata)
}
PipelineData::ListStream(stream, metadata) => {
input.data = PipelineData::Empty;
input.data = PipelineData::empty();
handle_row_stream(input, stream, metadata)
}
PipelineData::Value(Value::Record { val, .. }, ..) => {
input.data = PipelineData::Empty;
input.data = PipelineData::empty();
handle_record(input, val.into_owned())
}
PipelineData::Value(Value::Error { error, .. }, ..) => {
@ -464,7 +464,7 @@ fn handle_table_command(mut input: CmdInput<'_>) -> ShellResult<PipelineData> {
let signals = input.engine_state.signals().clone();
let stream =
ListStream::new(val.into_range_iter(span, Signals::empty()), span, signals);
input.data = PipelineData::Empty;
input.data = PipelineData::empty();
handle_row_stream(input, stream, metadata)
}
x => Ok(x),
@ -761,7 +761,7 @@ fn handle_row_stream(
Signals::empty(),
ByteStreamType::String,
);
Ok(PipelineData::ByteStream(stream, None))
Ok(PipelineData::byte_stream(stream, None))
}
fn make_clickable_link(

View File

@ -2,9 +2,9 @@ use super::*;
use nu_engine::test_help::{convert_single_value_to_cmd_args, eval_block_with_input};
use nu_engine::{current_dir, eval_expression};
use nu_protocol::{
PipelineData, Span, Spanned, Type, Value,
ast::Call,
engine::{EngineState, Stack, StateWorkingSet},
PipelineData, Span, Spanned, Type, Value,
};
use std::path::PathBuf;
@ -33,19 +33,14 @@ fn test_start_valid_url() {
// Create call for: `start https://www.example.com`
let path = "https://www.example.com".to_string();
let span = Span::test_data();
let span = Span::test_data();
let call = Call::test(
"start",
// The arguments for `start` are just the path in this case
vec![Value::string(path, span)],
);
let result = Start.run(
&engine_state,
&mut stack,
&call,
PipelineData::Empty,
);
let result = Start.run(&engine_state, &mut stack, &call, PipelineData::empty());
assert!(
result.is_ok(),
@ -61,17 +56,9 @@ fn test_start_valid_local_path() {
// Here we'll simulate opening the current directory (`.`).
let path = ".".to_string();
let span = Span::test_data();
let call = Call::test(
"start",
vec![Value::string(path, span)],
);
let call = Call::test("start", vec![Value::string(path, span)]);
let result = Start.run(
&engine_state,
&mut stack,
&call,
PipelineData::Empty,
);
let result = Start.run(&engine_state, &mut stack, &call, PipelineData::empty());
// If the environment is correctly set, it should succeed.
// If you're running in a CI environment or restricted environment
@ -90,17 +77,9 @@ fn test_start_nonexistent_local_path() {
// Create an obviously invalid path
let path = "this_file_does_not_exist_hopefully.txt".to_string();
let span = Span::test_data();
let call = Call::test(
"start",
vec![Value::string(path, span)],
);
let call = Call::test("start", vec![Value::string(path, span)]);
let result = Start.run(
&engine_state,
&mut stack,
&call,
PipelineData::Empty,
);
let result = Start.run(&engine_state, &mut stack, &call, PipelineData::empty());
// We expect an error since the file does not exist
assert!(
@ -117,4 +96,5 @@ fn test_start_nonexistent_local_path() {
} else {
panic!("Unexpected error type, expected ShellError::GenericError");
}
}
}

View File

@ -37,7 +37,7 @@ fn eval_fn(debug: bool) -> EvalBlockWithEarlyReturnFn {
/// let mut closure = ClosureEval::new(engine_state, stack, closure);
/// let iter = Vec::<Value>::new()
/// .into_iter()
/// .map(move |value| closure.add_arg(value).run_with_input(PipelineData::Empty));
/// .map(move |value| closure.add_arg(value).run_with_input(PipelineData::empty()));
/// ```
///
/// Many closures follow a simple, common scheme where the pipeline input and the first argument are the same value.
@ -175,7 +175,7 @@ impl ClosureEval {
/// # let value = unimplemented!();
/// let result = ClosureEvalOnce::new(engine_state, stack, closure)
/// .add_arg(value)
/// .run_with_input(PipelineData::Empty);
/// .run_with_input(PipelineData::empty());
/// ```
///
/// Many closures follow a simple, common scheme where the pipeline input and the first argument are the same value.

View File

@ -412,7 +412,7 @@ fn get_command_documentation(
))],
parser_info: HashMap::new(),
},
PipelineData::Value(Value::list(vals, span), None),
PipelineData::value(Value::list(vals, span), None),
) {
if let Ok((str, ..)) = result.collect_string_strict(span) {
let _ = writeln!(long_desc, "\n{help_section_name}Input/output types{RESET}:");
@ -487,7 +487,7 @@ fn get_command_documentation(
engine_state,
stack,
&(&table_call).into(),
PipelineData::Value(result.clone(), None),
PipelineData::value(result.clone(), None),
)
.ok()
});
@ -532,7 +532,7 @@ fn update_ansi_from_config(
arguments: vec![argument],
parser_info: HashMap::new(),
},
PipelineData::Empty,
PipelineData::empty(),
) {
if let Ok((str, ..)) = result.collect_string_strict(span) {
*ansi_code = str;

View File

@ -294,7 +294,7 @@ pub fn eval_block_with_early_return<D: DebugContext>(
input: PipelineData,
) -> Result<PipelineData, ShellError> {
match eval_block::<D>(engine_state, stack, block, input) {
Err(ShellError::Return { span: _, value }) => Ok(PipelineData::Value(*value, None)),
Err(ShellError::Return { span: _, value }) => Ok(PipelineData::value(*value, None)),
x => x,
}
}

View File

@ -48,7 +48,7 @@ pub fn eval_ir_block<D: DebugContext>(
// the heap allocation here by reusing buffers - our allocator is fast enough
let mut registers = Vec::with_capacity(ir_block.register_count as usize);
for _ in 0..ir_block.register_count {
registers.push(PipelineData::Empty);
registers.push(PipelineData::empty());
}
// Initialize file storage.
@ -133,15 +133,15 @@ impl<'a> EvalContext<'a> {
// log::trace!("<- {reg_id}");
std::mem::replace(
&mut self.registers[reg_id.get() as usize],
PipelineData::Empty,
PipelineData::empty(),
)
}
/// Clone data from a register. Must be collected first.
fn clone_reg(&mut self, reg_id: RegId, error_span: Span) -> Result<PipelineData, ShellError> {
match &self.registers[reg_id.get() as usize] {
PipelineData::Empty => Ok(PipelineData::Empty),
PipelineData::Value(val, meta) => Ok(PipelineData::Value(val.clone(), meta.clone())),
PipelineData::Empty => Ok(PipelineData::empty()),
PipelineData::Value(val, meta) => Ok(PipelineData::value(val.clone(), meta.clone())),
_ => Err(ShellError::IrEvalError {
msg: "Must collect to value before using instruction that clones from a register"
.into(),
@ -269,7 +269,7 @@ fn prepare_error_handler(
);
} else {
// Set the register to empty
ctx.put_reg(reg_id, PipelineData::Empty);
ctx.put_reg(reg_id, PipelineData::empty());
}
}
}
@ -838,7 +838,7 @@ fn load_literal(
span: Span,
) -> Result<InstructionResult, ShellError> {
let value = literal_value(ctx, lit, span)?;
ctx.put_reg(dst, PipelineData::Value(value, None));
ctx.put_reg(dst, PipelineData::value(value, None));
Ok(InstructionResult::Continue)
}
@ -993,7 +993,7 @@ fn binary_op(
}
};
ctx.put_reg(lhs_dst, PipelineData::Value(result, None));
ctx.put_reg(lhs_dst, PipelineData::value(result, None));
Ok(InstructionResult::Continue)
}
@ -1466,7 +1466,7 @@ fn collect(data: PipelineData, fallback_span: Span) -> Result<PipelineData, Shel
other => other,
};
let value = data.into_value(span)?;
Ok(PipelineData::Value(value, metadata))
Ok(PipelineData::value(value, metadata))
}
/// Helper for drain behavior.
@ -1584,7 +1584,7 @@ fn eval_iterate(
ctx.put_reg(stream, data); // put the stream back so it can be iterated on again
Ok(InstructionResult::Continue)
} else {
ctx.put_reg(dst, PipelineData::Empty);
ctx.put_reg(dst, PipelineData::empty());
Ok(InstructionResult::Branch(end_index))
}
} else {
@ -1594,7 +1594,7 @@ fn eval_iterate(
let span = data.span().unwrap_or(Span::unknown());
ctx.put_reg(
stream,
PipelineData::ListStream(
PipelineData::list_stream(
ListStream::new(data.into_iter(), span, Signals::EMPTY),
metadata,
),

View File

@ -88,8 +88,8 @@ impl Command for Explore {
let result = run_pager(engine_state, &mut stack.clone(), input, config);
match result {
Ok(Some(value)) => Ok(PipelineData::Value(value, None)),
Ok(None) => Ok(PipelineData::Value(Value::default(), None)),
Ok(Some(value)) => Ok(PipelineData::value(value, None)),
Ok(None) => Ok(PipelineData::value(Value::default(), None)),
Err(err) => {
let shell_error = match err.downcast::<ShellError>() {
Ok(e) => e,
@ -102,7 +102,7 @@ impl Command for Explore {
},
};
Ok(PipelineData::Value(
Ok(PipelineData::value(
Value::error(shell_error, call.head),
None,
))

View File

@ -23,7 +23,7 @@ pub fn run_command_with_value(
});
}
let pipeline = PipelineData::Value(input.clone(), None);
let pipeline = PipelineData::value(input.clone(), None);
let pipeline = run_nu_command(engine_state, stack, command, pipeline)?;
if let PipelineData::Value(Value::Error { error, .. }, ..) = pipeline {
Err(ShellError::GenericError {

View File

@ -531,7 +531,7 @@ mod tests {
engine_state,
stack,
&block,
PipelineData::Value(Value::nothing(Span::unknown()), None),
PipelineData::value(Value::nothing(Span::unknown()), None),
)
.is_ok()
);

View File

@ -184,20 +184,20 @@ pub trait InterfaceManager {
signals: &Signals,
) -> Result<PipelineData, ShellError> {
self.prepare_pipeline_data(match header {
PipelineDataHeader::Empty => PipelineData::Empty,
PipelineDataHeader::Value(value, metadata) => PipelineData::Value(value, metadata),
PipelineDataHeader::Empty => PipelineData::empty(),
PipelineDataHeader::Value(value, metadata) => PipelineData::value(value, metadata),
PipelineDataHeader::ListStream(info) => {
let handle = self.stream_manager().get_handle();
let reader = handle.read_stream(info.id, self.get_interface())?;
let ls = ListStream::new(reader, info.span, signals.clone());
PipelineData::ListStream(ls, info.metadata)
PipelineData::list_stream(ls, info.metadata)
}
PipelineDataHeader::ByteStream(info) => {
let handle = self.stream_manager().get_handle();
let reader = handle.read_stream(info.id, self.get_interface())?;
let bs =
ByteStream::from_result_iter(reader, info.span, signals.clone(), info.type_);
PipelineData::ByteStream(bs, info.metadata)
PipelineData::byte_stream(bs, info.metadata)
}
})
}

View File

@ -302,7 +302,7 @@ fn write_pipeline_data_empty() -> Result<(), ShellError> {
let manager = TestInterfaceManager::new(&test);
let interface = manager.get_interface();
let (header, writer) = interface.init_write_pipeline_data(PipelineData::Empty, &())?;
let (header, writer) = interface.init_write_pipeline_data(PipelineData::empty(), &())?;
assert!(matches!(header, PipelineDataHeader::Empty));
@ -324,7 +324,7 @@ fn write_pipeline_data_value() -> Result<(), ShellError> {
let value = Value::test_int(7);
let (header, writer) =
interface.init_write_pipeline_data(PipelineData::Value(value.clone(), None), &())?;
interface.init_write_pipeline_data(PipelineData::value(value.clone(), None), &())?;
match header {
PipelineDataHeader::Value(read_value, _) => assert_eq!(value, read_value),
@ -349,7 +349,7 @@ fn write_pipeline_data_prepared_properly() {
// Sending a binary should be an error in our test scenario
let value = Value::test_binary(vec![7, 8]);
match interface.init_write_pipeline_data(PipelineData::Value(value, None), &()) {
match interface.init_write_pipeline_data(PipelineData::value(value, None), &()) {
Ok(_) => panic!("prepare_pipeline_data was not called"),
Err(err) => {
assert_eq!(
@ -376,7 +376,7 @@ fn write_pipeline_data_list_stream() -> Result<(), ShellError> {
];
// Set up pipeline data for a list stream
let pipe = PipelineData::ListStream(
let pipe = PipelineData::list_stream(
ListStream::new(
values.clone().into_iter(),
Span::test_data(),
@ -430,7 +430,7 @@ fn write_pipeline_data_byte_stream() -> Result<(), ShellError> {
let span = Span::new(400, 500);
// Set up pipeline data for a byte stream
let data = PipelineData::ByteStream(
let data = PipelineData::byte_stream(
ByteStream::read(
std::io::Cursor::new(expected),
span,

View File

@ -117,7 +117,7 @@ impl PluginExecutionContext for PluginExecutionCommandContext<'_> {
match value {
Value::Closure { val, .. } => {
ClosureEvalOnce::new(&self.engine_state, &self.stack, *val)
.run_with_input(PipelineData::Empty)
.run_with_input(PipelineData::empty())
.and_then(|data| data.into_value(span))
.unwrap_or_else(|err| Value::error(err, self.call.head))
}

View File

@ -600,7 +600,7 @@ impl InterfaceManager for PluginInterfaceManager {
}
PipelineData::ListStream(stream, meta) => {
let source = self.state.source.clone();
Ok(PipelineData::ListStream(
Ok(PipelineData::list_stream(
stream.map(move |mut value| {
let _ = PluginCustomValueWithSource::add_source_in(&mut value, &source);
value
@ -1101,12 +1101,12 @@ impl Interface for PluginInterface {
match data {
PipelineData::Value(mut value, meta) => {
state.prepare_value(&mut value, &self.state.source)?;
Ok(PipelineData::Value(value, meta))
Ok(PipelineData::value(value, meta))
}
PipelineData::ListStream(stream, meta) => {
let source = self.state.source.clone();
let state = state.clone();
Ok(PipelineData::ListStream(
Ok(PipelineData::list_stream(
stream.map(move |mut value| {
match state.prepare_value(&mut value, &source) {
Ok(()) => value,

View File

@ -646,7 +646,7 @@ fn manager_consume_stream_end_removes_context_only_if_last_stream() -> Result<()
fn manager_prepare_pipeline_data_adds_source_to_values() -> Result<(), ShellError> {
let manager = TestCase::new().plugin("test");
let data = manager.prepare_pipeline_data(PipelineData::Value(
let data = manager.prepare_pipeline_data(PipelineData::value(
Value::test_custom_value(Box::new(test_plugin_custom_value())),
None,
))?;
@ -815,7 +815,7 @@ fn interface_write_plugin_call_writes_run_with_value_input() -> Result<(), Shell
positional: vec![],
named: vec![],
},
input: PipelineData::Value(Value::test_int(-1), Some(metadata0.clone())),
input: PipelineData::value(Value::test_int(-1), Some(metadata0.clone())),
}),
None,
)?;
@ -1072,7 +1072,7 @@ fn interface_run() -> Result<(), ShellError> {
start_fake_plugin_call_responder(manager, 1, move |_| {
vec![ReceivedPluginCallMessage::Response(
PluginCallResponse::PipelineData(PipelineData::Value(Value::test_int(number), None)),
PluginCallResponse::PipelineData(PipelineData::value(Value::test_int(number), None)),
)]
});
@ -1084,7 +1084,7 @@ fn interface_run() -> Result<(), ShellError> {
positional: vec![],
named: vec![],
},
input: PipelineData::Empty,
input: PipelineData::empty(),
},
&mut PluginExecutionBogusContext,
)?;
@ -1106,7 +1106,7 @@ fn interface_custom_value_to_base_value() -> Result<(), ShellError> {
start_fake_plugin_call_responder(manager, 1, move |_| {
vec![ReceivedPluginCallMessage::Response(
PluginCallResponse::PipelineData(PipelineData::Value(Value::test_string(string), None)),
PluginCallResponse::PipelineData(PipelineData::value(Value::test_string(string), None)),
)]
});
@ -1137,7 +1137,7 @@ fn interface_prepare_pipeline_data_accepts_normal_values() -> Result<(), ShellEr
let interface = TestCase::new().plugin("test").get_interface();
let state = CurrentCallState::default();
for value in normal_values(&interface) {
match interface.prepare_pipeline_data(PipelineData::Value(value.clone(), None), &state) {
match interface.prepare_pipeline_data(PipelineData::value(value.clone(), None), &state) {
Ok(data) => assert_eq!(
value.get_type(),
data.into_value(Span::test_data())?.get_type(),
@ -1201,7 +1201,7 @@ fn interface_prepare_pipeline_data_rejects_bad_custom_value() -> Result<(), Shel
let interface = TestCase::new().plugin("test").get_interface();
let state = CurrentCallState::default();
for value in bad_custom_values() {
match interface.prepare_pipeline_data(PipelineData::Value(value.clone(), None), &state) {
match interface.prepare_pipeline_data(PipelineData::value(value.clone(), None), &state) {
Err(err) => match err {
ShellError::CustomValueIncorrectForPlugin { .. } => (),
_ => panic!("expected error type CustomValueIncorrectForPlugin, but got {err:?}"),
@ -1361,7 +1361,7 @@ fn prepare_plugin_call_run() {
positional: vec![Value::test_int(4)],
named: vec![("x".to_owned().into_spanned(span), Some(Value::test_int(6)))],
},
input: PipelineData::Empty,
input: PipelineData::empty(),
}),
),
(
@ -1373,7 +1373,7 @@ fn prepare_plugin_call_run() {
positional: vec![cv_ok.clone()],
named: vec![("ok".to_owned().into_spanned(span), Some(cv_ok.clone()))],
},
input: PipelineData::Empty,
input: PipelineData::empty(),
}),
),
(
@ -1385,7 +1385,7 @@ fn prepare_plugin_call_run() {
positional: vec![cv_bad.clone()],
named: vec![],
},
input: PipelineData::Empty,
input: PipelineData::empty(),
}),
),
(
@ -1397,7 +1397,7 @@ fn prepare_plugin_call_run() {
positional: vec![],
named: vec![("bad".to_owned().into_spanned(span), Some(cv_bad.clone()))],
},
input: PipelineData::Empty,
input: PipelineData::empty(),
}),
),
(
@ -1410,7 +1410,7 @@ fn prepare_plugin_call_run() {
named: vec![],
},
// Shouldn't check input - that happens somewhere else
input: PipelineData::Value(cv_bad.clone(), None),
input: PipelineData::value(cv_bad.clone(), None),
}),
),
];

View File

@ -84,7 +84,7 @@ pub enum PipelineDataHeader {
///
/// Items are sent via [`StreamData`]
ListStream(ListStreamInfo),
/// Initiate [`nu_protocol::PipelineData::ByteStream`].
/// Initiate [`nu_protocol::PipelineData::byte_stream`].
///
/// Items are sent via [`StreamData`]
ByteStream(ByteStreamInfo),
@ -650,11 +650,11 @@ impl<D> EngineCallResponse<D> {
impl EngineCallResponse<PipelineData> {
/// Build an [`EngineCallResponse::PipelineData`] from a [`Value`]
pub fn value(value: Value) -> EngineCallResponse<PipelineData> {
EngineCallResponse::PipelineData(PipelineData::Value(value, None))
EngineCallResponse::PipelineData(PipelineData::value(value, None))
}
/// An [`EngineCallResponse::PipelineData`] with [`PipelineData::Empty`]
/// An [`EngineCallResponse::PipelineData`] with [`PipelineData::empty()`]
pub const fn empty() -> EngineCallResponse<PipelineData> {
EngineCallResponse::PipelineData(PipelineData::Empty)
EngineCallResponse::PipelineData(PipelineData::empty())
}
}

View File

@ -194,7 +194,7 @@ impl PluginTest {
/// # }
/// ```
pub fn eval(&mut self, nu_source: &str) -> Result<PipelineData, ShellError> {
self.eval_with(nu_source, PipelineData::Empty)
self.eval_with(nu_source, PipelineData::empty())
}
/// Test a list of plugin examples. Prints an error for each failing example.

Some files were not shown because too many files have changed in this diff.