Refactor: introduce 2 associated functions to PipelineData (#16233)

# Description
As the title says: this PR introduces 2 associated functions to `PipelineData`:
1. `PipelineData::list_stream` -> creates a `PipelineData::ListStream`
2. `PipelineData::byte_stream` -> creates a `PipelineData::ByteStream`
It then uses these functions everywhere in place of direct variant construction.

### Reason behind this change
I tried to implement the `pipefail` feature, but doing so would require
changing `PipelineData` from an enum to a struct. Using these constructor
functions now will reduce the diff if I eventually make that change. [Discord message
here](https://discord.com/channels/601130461678272522/615962413203718156/1396999539000479784)
describes my plan.

# User-Facing Changes
N/A

# Tests + Formatting
N/A

# After Submitting
N/A
This commit is contained in:
Wind
2025-08-02 09:30:30 +08:00
committed by GitHub
parent ee5b5bd39e
commit eb8d2d3206
126 changed files with 299 additions and 304 deletions

View File

@ -76,7 +76,7 @@ impl Command for BytesAt {
if let PipelineData::ByteStream(stream, metadata) = input {
let stream = stream.slice(call.head, call.arguments_span(), range)?;
Ok(PipelineData::ByteStream(stream, metadata))
Ok(PipelineData::byte_stream(stream, metadata))
} else {
operate(
map_value,

View File

@ -67,7 +67,7 @@ impl Command for BytesCollect {
ByteStreamType::Binary,
);
Ok(PipelineData::ByteStream(output, metadata))
Ok(PipelineData::byte_stream(output, metadata))
}
fn examples(&self) -> Vec<Example> {

View File

@ -129,7 +129,7 @@ fn into_binary(
if let PipelineData::ByteStream(stream, metadata) = input {
// Just set the type - that should be good enough
Ok(PipelineData::ByteStream(
Ok(PipelineData::byte_stream(
stream.with_type(ByteStreamType::Binary),
metadata,
))

View File

@ -170,7 +170,7 @@ fn string_helper(
// within a string stream is actually valid UTF-8. But refuse to do it if it was already set
// to binary
if stream.type_().is_string_coercible() {
Ok(PipelineData::ByteStream(
Ok(PipelineData::byte_stream(
stream.with_type(ByteStreamType::String),
metadata,
))

View File

@ -79,7 +79,7 @@ impl Command for SchemaDb {
// TODO: add views and triggers
Ok(PipelineData::Value(Value::record(record, span), None))
Ok(PipelineData::value(Value::record(record, span), None))
}
}

View File

@ -42,7 +42,7 @@ impl Command for Debug {
let raw = call.has_flag(engine_state, stack, "raw")?;
let raw_value = call.has_flag(engine_state, stack, "raw-value")?;
// Should PipelineData::Empty result in an error here?
// Should PipelineData::empty() result in an error here?
input.map(
move |x| {

View File

@ -25,7 +25,7 @@ impl Command for DebugEnv {
call: &Call,
_input: PipelineData,
) -> Result<PipelineData, ShellError> {
Ok(PipelineData::Value(
Ok(PipelineData::value(
env_to_strings(engine_state, stack)?.into_value(call.head),
None,
))

View File

@ -35,7 +35,7 @@ impl Command for DebugExperimentalOptions {
call: &Call,
_input: PipelineData,
) -> Result<PipelineData, ShellError> {
Ok(PipelineData::Value(
Ok(PipelineData::value(
Value::list(
nu_experimental::ALL
.iter()

View File

@ -124,7 +124,7 @@ pub(super) fn start_editor(
let post_wait_callback = PostWaitCallback::for_job_control(engine_state, None, None);
// Wrap the output into a `PipelineData::ByteStream`.
// Wrap the output into a `PipelineData::byte_stream`.
let child = nu_protocol::process::ChildProcess::new(
child,
None,
@ -133,7 +133,7 @@ pub(super) fn start_editor(
Some(post_wait_callback),
)?;
Ok(PipelineData::ByteStream(
Ok(PipelineData::byte_stream(
ByteStream::child(child, call.head),
None,
))

View File

@ -33,7 +33,7 @@ impl Command for ConfigUseColors {
.get_config()
.use_ansi_coloring
.get(engine_state);
Ok(PipelineData::Value(
Ok(PipelineData::value(
Value::bool(use_ansi_coloring, call.head),
None,
))

View File

@ -120,6 +120,6 @@ impl Command for Mktemp {
});
}
};
Ok(PipelineData::Value(Value::string(res, span), None))
Ok(PipelineData::value(Value::string(res, span), None))
}
}

View File

@ -176,7 +176,7 @@ impl Command for Open {
.map_err(|err| IoError::new(err, arg_span, PathBuf::from(path)))?;
// No content_type by default - Is added later if no converter is found
let stream = PipelineData::ByteStream(
let stream = PipelineData::byte_stream(
ByteStream::file(file, call_span, engine_state.signals().clone()),
Some(PipelineMetadata {
data_source: DataSource::FilePath(path.to_path_buf()),
@ -246,7 +246,7 @@ impl Command for Open {
}
if output.is_empty() {
Ok(PipelineData::Empty)
Ok(PipelineData::empty())
} else if output.len() == 1 {
Ok(output.remove(0))
} else {

View File

@ -339,7 +339,7 @@ fn rm(
inner: vec![],
});
} else if !confirmed {
return Ok(PipelineData::Empty);
return Ok(PipelineData::empty());
}
}

View File

@ -191,7 +191,7 @@ impl Command for Save {
}
}
Ok(PipelineData::Empty)
Ok(PipelineData::empty())
}
PipelineData::ListStream(ls, pipeline_metadata)
if raw || prepare_path(&path, append, force)?.0.extension().is_none() =>

View File

@ -45,7 +45,7 @@ impl Command for Start {
// Attempt to parse the input as a URL
if let Ok(url) = url::Url::parse(path_no_whitespace) {
open_path(url.as_str(), engine_state, stack, path.span)?;
return Ok(PipelineData::Empty);
return Ok(PipelineData::empty());
}
// If it's not a URL, treat it as a file path
let cwd = engine_state.cwd(Some(stack))?;
@ -54,7 +54,7 @@ impl Command for Start {
// Check if the path exists or if it's a valid file/directory
if full_path.exists() {
open_path(full_path, engine_state, stack, path.span)?;
return Ok(PipelineData::Empty);
return Ok(PipelineData::empty());
}
// If neither file nor URL, return an error
Err(ShellError::GenericError {

View File

@ -241,7 +241,7 @@ impl Command for Watch {
new_path.unwrap_or_else(|| "".into()).to_string_lossy(),
head,
))
.run_with_input(PipelineData::Empty);
.run_with_input(PipelineData::empty());
match result {
Ok(val) => val.print_table(engine_state, stack, false, false)?,

View File

@ -199,7 +199,7 @@ pub fn chunk_by(
let metadata = input.metadata();
match input {
PipelineData::Empty => Ok(PipelineData::Empty),
PipelineData::Empty => Ok(PipelineData::empty()),
PipelineData::Value(Value::Range { .. }, ..)
| PipelineData::Value(Value::List { .. }, ..)
| PipelineData::ListStream(..) => {

View File

@ -124,11 +124,11 @@ pub fn chunks(
PipelineData::Value(Value::List { vals, .. }, metadata) => {
let chunks = ChunksIter::new(vals, chunk_size, span);
let stream = ListStream::new(chunks, span, engine_state.signals().clone());
Ok(PipelineData::ListStream(stream, metadata))
Ok(PipelineData::list_stream(stream, metadata))
}
PipelineData::ListStream(stream, metadata) => {
let stream = stream.modify(|iter| ChunksIter::new(iter, chunk_size, span));
Ok(PipelineData::ListStream(stream, metadata))
Ok(PipelineData::list_stream(stream, metadata))
}
PipelineData::Value(Value::Binary { val, .. }, metadata) => {
let chunk_read = ChunkRead {
@ -148,7 +148,7 @@ pub fn chunks(
}
PipelineData::ByteStream(stream, metadata) => {
let pipeline_data = match stream.reader() {
None => PipelineData::Empty,
None => PipelineData::empty(),
Some(reader) => {
let chunk_read = ChunkRead {
reader,

View File

@ -74,7 +74,7 @@ impl Command for Columns {
fn getcol(head: Span, input: PipelineData) -> Result<PipelineData, ShellError> {
let metadata = input.metadata();
match input {
PipelineData::Empty => Ok(PipelineData::Empty),
PipelineData::Empty => Ok(PipelineData::empty()),
PipelineData::Value(v, ..) => {
let span = v.span();
let cols = match v {

View File

@ -227,7 +227,7 @@ fn default(
// stream's internal state already preserves the original signals config, so if this
// Signals::empty list stream gets interrupted it will be caught by the underlying iterator
let ls = ListStream::new(stream, span, Signals::empty());
Ok(PipelineData::ListStream(ls, metadata))
Ok(PipelineData::list_stream(ls, metadata))
// Otherwise, return the input as is
} else {
Ok(input)
@ -269,7 +269,7 @@ impl DefaultValue {
DefaultValue::Uncalculated(closure) => {
let value = closure
.item
.run_with_input(PipelineData::Empty)?
.run_with_input(PipelineData::empty())?
.into_value(closure.span)?;
*self = DefaultValue::Calculated(value.clone());
Ok(value)
@ -282,7 +282,7 @@ impl DefaultValue {
fn single_run_pipeline_data(self) -> Result<PipelineData, ShellError> {
match self {
DefaultValue::Uncalculated(mut closure) => {
closure.item.run_with_input(PipelineData::Empty)
closure.item.run_with_input(PipelineData::empty())
}
DefaultValue::Calculated(val) => Ok(val.into_pipeline_data()),
}

View File

@ -108,7 +108,7 @@ fn drop_cols(
metadata,
))
} else {
Ok(PipelineData::Empty)
Ok(PipelineData::empty())
}
}
PipelineData::Value(mut v, ..) => {
@ -136,7 +136,7 @@ fn drop_cols(
val => Err(unsupported_value_error(&val, head)),
}
}
PipelineData::Empty => Ok(PipelineData::Empty),
PipelineData::Empty => Ok(PipelineData::empty()),
PipelineData::ByteStream(stream, ..) => Err(ShellError::OnlySupportsThisInputType {
exp_input_type: "table or record".into(),
wrong_type: stream.type_().describe().into(),

View File

@ -106,7 +106,7 @@ with 'transpose' first."#
let metadata = input.metadata();
match input {
PipelineData::Empty => Ok(PipelineData::Empty),
PipelineData::Empty => Ok(PipelineData::empty()),
PipelineData::Value(Value::Range { .. }, ..)
| PipelineData::Value(Value::List { .. }, ..)
| PipelineData::ListStream(..) => {
@ -164,7 +164,7 @@ with 'transpose' first."#
})
.into_pipeline_data(head, engine_state.signals().clone()))
} else {
Ok(PipelineData::Empty)
Ok(PipelineData::empty())
}
}
// This match allows non-iterables to be accepted,

View File

@ -28,7 +28,7 @@ pub fn empty(
}
} else {
match input {
PipelineData::Empty => Ok(PipelineData::Empty),
PipelineData::Empty => Ok(PipelineData::empty()),
PipelineData::ByteStream(stream, ..) => {
let span = stream.span();
match stream.reader() {

View File

@ -448,7 +448,7 @@ fn find_in_pipelinedata(
let map_columns_to_search = columns_to_search.clone();
match input {
PipelineData::Empty => Ok(PipelineData::Empty),
PipelineData::Empty => Ok(PipelineData::empty()),
PipelineData::Value(_, _) => input
.filter(
move |value| {
@ -470,7 +470,7 @@ fn find_in_pipelinedata(
.map(move |x| highlight_matches_in_value(&map_pattern, x, &map_columns_to_search))
});
Ok(PipelineData::ListStream(stream, metadata))
Ok(PipelineData::list_stream(stream, metadata))
}
PipelineData::ByteStream(stream, ..) => {
let span = stream.span();
@ -489,7 +489,7 @@ fn find_in_pipelinedata(
}
Ok(Value::list(output, span).into_pipeline_data())
} else {
Ok(PipelineData::Empty)
Ok(PipelineData::empty())
}
}
}

View File

@ -167,7 +167,7 @@ fn first_helper(
Err(ShellError::AccessEmptyContent { span: head })
}
} else {
Ok(PipelineData::ListStream(
Ok(PipelineData::list_stream(
stream.modify(|iter| iter.take(rows)),
metadata,
))
@ -191,7 +191,7 @@ fn first_helper(
}
} else {
// Just take 'rows' bytes off the stream, mimicking the binary behavior
Ok(PipelineData::ByteStream(
Ok(PipelineData::byte_stream(
ByteStream::read(
reader.take(rows as u64),
head,
@ -202,7 +202,7 @@ fn first_helper(
))
}
} else {
Ok(PipelineData::Empty)
Ok(PipelineData::empty())
}
} else {
Err(ShellError::OnlySupportsThisInputType {

View File

@ -264,7 +264,7 @@ fn insert(
value
}
});
Ok(PipelineData::ListStream(stream, metadata))
Ok(PipelineData::list_stream(stream, metadata))
} else {
let stream = stream.map(move |mut value| {
if let Err(e) = value.insert_data_at_cell_path(
@ -278,7 +278,7 @@ fn insert(
}
});
Ok(PipelineData::ListStream(stream, metadata))
Ok(PipelineData::list_stream(stream, metadata))
}
}
PipelineData::Empty => Err(ShellError::IncompatiblePathAccess {

View File

@ -120,7 +120,7 @@ interleave
.into_iter()
.chain(closures.into_iter().map(|closure| {
ClosureEvalOnce::new(engine_state, stack, closure)
.run_with_input(PipelineData::Empty)
.run_with_input(PipelineData::empty())
}))
.try_for_each(|stream| {
stream.and_then(|stream| {

View File

@ -42,7 +42,7 @@ impl Command for Items {
let metadata = input.metadata();
match input {
PipelineData::Empty => Ok(PipelineData::Empty),
PipelineData::Empty => Ok(PipelineData::empty()),
PipelineData::Value(value, ..) => {
let span = value.span();
match value {
@ -55,7 +55,7 @@ impl Command for Items {
let result = closure
.add_arg(Value::string(col, span))
.add_arg(val)
.run_with_input(PipelineData::Empty)
.run_with_input(PipelineData::empty())
.and_then(|data| data.into_value(head));
match result {

View File

@ -85,7 +85,7 @@ impl Command for Join {
Value::String { val: r_on, .. },
) => {
let result = join(rows_1, rows_2, l_on, r_on, join_type, span);
Ok(PipelineData::Value(result, metadata))
Ok(PipelineData::value(result, metadata))
}
_ => Err(ShellError::UnsupportedInput {
msg: "(PipelineData<table>, table, string, string)".into(),

View File

@ -186,7 +186,7 @@ impl Command for Last {
}
}
} else {
Ok(PipelineData::Empty)
Ok(PipelineData::empty())
}
} else {
Err(ShellError::OnlySupportsThisInputType {

View File

@ -57,7 +57,7 @@ impl Command for Lines {
src_span: value.span(),
}),
},
PipelineData::Empty => Ok(PipelineData::Empty),
PipelineData::Empty => Ok(PipelineData::empty()),
PipelineData::ListStream(stream, metadata) => {
let stream = stream.modify(|iter| {
iter.filter_map(move |value| {
@ -81,7 +81,7 @@ impl Command for Lines {
.flatten()
});
Ok(PipelineData::ListStream(stream, metadata))
Ok(PipelineData::list_stream(stream, metadata))
}
PipelineData::ByteStream(stream, ..) => {
if let Some(lines) = stream.lines() {

View File

@ -130,7 +130,7 @@ impl Command for ParEach {
};
match input {
PipelineData::Empty => Ok(PipelineData::Empty),
PipelineData::Empty => Ok(PipelineData::empty()),
PipelineData::Value(value, ..) => {
let span = value.span();
match value {

View File

@ -123,7 +123,7 @@ impl Command for Reduce {
acc = closure
.add_arg(value)
.add_arg(acc.clone())
.run_with_input(PipelineData::Value(acc, None))?
.run_with_input(PipelineData::value(acc, None))?
.into_value(head)?;
}

View File

@ -94,7 +94,7 @@ impl Command for Skip {
PipelineData::ByteStream(stream, metadata) => {
if stream.type_().is_binary_coercible() {
let span = stream.span();
Ok(PipelineData::ByteStream(
Ok(PipelineData::byte_stream(
stream.skip(span, n as u64)?,
metadata,
))

View File

@ -81,7 +81,7 @@ impl Command for Slice {
};
if count == 0 {
Ok(PipelineData::Value(Value::list(vec![], head), None))
Ok(PipelineData::value(Value::list(vec![], head), None))
} else {
let iter = v.into_iter().skip(from).take(count);
Ok(iter.into_pipeline_data(head, engine_state.signals().clone()))
@ -102,7 +102,7 @@ impl Command for Slice {
};
if count == 0 {
Ok(PipelineData::Value(Value::list(vec![], head), None))
Ok(PipelineData::value(Value::list(vec![], head), None))
} else {
let iter = input.into_iter().skip(from).take(count);
Ok(iter.into_pipeline_data(head, engine_state.signals().clone()))

View File

@ -62,7 +62,7 @@ impl Command for Take {
)),
Value::Binary { val, .. } => {
let slice: Vec<u8> = val.into_iter().take(rows_desired).collect();
Ok(PipelineData::Value(Value::binary(slice, span), metadata))
Ok(PipelineData::value(Value::binary(slice, span), metadata))
}
Value::Range { val, .. } => Ok(val
.into_range_iter(span, Signals::empty())
@ -82,14 +82,14 @@ impl Command for Take {
}),
}
}
PipelineData::ListStream(stream, metadata) => Ok(PipelineData::ListStream(
PipelineData::ListStream(stream, metadata) => Ok(PipelineData::list_stream(
stream.modify(|iter| iter.take(rows_desired)),
metadata,
)),
PipelineData::ByteStream(stream, metadata) => {
if stream.type_().is_binary_coercible() {
let span = stream.span();
Ok(PipelineData::ByteStream(
Ok(PipelineData::byte_stream(
stream.take(span, rows_desired as u64)?,
metadata,
))

View File

@ -138,7 +138,7 @@ use it in your pipeline."#
let tee_thread = spawn_tee(info, eval_block)?;
let tee = IoTee::new(read, tee_thread);
Ok(PipelineData::ByteStream(
Ok(PipelineData::byte_stream(
ByteStream::read(tee, span, engine_state.signals().clone(), type_),
metadata,
))
@ -151,7 +151,7 @@ use it in your pipeline."#
let tee_thread = spawn_tee(info, eval_block)?;
let tee = IoTee::new(file, tee_thread);
Ok(PipelineData::ByteStream(
Ok(PipelineData::byte_stream(
ByteStream::read(tee, span, engine_state.signals().clone(), type_),
metadata,
))
@ -234,7 +234,7 @@ use it in your pipeline."#
};
if child.stdout.is_some() || child.stderr.is_some() {
Ok(PipelineData::ByteStream(
Ok(PipelineData::byte_stream(
ByteStream::child(*child, span),
metadata,
))
@ -243,7 +243,7 @@ use it in your pipeline."#
thread.join().unwrap_or_else(|_| Err(panic_error()))?;
}
child.wait()?;
Ok(PipelineData::Empty)
Ok(PipelineData::empty())
}
}
}
@ -439,7 +439,7 @@ fn spawn_tee(
Signals::empty(),
info.type_,
);
eval_block(PipelineData::ByteStream(stream, info.metadata))
eval_block(PipelineData::byte_stream(stream, info.metadata))
})
.map_err(|err| {
IoError::new_with_additional_context(err, info.span, None, "Could not spawn tee")

View File

@ -293,7 +293,7 @@ pub fn transpose(
})
.collect::<Vec<Value>>();
if result_data.len() == 1 && args.as_record {
Ok(PipelineData::Value(
Ok(PipelineData::value(
result_data
.pop()
.expect("already check result only contains one item"),

View File

@ -210,7 +210,7 @@ fn update(
}
});
Ok(PipelineData::ListStream(stream, metadata))
Ok(PipelineData::list_stream(stream, metadata))
} else {
let stream = stream.map(move |mut value| {
if let Err(e) =
@ -222,7 +222,7 @@ fn update(
}
});
Ok(PipelineData::ListStream(stream, metadata))
Ok(PipelineData::list_stream(stream, metadata))
}
}
PipelineData::Empty => Err(ShellError::IncompatiblePathAccess {

View File

@ -288,7 +288,7 @@ fn upsert(
}
});
Ok(PipelineData::ListStream(stream, metadata))
Ok(PipelineData::list_stream(stream, metadata))
} else {
let stream = stream.map(move |mut value| {
if let Err(e) =
@ -300,7 +300,7 @@ fn upsert(
}
});
Ok(PipelineData::ListStream(stream, metadata))
Ok(PipelineData::list_stream(stream, metadata))
}
}
PipelineData::Empty => Err(ShellError::IncompatiblePathAccess {
@ -335,7 +335,7 @@ fn upsert_value_by_closure(
let input = value_at_path
.map(Cow::into_owned)
.map(IntoPipelineData::into_pipeline_data)
.unwrap_or(PipelineData::Empty);
.unwrap_or(PipelineData::empty());
let new_value = closure
.add_arg(arg)
@ -366,7 +366,7 @@ fn upsert_single_value_by_closure(
let input = value_at_path
.map(Cow::into_owned)
.map(IntoPipelineData::into_pipeline_data)
.unwrap_or(PipelineData::Empty);
.unwrap_or(PipelineData::empty());
let new_value = closure
.add_arg(arg)

View File

@ -137,7 +137,7 @@ fn values(
let signals = engine_state.signals().clone();
let metadata = input.metadata();
match input {
PipelineData::Empty => Ok(PipelineData::Empty),
PipelineData::Empty => Ok(PipelineData::empty()),
PipelineData::Value(v, ..) => {
let span = v.span();
match v {

View File

@ -120,12 +120,12 @@ impl Command for Window {
PipelineData::Value(Value::List { vals, .. }, metadata) => {
let chunks = WindowGapIter::new(vals, size, stride, remainder, head);
let stream = ListStream::new(chunks, head, engine_state.signals().clone());
Ok(PipelineData::ListStream(stream, metadata))
Ok(PipelineData::list_stream(stream, metadata))
}
PipelineData::ListStream(stream, metadata) => {
let stream = stream
.modify(|iter| WindowGapIter::new(iter, size, stride, remainder, head));
Ok(PipelineData::ListStream(stream, metadata))
Ok(PipelineData::list_stream(stream, metadata))
}
input => Err(input.unsupported_input_error("list", head)),
}
@ -134,12 +134,12 @@ impl Command for Window {
PipelineData::Value(Value::List { vals, .. }, metadata) => {
let chunks = WindowOverlapIter::new(vals, size, stride, remainder, head);
let stream = ListStream::new(chunks, head, engine_state.signals().clone());
Ok(PipelineData::ListStream(stream, metadata))
Ok(PipelineData::list_stream(stream, metadata))
}
PipelineData::ListStream(stream, metadata) => {
let stream = stream
.modify(|iter| WindowOverlapIter::new(iter, size, stride, remainder, head));
Ok(PipelineData::ListStream(stream, metadata))
Ok(PipelineData::list_stream(stream, metadata))
}
input => Err(input.unsupported_input_error("list", head)),
}

View File

@ -36,7 +36,7 @@ impl Command for Wrap {
let metadata = input.metadata();
match input {
PipelineData::Empty => Ok(PipelineData::Empty),
PipelineData::Empty => Ok(PipelineData::empty()),
PipelineData::Value(Value::Range { .. }, ..)
| PipelineData::Value(Value::List { .. }, ..)
| PipelineData::ListStream { .. } => Ok(input

View File

@ -103,7 +103,7 @@ impl Command for Zip {
let metadata = input.metadata();
let other = if let Value::Closure { val, .. } = other {
// If a closure was provided, evaluate it and consume its stream output
ClosureEvalOnce::new(engine_state, stack, *val).run_with_input(PipelineData::Empty)?
ClosureEvalOnce::new(engine_state, stack, *val).run_with_input(PipelineData::empty())?
} else {
other.into_pipeline_data()
};

View File

@ -95,11 +95,11 @@ pub(super) fn from_delimited_data(
) -> Result<PipelineData, ShellError> {
let metadata = input.metadata().map(|md| md.with_content_type(None));
match input {
PipelineData::Empty => Ok(PipelineData::Empty),
PipelineData::Empty => Ok(PipelineData::empty()),
PipelineData::Value(value, ..) => {
let string = value.into_string()?;
let byte_stream = ByteStream::read_string(string, name, Signals::empty());
Ok(PipelineData::ListStream(
Ok(PipelineData::list_stream(
from_delimited_stream(config, byte_stream, name)?,
metadata,
))
@ -110,7 +110,7 @@ pub(super) fn from_delimited_data(
dst_span: name,
src_span: list_stream.span(),
}),
PipelineData::ByteStream(byte_stream, ..) => Ok(PipelineData::ListStream(
PipelineData::ByteStream(byte_stream, ..) => Ok(PipelineData::list_stream(
from_delimited_stream(config, byte_stream, name)?,
metadata,
)),

View File

@ -76,25 +76,27 @@ impl Command for FromJson {
if call.has_flag(engine_state, stack, "objects")? {
// Return a stream of JSON values, one for each non-empty line
match input {
PipelineData::Value(Value::String { val, .. }, ..) => Ok(PipelineData::ListStream(
read_json_lines(
Cursor::new(val),
span,
strict,
engine_state.signals().clone(),
),
metadata,
)),
PipelineData::Value(Value::String { val, .. }, ..) => {
Ok(PipelineData::list_stream(
read_json_lines(
Cursor::new(val),
span,
strict,
engine_state.signals().clone(),
),
metadata,
))
}
PipelineData::ByteStream(stream, ..)
if stream.type_() != ByteStreamType::Binary =>
{
if let Some(reader) = stream.reader() {
Ok(PipelineData::ListStream(
Ok(PipelineData::list_stream(
read_json_lines(reader, span, strict, Signals::empty()),
metadata,
))
} else {
Ok(PipelineData::Empty)
Ok(PipelineData::empty())
}
}
_ => Err(ShellError::OnlySupportsThisInputType {

View File

@ -168,7 +168,7 @@ fn from_ods(
}
}
Ok(PipelineData::Value(
Ok(PipelineData::value(
Value::record(dict.into_iter().collect(), head),
None,
))

View File

@ -181,7 +181,7 @@ fn from_xlsx(
}
}
Ok(PipelineData::Value(
Ok(PipelineData::value(
Value::record(dict.into_iter().collect(), head),
None,
))

View File

@ -132,7 +132,7 @@ pub fn to_delimited_data(
Value::Record { val, .. } => val.columns().cloned().collect(),
_ => return Err(make_unsupported_input_error(value.get_type(), head, span)),
};
input = PipelineData::Value(value, metadata.clone());
input = PipelineData::value(value, metadata.clone());
columns
}
};
@ -181,5 +181,5 @@ pub fn to_delimited_data(
},
);
Ok(PipelineData::ByteStream(stream, metadata))
Ok(PipelineData::byte_stream(stream, metadata))
}

View File

@ -76,7 +76,7 @@ impl Command for ToJson {
data_source: nu_protocol::DataSource::None,
content_type: Some(mime::APPLICATION_JSON.to_string()),
};
Ok(PipelineData::Value(res, Some(metadata)))
Ok(PipelineData::value(res, Some(metadata)))
}
_ => Err(ShellError::CantConvert {
to_type: "JSON".into(),

View File

@ -121,10 +121,10 @@ impl Command for ToText {
)
};
Ok(PipelineData::ByteStream(stream, update_metadata(meta)))
Ok(PipelineData::byte_stream(stream, update_metadata(meta)))
}
PipelineData::ByteStream(stream, meta) => {
Ok(PipelineData::ByteStream(stream, update_metadata(meta)))
Ok(PipelineData::byte_stream(stream, update_metadata(meta)))
}
}
}

View File

@ -112,7 +112,7 @@ In this case, generation also stops when the input stream stops."#
let closure_result = closure
.add_arg(state_arg)
.run_with_input(PipelineData::Empty);
.run_with_input(PipelineData::empty());
let (output, next_input) = parse_closure_result(closure_result, head);
// We use `state` to control when to stop, not `output`. By wrapping
@ -135,7 +135,7 @@ In this case, generation also stops when the input stream stops."#
let closure_result = closure
.add_arg(item)
.add_arg(state_arg)
.run_with_input(PipelineData::Empty);
.run_with_input(PipelineData::empty());
let (output, next_input) = parse_closure_result(closure_result, head);
state = next_input;
Some(output)

View File

@ -76,7 +76,7 @@ pub fn calculate(
PipelineData::Value(Value::List { ref vals, .. }, ..) => match &vals[..] {
[Value::Record { .. }, _end @ ..] => helper_for_tables(
vals,
values.span().expect("PipelineData::Value had no span"),
values.span().expect("PipelineData::value had no span"),
name,
mf,
),

View File

@ -143,7 +143,7 @@ pub fn response_to_buffer(
let reader = response.into_reader();
PipelineData::ByteStream(
PipelineData::byte_stream(
ByteStream::read(reader, span, engine_state.signals().clone(), response_type)
.with_known_size(buffer_size),
None,

View File

@ -117,7 +117,7 @@ fn parse(value: Value, head: Span, config: &Config) -> Result<PipelineData, Shel
"params" => params,
};
Ok(PipelineData::Value(Value::record(record, head), None))
Ok(PipelineData::value(Value::record(record, head), None))
}
#[cfg(test)]

View File

@ -89,7 +89,7 @@ impl Command for UrlSplitQuery {
let span = value.span();
let query = value.to_expanded_string("", &stack.get_config(engine_state));
let table = query_string_to_table(&query, call.head, span)?;
Ok(PipelineData::Value(table, None))
Ok(PipelineData::value(table, None))
}
}

View File

@ -168,12 +168,12 @@ fn run(call: &Call, args: &Arguments, input: PipelineData) -> Result<PipelineDat
let metadata = input.metadata();
match input {
PipelineData::Value(val, md) => Ok(PipelineData::Value(handle_value(val, args, head), md)),
PipelineData::ListStream(stream, ..) => Ok(PipelineData::Value(
PipelineData::Value(val, md) => Ok(PipelineData::value(handle_value(val, args, head), md)),
PipelineData::ListStream(stream, ..) => Ok(PipelineData::value(
handle_value(stream.into_value(), args, head),
metadata,
)),
PipelineData::ByteStream(stream, ..) => Ok(PipelineData::Value(
PipelineData::ByteStream(stream, ..) => Ok(PipelineData::value(
handle_value(stream.into_value()?, args, head),
metadata,
)),

View File

@ -66,7 +66,7 @@ impl Command for Clear {
}
};
Ok(PipelineData::Empty)
Ok(PipelineData::empty())
}
fn examples(&self) -> Vec<Example> {

View File

@ -63,7 +63,7 @@ impl Command for IsTerminal {
}
};
Ok(PipelineData::Value(
Ok(PipelineData::value(
Value::bool(is_terminal, call.head),
None,
))

View File

@ -555,7 +555,7 @@ impl Command for ULimit {
set_limits(&limit_value, &res, hard, soft, call.head)?;
}
Ok(PipelineData::Empty)
Ok(PipelineData::empty())
} else {
print_limits(call, engine_state, stack, all, soft, hard)
}

View File

@ -78,7 +78,7 @@ fn bool(
let bool_result: bool = random_bool(probability);
Ok(PipelineData::Value(Value::bool(bool_result, span), None))
Ok(PipelineData::value(Value::bool(bool_result, span), None))
}
#[cfg(test)]

View File

@ -24,7 +24,7 @@ pub(super) fn random_byte_stream(
const OUTPUT_CHUNK_SIZE: usize = 8192;
let mut remaining_bytes = length;
PipelineData::ByteStream(
PipelineData::byte_stream(
ByteStream::from_fn(span, signals.clone(), stream_type, move |out| {
if remaining_bytes == 0 || signals.interrupted() {
return Ok(false);

View File

@ -93,9 +93,9 @@ fn float(
Bound::Unbounded => random_range(range.start()..f64::MAX),
};
Ok(PipelineData::Value(Value::float(value, span), None))
Ok(PipelineData::value(Value::float(value, span), None))
}
None => Ok(PipelineData::Value(
None => Ok(PipelineData::value(
Value::float(random_range(0.0..1.0), span),
None,
)),

View File

@ -97,7 +97,7 @@ fn integer(
Bound::Unbounded => random_range(range.start()..=i64::MAX),
};
Ok(PipelineData::Value(Value::int(value, span), None))
Ok(PipelineData::value(Value::int(value, span), None))
}
Range::FloatRange(_) => Err(ShellError::UnsupportedInput {
msg: "float range".into(),
@ -107,7 +107,7 @@ fn integer(
}),
}
}
None => Ok(PipelineData::Value(
None => Ok(PipelineData::value(
Value::int(random_range(0..=i64::MAX), span),
None,
)),

View File

@ -143,7 +143,7 @@ fn uuid(
}
};
Ok(PipelineData::Value(Value::string(uuid_str, span), None))
Ok(PipelineData::value(Value::string(uuid_str, span), None))
}
fn validate_flags(

View File

@ -270,7 +270,7 @@ pub fn compare_custom_closure(
closure_eval
.add_arg(left.clone())
.add_arg(right.clone())
.run_with_input(PipelineData::Value(
.run_with_input(PipelineData::value(
Value::list(vec![left.clone(), right.clone()], span),
None,
))

View File

@ -160,7 +160,7 @@ fn run(
) -> Result<PipelineData, ShellError> {
let head = call.head;
if list {
return Ok(PipelineData::Value(
return Ok(PipelineData::value(
generate_strftime_list(head, false),
None,
));

View File

@ -195,7 +195,7 @@ fn operate(
.collect::<Vec<_>>();
match input {
PipelineData::Empty => Ok(PipelineData::Empty),
PipelineData::Empty => Ok(PipelineData::empty()),
PipelineData::Value(value, ..) => match value {
Value::String { val, .. } => {
let captures = regex
@ -270,7 +270,7 @@ fn operate(
Ok(ListStream::new(iter, head, Signals::empty()).into())
} else {
Ok(PipelineData::Empty)
Ok(PipelineData::empty())
}
}
}

View File

@ -128,7 +128,7 @@ fn run(
},
);
Ok(PipelineData::ByteStream(output, metadata))
Ok(PipelineData::byte_stream(output, metadata))
}
#[cfg(test)]

View File

@ -234,10 +234,10 @@ fn check_parse(
inner: vec![],
})
} else {
Ok(PipelineData::Value(Value::bool(false, call_head), None))
Ok(PipelineData::value(Value::bool(false, call_head), None))
}
} else {
Ok(PipelineData::Value(Value::bool(true, call_head), None))
Ok(PipelineData::value(Value::bool(true, call_head), None))
}
}
@ -289,10 +289,10 @@ fn parse_file_or_dir_module(
inner: vec![],
})
} else {
Ok(PipelineData::Value(Value::bool(false, call_head), None))
Ok(PipelineData::value(Value::bool(false, call_head), None))
}
} else {
Ok(PipelineData::Value(Value::bool(true, call_head), None))
Ok(PipelineData::value(Value::bool(true, call_head), None))
}
}

View File

@ -246,7 +246,7 @@ impl Command for External {
}
Err(stream) => {
command.stdin(Stdio::piped());
Some(PipelineData::ByteStream(stream, metadata))
Some(PipelineData::byte_stream(stream, metadata))
}
},
PipelineData::Empty => {
@ -313,7 +313,7 @@ impl Command for External {
let child_pid = child.pid();
// Wrap the output into a `PipelineData::ByteStream`.
// Wrap the output into a `PipelineData::byte_stream`.
let mut child = ChildProcess::new(
child,
merged_stream,
@ -336,7 +336,7 @@ impl Command for External {
child.ignore_error(true);
}
Ok(PipelineData::ByteStream(
Ok(PipelineData::byte_stream(
ByteStream::child(child, call.head),
None,
))
@ -478,7 +478,7 @@ fn resolve_globbed_path_to_cwd_relative(
///
/// Note: Avoid using this function when piping data from an external command to
/// another external command, because it copies data unnecessarily. Instead,
/// extract the pipe from the `PipelineData::ByteStream` of the first command
/// extract the pipe from the `PipelineData::byte_stream` of the first command
/// and hand it to the second command directly.
fn write_pipeline_data(
mut engine_state: EngineState,
@ -788,22 +788,22 @@ mod test {
engine_state.add_env_var("PWD".into(), Value::string(cwd, Span::test_data()));
let mut buf = vec![];
let input = PipelineData::Empty;
let input = PipelineData::empty();
write_pipeline_data(engine_state.clone(), stack.clone(), input, &mut buf).unwrap();
assert_eq!(buf, b"");
let mut buf = vec![];
let input = PipelineData::Value(Value::string("foo", Span::unknown()), None);
let input = PipelineData::value(Value::string("foo", Span::unknown()), None);
write_pipeline_data(engine_state.clone(), stack.clone(), input, &mut buf).unwrap();
assert_eq!(buf, b"foo");
let mut buf = vec![];
let input = PipelineData::Value(Value::binary(b"foo", Span::unknown()), None);
let input = PipelineData::value(Value::binary(b"foo", Span::unknown()), None);
write_pipeline_data(engine_state.clone(), stack.clone(), input, &mut buf).unwrap();
assert_eq!(buf, b"foo");
let mut buf = vec![];
let input = PipelineData::ByteStream(
let input = PipelineData::byte_stream(
ByteStream::read(
b"foo".as_slice(),
Span::unknown(),

View File

@ -69,7 +69,7 @@ impl Command for UName {
.to_string())
})
.collect::<Result<Vec<String>, ShellError>>()?;
Ok(PipelineData::Value(
Ok(PipelineData::value(
Value::record(
record! {
"kernel-name" => Value::string(outputs[0].clone(), span),

View File

@ -424,13 +424,13 @@ fn handle_table_command(mut input: CmdInput<'_>) -> ShellResult<PipelineData> {
match input.data {
// Binary streams should behave as if they really are `binary` data, and printed as hex
PipelineData::ByteStream(stream, _) if stream.type_() == ByteStreamType::Binary => Ok(
PipelineData::ByteStream(pretty_hex_stream(stream, input.call.head), None),
PipelineData::byte_stream(pretty_hex_stream(stream, input.call.head), None),
),
PipelineData::ByteStream(..) => Ok(input.data),
PipelineData::Value(Value::Binary { val, .. }, ..) => {
let signals = input.engine_state.signals().clone();
let stream = ByteStream::read_binary(val, input.call.head, signals);
Ok(PipelineData::ByteStream(
Ok(PipelineData::byte_stream(
pretty_hex_stream(stream, input.call.head),
None,
))
@ -439,16 +439,16 @@ fn handle_table_command(mut input: CmdInput<'_>) -> ShellResult<PipelineData> {
PipelineData::Value(Value::List { vals, .. }, metadata) => {
let signals = input.engine_state.signals().clone();
let stream = ListStream::new(vals.into_iter(), span, signals);
input.data = PipelineData::Empty;
input.data = PipelineData::empty();
handle_row_stream(input, stream, metadata)
}
PipelineData::ListStream(stream, metadata) => {
input.data = PipelineData::Empty;
input.data = PipelineData::empty();
handle_row_stream(input, stream, metadata)
}
PipelineData::Value(Value::Record { val, .. }, ..) => {
input.data = PipelineData::Empty;
input.data = PipelineData::empty();
handle_record(input, val.into_owned())
}
PipelineData::Value(Value::Error { error, .. }, ..) => {
@ -464,7 +464,7 @@ fn handle_table_command(mut input: CmdInput<'_>) -> ShellResult<PipelineData> {
let signals = input.engine_state.signals().clone();
let stream =
ListStream::new(val.into_range_iter(span, Signals::empty()), span, signals);
input.data = PipelineData::Empty;
input.data = PipelineData::empty();
handle_row_stream(input, stream, metadata)
}
x => Ok(x),
@ -761,7 +761,7 @@ fn handle_row_stream(
Signals::empty(),
ByteStreamType::String,
);
Ok(PipelineData::ByteStream(stream, None))
Ok(PipelineData::byte_stream(stream, None))
}
fn make_clickable_link(

View File

@ -2,9 +2,9 @@ use super::*;
use nu_engine::test_help::{convert_single_value_to_cmd_args, eval_block_with_input};
use nu_engine::{current_dir, eval_expression};
use nu_protocol::{
PipelineData, Span, Spanned, Type, Value,
ast::Call,
engine::{EngineState, Stack, StateWorkingSet},
PipelineData, Span, Spanned, Type, Value,
};
use std::path::PathBuf;
@ -33,19 +33,14 @@ fn test_start_valid_url() {
// Create call for: `start https://www.example.com`
let path = "https://www.example.com".to_string();
let span = Span::test_data();
let span = Span::test_data();
let call = Call::test(
"start",
// The arguments for `start` are just the path in this case
vec![Value::string(path, span)],
);
let result = Start.run(
&engine_state,
&mut stack,
&call,
PipelineData::Empty,
);
let result = Start.run(&engine_state, &mut stack, &call, PipelineData::empty());
assert!(
result.is_ok(),
@ -61,17 +56,9 @@ fn test_start_valid_local_path() {
// Here we'll simulate opening the current directory (`.`).
let path = ".".to_string();
let span = Span::test_data();
let call = Call::test(
"start",
vec![Value::string(path, span)],
);
let call = Call::test("start", vec![Value::string(path, span)]);
let result = Start.run(
&engine_state,
&mut stack,
&call,
PipelineData::Empty,
);
let result = Start.run(&engine_state, &mut stack, &call, PipelineData::empty());
// If the environment is correctly set, it should succeed.
// If you're running in a CI environment or restricted environment
@ -90,17 +77,9 @@ fn test_start_nonexistent_local_path() {
// Create an obviously invalid path
let path = "this_file_does_not_exist_hopefully.txt".to_string();
let span = Span::test_data();
let call = Call::test(
"start",
vec![Value::string(path, span)],
);
let call = Call::test("start", vec![Value::string(path, span)]);
let result = Start.run(
&engine_state,
&mut stack,
&call,
PipelineData::Empty,
);
let result = Start.run(&engine_state, &mut stack, &call, PipelineData::empty());
// We expect an error since the file does not exist
assert!(
@ -117,4 +96,5 @@ fn test_start_nonexistent_local_path() {
} else {
panic!("Unexpected error type, expected ShellError::GenericError");
}
}
}