Refactor: introduce 2 associated functions to PipelineData (#16233)

# Description
As the title says: this PR introduces 2 associated functions to `PipelineData`:
1. `PipelineData::list_stream` --> creates a `PipelineData::ListStream`
2. `PipelineData::byte_stream` --> creates a `PipelineData::ByteStream`
These functions are then used everywhere in place of direct enum construction.

### Reason behind this change
I tried to implement the `pipefail` feature, but this would require
changing `PipelineData` from an enum to a struct. Using these functions
will reduce the diff if I eventually make that change. [Discord message
here](https://discord.com/channels/601130461678272522/615962413203718156/1396999539000479784)
describes my plan.

# User-Facing Changes
N/A

# Tests + Formatting
N/A

# After Submitting
N/A
This commit is contained in:
Wind
2025-08-02 09:30:30 +08:00
committed by GitHub
parent ee5b5bd39e
commit eb8d2d3206
126 changed files with 299 additions and 304 deletions

View File

@ -72,7 +72,7 @@ impl PluginCommand for CacheGet {
Value::nothing(call.head)
};
Ok(PipelineData::Value(value, None))
Ok(PipelineData::value(value, None))
}
}

View File

@ -50,7 +50,7 @@ impl PluginCommand for CacheRemove {
.map(|ref key| remove_cache_entry(plugin, engine, key, call.head))
.collect::<Result<Vec<Value>, ShellError>>()?;
Ok(PipelineData::Value(Value::list(msgs, call.head), None))
Ok(PipelineData::value(Value::list(msgs, call.head), None))
}
}

View File

@ -67,7 +67,7 @@ fn command(
let names = Value::list(names, call.head);
Ok(PipelineData::Value(names, None))
Ok(PipelineData::value(names, None))
}
#[cfg(test)]

View File

@ -205,7 +205,7 @@ fn command(
"File without extension",
))),
}
.map(|value| PipelineData::Value(value, Some(metadata)))
.map(|value| PipelineData::value(value, Some(metadata)))
}
fn from_parquet(

View File

@ -107,5 +107,5 @@ fn command_lazy(
call.head,
);
Ok(PipelineData::Value(result, None))
Ok(PipelineData::value(result, None))
}

View File

@ -51,7 +51,7 @@ impl PluginCommand for SchemaCmd {
input: PipelineData,
) -> Result<PipelineData, LabeledError> {
if call.has_flag("datatype-list")? {
Ok(PipelineData::Value(datatype_list(Span::unknown()), None))
Ok(PipelineData::value(datatype_list(Span::unknown()), None))
} else {
command(plugin, engine, call, input).map_err(LabeledError::from)
}
@ -68,12 +68,12 @@ fn command(
PolarsPluginObject::NuDataFrame(df) => {
let schema = df.schema();
let value = schema.base_value(call.head)?;
Ok(PipelineData::Value(value, None))
Ok(PipelineData::value(value, None))
}
PolarsPluginObject::NuLazyFrame(mut lazy) => {
let schema = lazy.schema()?;
let value = schema.base_value(call.head)?;
Ok(PipelineData::Value(value, None))
Ok(PipelineData::value(value, None))
}
_ => Err(ShellError::GenericError {
error: "Must be a dataframe or lazy dataframe".into(),

View File

@ -85,7 +85,7 @@ impl PluginCommand for ToLazyFrame {
let mut lazy = NuLazyFrame::from_dataframe(df);
// We don't want this converted back to an eager dataframe at some point
lazy.from_eager = false;
Ok(PipelineData::Value(
Ok(PipelineData::value(
lazy.cache(plugin, engine, call.head)?.into_value(call.head),
None,
))

View File

@ -106,15 +106,15 @@ fn command(
PolarsPluginObject::NuLazyFrame(lazy) => dataframe_command(call, lazy.collect(call.head)?),
PolarsPluginObject::NuExpression(expr) => {
let value = expr.to_value(call.head)?;
Ok(PipelineData::Value(value, None))
Ok(PipelineData::value(value, None))
}
PolarsPluginObject::NuDataType(dt) => {
let value = dt.base_value(call.head)?;
Ok(PipelineData::Value(value, None))
Ok(PipelineData::value(value, None))
}
PolarsPluginObject::NuSchema(schema) => {
let value = schema.base_value(call.head)?;
Ok(PipelineData::Value(value, None))
Ok(PipelineData::value(value, None))
}
_ => Err(cant_convert_err(
&value,
@ -147,7 +147,7 @@ fn dataframe_command(call: &EvaluatedCall, df: NuDataFrame) -> Result<PipelineDa
let value = Value::list(values, call.head);
Ok(PipelineData::Value(value, None))
Ok(PipelineData::value(value, None))
}
#[cfg(test)]

View File

@ -98,7 +98,7 @@ fn dataframe_command(
) -> Result<PipelineData, ShellError> {
let df = NuDataFrame::try_from_value_coerce(plugin, &input, call.head)?;
let value = Value::string(format!("{df}"), call.head);
Ok(PipelineData::Value(value, None))
Ok(PipelineData::value(value, None))
}
#[cfg(test)]

View File

@ -69,7 +69,7 @@ impl PluginCommand for LazyCollect {
let mut eager = lazy.collect(call.head)?;
// We don't want this converted back to a lazy frame
eager.from_lazy = true;
Ok(PipelineData::Value(
Ok(PipelineData::value(
eager
.cache(plugin, engine, call.head)?
.into_value(call.head),
@ -94,7 +94,7 @@ impl PluginCommand for LazyCollect {
let df = NuDataFrame::from_cache_value(cv.value.clone())?;
// just return the dataframe, add to cache again to be safe
Ok(PipelineData::Value(df.into_value(call.head), None))
Ok(PipelineData::value(df.into_value(call.head), None))
}
_ => Err(cant_convert_err(
&value,

View File

@ -48,7 +48,7 @@ to the `polars agg` command with some column expressions for aggregation which t
call: &EvaluatedCall,
_input: PipelineData,
) -> Result<PipelineData, LabeledError> {
Ok(PipelineData::Value(
Ok(PipelineData::value(
Value::string(engine.get_help()?, call.head),
None,
))

View File

@ -406,7 +406,7 @@ pub trait CustomValueSupport: Cacheable {
engine: &EngineInterface,
span: Span,
) -> Result<PipelineData, ShellError> {
Ok(PipelineData::Value(
Ok(PipelineData::value(
self.cache_and_to_value(plugin, engine, span)?,
None,
))