Boxes record for smaller Value enum. (#12252)

<!--
if this PR closes one or more issues, you can automatically link the PR
with
them by using one of the [*linking
keywords*](https://docs.github.com/en/issues/tracking-your-work-with-issues/linking-a-pull-request-to-an-issue#linking-a-pull-request-to-an-issue-using-a-keyword),
e.g.
- this PR should close #xxxx
- fixes #xxxx

you can also mention related issues, PRs or discussions!
-->

# Description
<!--
Thank you for improving Nushell. Please, check our [contributing
guide](../CONTRIBUTING.md) and talk to the core team before making major
changes.

Description of your pull request goes here. **Provide examples and/or
screenshots** if your changes affect the user experience.
-->
Boxes `Record` inside `Value` to reduce memory usage; `Value` shrinks from
72 to 56 bytes after this change.
# User-Facing Changes
<!-- List of all changes that impact the user experience here. This
helps us keep track of breaking changes. -->

# Tests + Formatting
<!--
Don't forget to add tests that cover your changes.

Make sure you've run and fixed any issues with these commands:

- `cargo fmt --all -- --check` to check standard code formatting (`cargo
fmt --all` applies these changes)
- `cargo clippy --workspace -- -D warnings -D clippy::unwrap_used` to
check that you're using the standard code style
- `cargo test --workspace` to check that all tests pass (on Windows make
sure to [enable developer
mode](https://learn.microsoft.com/en-us/windows/apps/get-started/developer-mode-features-and-debugging))
- `cargo run -- -c "use std testing; testing run-tests --path
crates/nu-std"` to run the tests for the standard library

> **Note**
> from `nushell` you can also use the `toolkit` as follows
> ```bash
> use toolkit.nu # or use an `env_change` hook to activate it
automatically
> toolkit check pr
> ```
-->

# After Submitting
<!-- If your PR had any user-facing changes, update [the
documentation](https://github.com/nushell/nushell.github.io) after the
PR is merged, if necessary. This will help us keep the docs up to date.
-->
This commit is contained in:
Filip Andersson 2024-03-26 16:17:44 +01:00 committed by GitHub
parent 24d2c8dd8e
commit b70766e6f5
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
36 changed files with 174 additions and 78 deletions

View File

@ -2,8 +2,9 @@ use nu_cli::{eval_source, evaluate_commands};
use nu_parser::parse; use nu_parser::parse;
use nu_plugin::{Encoder, EncodingType, PluginCallResponse, PluginOutput}; use nu_plugin::{Encoder, EncodingType, PluginCallResponse, PluginOutput};
use nu_protocol::{ use nu_protocol::{
engine::EngineState, eval_const::create_nu_constant, PipelineData, Span, Spanned, Value, engine::{EngineState, Stack},
NU_VARIABLE_ID, eval_const::create_nu_constant,
PipelineData, Span, Spanned, Value, NU_VARIABLE_ID,
}; };
use nu_std::load_standard_library; use nu_std::load_standard_library;
use nu_utils::{get_default_config, get_default_env}; use nu_utils::{get_default_config, get_default_env};
@ -54,6 +55,61 @@ fn setup_engine() -> EngineState {
engine_state engine_state
} }
fn bench_command(bencher: divan::Bencher, scaled_command: String) {
bench_command_with_custom_stack_and_engine(
bencher,
scaled_command,
Stack::new(),
setup_engine(),
)
}
fn bench_command_with_custom_stack_and_engine(
bencher: divan::Bencher,
scaled_command: String,
stack: nu_protocol::engine::Stack,
mut engine: EngineState,
) {
load_standard_library(&mut engine).unwrap();
let commands = Spanned {
span: Span::unknown(),
item: scaled_command,
};
bencher
.with_inputs(|| engine.clone())
.bench_values(|mut engine| {
evaluate_commands(
&commands,
&mut engine,
&mut stack.clone(),
PipelineData::empty(),
None,
)
.unwrap();
})
}
fn setup_stack_and_engine_from_command(command: &str) -> (Stack, EngineState) {
let mut engine = setup_engine();
let commands = Spanned {
span: Span::unknown(),
item: command.to_string(),
};
let mut stack = Stack::new();
evaluate_commands(
&commands,
&mut engine,
&mut stack,
PipelineData::empty(),
None,
)
.unwrap();
(stack, engine)
}
// FIXME: All benchmarks live in this 1 file to speed up build times when benchmarking. // FIXME: All benchmarks live in this 1 file to speed up build times when benchmarking.
// When the *_benchmarks functions were in different files, `cargo bench` would build // When the *_benchmarks functions were in different files, `cargo bench` would build
// an executable for every single one - incredibly slowly. Would be nice to figure out // an executable for every single one - incredibly slowly. Would be nice to figure out
@ -70,31 +126,69 @@ fn load_standard_lib(bencher: divan::Bencher) {
} }
#[divan::bench_group] #[divan::bench_group]
mod eval_commands { mod record {
use super::*; use super::*;
fn bench_command(bencher: divan::Bencher, scaled_command: String) { fn create_flat_record_string(n: i32) -> String {
let mut engine = setup_engine(); let mut s = String::from("let record = {");
load_standard_library(&mut engine).unwrap(); for i in 0..n {
let commands = Spanned { s.push_str(&format!("col_{}: {}", i, i));
span: Span::unknown(), if i < n - 1 {
item: scaled_command, s.push_str(", ");
}; }
}
bencher s.push('}');
.with_inputs(|| engine.clone()) s
.bench_values(|mut engine| {
evaluate_commands(
&commands,
&mut engine,
&mut nu_protocol::engine::Stack::new(),
PipelineData::empty(),
None,
)
.unwrap();
})
} }
fn create_nested_record_string(depth: i32) -> String {
let mut s = String::from("let record = {");
for _ in 0..depth {
s.push_str("col: {{");
}
s.push_str("col_final: 0");
for _ in 0..depth {
s.push('}');
}
s.push('}');
s
}
#[divan::bench(args = [1, 10, 100, 1000])]
fn create(bencher: divan::Bencher, n: i32) {
bench_command(bencher, create_flat_record_string(n));
}
#[divan::bench(args = [1, 10, 100, 1000])]
fn flat_access(bencher: divan::Bencher, n: i32) {
let (stack, engine) = setup_stack_and_engine_from_command(&create_flat_record_string(n));
bench_command_with_custom_stack_and_engine(
bencher,
"$record.col_0 | ignore".to_string(),
stack,
engine,
);
}
#[divan::bench(args = [1, 2, 4, 8, 16, 32, 64, 128])]
fn nest_access(bencher: divan::Bencher, depth: i32) {
let (stack, engine) =
setup_stack_and_engine_from_command(&create_nested_record_string(depth));
let nested_access = ".col".repeat(depth as usize);
bench_command_with_custom_stack_and_engine(
bencher,
format!("$record{} | ignore", nested_access),
stack,
engine,
);
}
}
#[divan::bench_group]
mod eval_commands {
use super::*;
#[divan::bench(args = [100, 1_000, 10_000])] #[divan::bench(args = [100, 1_000, 10_000])]
fn interleave(bencher: divan::Bencher, n: i32) { fn interleave(bencher: divan::Bencher, n: i32) {
bench_command( bench_command(

View File

@ -319,7 +319,7 @@ fn recursive_value(val: Value, sublevels: Vec<Vec<u8>>) -> Value {
let span = val.span(); let span = val.span();
match val { match val {
Value::Record { val, .. } => { Value::Record { val, .. } => {
for item in val { for item in *val {
// Check if index matches with sublevel // Check if index matches with sublevel
if item.0.as_bytes().to_vec() == next_sublevel { if item.0.as_bytes().to_vec() == next_sublevel {
// If matches try to fetch recursively the next // If matches try to fetch recursively the next

View File

@ -17,7 +17,7 @@ pub fn eval_env_change_hook(
if let Some(hook) = env_change_hook { if let Some(hook) = env_change_hook {
match hook { match hook {
Value::Record { val, .. } => { Value::Record { val, .. } => {
for (env_name, hook_value) in &val { for (env_name, hook_value) in &*val {
let before = engine_state let before = engine_state
.previous_env_vars .previous_env_vars
.get(env_name) .get(env_name)

View File

@ -164,7 +164,7 @@ impl NuDataFrame {
conversion::insert_record(&mut column_values, record, &maybe_schema)? conversion::insert_record(&mut column_values, record, &maybe_schema)?
} }
Value::Record { val: record, .. } => { Value::Record { val: record, .. } => {
conversion::insert_record(&mut column_values, record, &maybe_schema)? conversion::insert_record(&mut column_values, *record, &maybe_schema)?
} }
_ => { _ => {
let key = "0".to_string(); let key = "0".to_string();

View File

@ -319,7 +319,7 @@ fn describe_value(
record!( record!(
"type" => Value::string("record", head), "type" => Value::string("record", head),
"lazy" => Value::bool(false, head), "lazy" => Value::bool(false, head),
"columns" => Value::record(val, head), "columns" => Value::record(*val, head),
), ),
head, head,
) )
@ -410,7 +410,7 @@ fn describe_value(
)?); )?);
} }
record.push("columns", Value::record(val, head)); record.push("columns", Value::record(*val, head));
} else { } else {
let cols = val.column_names(); let cols = val.column_names();
record.push("length", Value::int(cols.len() as i64, head)); record.push("length", Value::int(cols.len() as i64, head));

View File

@ -238,7 +238,7 @@ impl<'a> std::fmt::Debug for DebuggableValue<'a> {
Value::Record { val, .. } => { Value::Record { val, .. } => {
write!(f, "{{")?; write!(f, "{{")?;
let mut first = true; let mut first = true;
for (col, value) in val.into_iter() { for (col, value) in (&**val).into_iter() {
if !first { if !first {
write!(f, ", ")?; write!(f, ", ")?;
} }

View File

@ -182,7 +182,7 @@ fn run_histogram(
match v { match v {
// parse record, and fill valid value to actual input. // parse record, and fill valid value to actual input.
Value::Record { val, .. } => { Value::Record { val, .. } => {
for (c, v) in val { for (c, v) in *val {
if &c == col_name { if &c == col_name {
if let Ok(v) = HashableValue::from_value(v, head_span) { if let Ok(v) = HashableValue::from_value(v, head_span) {
inputs.push(v); inputs.push(v);

View File

@ -135,7 +135,7 @@ fn into_record(
.collect(), .collect(),
span, span,
), ),
Value::Record { val, .. } => Value::record(val, span), Value::Record { val, .. } => Value::record(*val, span),
Value::Error { .. } => input, Value::Error { .. } => input,
other => Value::error( other => Value::error(
ShellError::TypeMismatch { ShellError::TypeMismatch {

View File

@ -58,7 +58,7 @@ impl Command for LoadEnv {
} }
None => match input { None => match input {
PipelineData::Value(Value::Record { val, .. }, ..) => { PipelineData::Value(Value::Record { val, .. }, ..) => {
for (env_var, rhs) in val { for (env_var, rhs) in *val {
let env_var_ = env_var.as_str(); let env_var_ = env_var.as_str();
if ["FILE_PWD", "CURRENT_FILE"].contains(&env_var_) { if ["FILE_PWD", "CURRENT_FILE"].contains(&env_var_) {
return Err(ShellError::AutomaticEnvVarSetManually { return Err(ShellError::AutomaticEnvVarSetManually {

View File

@ -95,7 +95,7 @@ fn with_env(
// single row([[X W]; [Y Z]]) // single row([[X W]; [Y Z]])
match &table[0] { match &table[0] {
Value::Record { val, .. } => { Value::Record { val, .. } => {
for (k, v) in val { for (k, v) in &**val {
env.insert(k.to_string(), v.clone()); env.insert(k.to_string(), v.clone());
} }
} }
@ -123,7 +123,7 @@ fn with_env(
} }
// when get object by `open x.json` or `from json` // when get object by `open x.json` or `from json`
Value::Record { val, .. } => { Value::Record { val, .. } => {
for (k, v) in val { for (k, v) in &**val {
env.insert(k.clone(), v.clone()); env.insert(k.clone(), v.clone());
} }
} }

View File

@ -112,7 +112,7 @@ fn default(
record.push(column.item.clone(), value.clone()); record.push(column.item.clone(), value.clone());
} }
Value::record(record, span) Value::record(*record, span)
} }
_ => item, _ => item,
} }

View File

@ -129,7 +129,7 @@ fn drop_cols(
} => { } => {
let len = record.len().saturating_sub(columns); let len = record.len().saturating_sub(columns);
record.truncate(len); record.truncate(len);
Ok(Value::record(record, span).into_pipeline_data_with_metadata(metadata)) Ok(Value::record(*record, span).into_pipeline_data_with_metadata(metadata))
} }
// Propagate errors // Propagate errors
Value::Error { error, .. } => Err(*error), Value::Error { error, .. } => Err(*error),

View File

@ -170,7 +170,7 @@ fn flat_value(columns: &[CellPath], item: Value, all: bool) -> Vec<Value> {
match value { match value {
Value::Record { val, .. } => { Value::Record { val, .. } => {
if need_flatten { if need_flatten {
for (col, val) in val { for (col, val) in *val {
if out.contains_key(&col) { if out.contains_key(&col) {
out.insert(format!("{column}_{col}"), val); out.insert(format!("{column}_{col}"), val);
} else { } else {
@ -178,9 +178,9 @@ fn flat_value(columns: &[CellPath], item: Value, all: bool) -> Vec<Value> {
} }
} }
} else if out.contains_key(&column) { } else if out.contains_key(&column) {
out.insert(format!("{column}_{column}"), Value::record(val, span)); out.insert(format!("{column}_{column}"), Value::record(*val, span));
} else { } else {
out.insert(column, Value::record(val, span)); out.insert(column, Value::record(*val, span));
} }
} }
Value::List { vals, .. } => { Value::List { vals, .. } => {

View File

@ -228,7 +228,7 @@ fn rename(
} }
} }
Value::record(record, span) Value::record(*record, span)
} }
// Propagate errors by explicitly matching them before the final case. // Propagate errors by explicitly matching them before the final case.
Value::Error { .. } => item.clone(), Value::Error { .. } => item.clone(),

View File

@ -149,7 +149,7 @@ impl Command for Sort {
// Records have two sorting methods, toggled by presence or absence of -v // Records have two sorting methods, toggled by presence or absence of -v
PipelineData::Value(Value::Record { val, .. }, ..) => { PipelineData::Value(Value::Record { val, .. }, ..) => {
let sort_by_value = call.has_flag(engine_state, stack, "values")?; let sort_by_value = call.has_flag(engine_state, stack, "values")?;
let record = sort_record(val, span, sort_by_value, reverse, insensitive, natural); let record = sort_record(*val, span, sort_by_value, reverse, insensitive, natural);
Ok(record.into_pipeline_data()) Ok(record.into_pipeline_data())
} }
// Other values are sorted here // Other values are sorted here

View File

@ -111,7 +111,7 @@ pub fn get_values<'a>(
for item in input { for item in input {
match item { match item {
Value::Record { val, .. } => { Value::Record { val, .. } => {
for (k, v) in val { for (k, v) in &**val {
if let Some(vec) = output.get_mut(k) { if let Some(vec) = output.get_mut(k) {
vec.push(v.clone()); vec.push(v.clone());
} else { } else {

View File

@ -417,7 +417,7 @@ mod tests {
content_tag( content_tag(
"nu", "nu",
indexmap! {}, indexmap! {},
&vec![ &[
content_tag("dev", indexmap! {}, &[content_string("Andrés")]), content_tag("dev", indexmap! {}, &[content_string("Andrés")]),
content_tag("dev", indexmap! {}, &[content_string("JT")]), content_tag("dev", indexmap! {}, &[content_string("JT")]),
content_tag("dev", indexmap! {}, &[content_string("Yehuda")]) content_tag("dev", indexmap! {}, &[content_string("Yehuda")])

View File

@ -135,7 +135,7 @@ pub fn value_to_json_value(v: &Value) -> Result<nu_json::Value, ShellError> {
} }
Value::Record { val, .. } => { Value::Record { val, .. } => {
let mut m = nu_json::Map::new(); let mut m = nu_json::Map::new();
for (k, v) in val { for (k, v) in &**val {
m.insert(k.clone(), value_to_json_value(v)?); m.insert(k.clone(), value_to_json_value(v)?);
} }
nu_json::Value::Object(m) nu_json::Value::Object(m)

View File

@ -252,7 +252,7 @@ pub fn value_to_string(
)), )),
Value::Record { val, .. } => { Value::Record { val, .. } => {
let mut collection = vec![]; let mut collection = vec![];
for (col, val) in val { for (col, val) in &**val {
collection.push(if needs_quotes(col) { collection.push(if needs_quotes(col) {
format!( format!(
"{idt_po}\"{}\": {}", "{idt_po}\"{}\": {}",

View File

@ -60,7 +60,7 @@ fn helper(engine_state: &EngineState, v: &Value) -> Result<toml::Value, ShellErr
Value::String { val, .. } | Value::Glob { val, .. } => toml::Value::String(val.clone()), Value::String { val, .. } | Value::Glob { val, .. } => toml::Value::String(val.clone()),
Value::Record { val, .. } => { Value::Record { val, .. } => {
let mut m = toml::map::Map::new(); let mut m = toml::map::Map::new();
for (k, v) in val { for (k, v) in &**val {
m.insert(k.clone(), helper(engine_state, v)?); m.insert(k.clone(), helper(engine_state, v)?);
} }
toml::Value::Table(m) toml::Value::Table(m)

View File

@ -331,7 +331,7 @@ impl Job {
// content: null}, {tag: a}. See to_xml_entry for more // content: null}, {tag: a}. See to_xml_entry for more
let attrs = match attrs { let attrs = match attrs {
Value::Record { val, .. } => val, Value::Record { val, .. } => val,
Value::Nothing { .. } => Record::new(), Value::Nothing { .. } => Box::new(Record::new()),
_ => { _ => {
return Err(ShellError::CantConvert { return Err(ShellError::CantConvert {
to_type: "XML".into(), to_type: "XML".into(),
@ -355,7 +355,7 @@ impl Job {
} }
}; };
self.write_tag(entry_span, tag, tag_span, attrs, content) self.write_tag(entry_span, tag, tag_span, *attrs, content)
} }
} }

View File

@ -57,7 +57,7 @@ pub fn value_to_yaml_value(v: &Value) -> Result<serde_yaml::Value, ShellError> {
} }
Value::Record { val, .. } => { Value::Record { val, .. } => {
let mut m = serde_yaml::Mapping::new(); let mut m = serde_yaml::Mapping::new();
for (k, v) in val { for (k, v) in &**val {
m.insert( m.insert(
serde_yaml::Value::String(k.clone()), serde_yaml::Value::String(k.clone()),
value_to_yaml_value(v)?, value_to_yaml_value(v)?,

View File

@ -186,7 +186,7 @@ pub fn highlight_search_in_table(
)?; )?;
if has_match { if has_match {
matches.push(Value::record(record, record_span)); matches.push(Value::record(*record, record_span));
} }
} }

View File

@ -29,7 +29,7 @@ fn helper_for_tables(
for val in values { for val in values {
match val { match val {
Value::Record { val, .. } => { Value::Record { val, .. } => {
for (key, value) in val { for (key, value) in &**val {
column_values column_values
.entry(key.clone()) .entry(key.clone())
.and_modify(|v: &mut Vec<Value>| v.push(value.clone())) .and_modify(|v: &mut Vec<Value>| v.push(value.clone()))
@ -90,7 +90,7 @@ pub fn calculate(
*val = mf(slice::from_ref(val), span, name)?; *val = mf(slice::from_ref(val), span, name)?;
Ok(()) Ok(())
})?; })?;
Ok(Value::record(record, span)) Ok(Value::record(*record, span))
} }
PipelineData::Value(Value::Range { val, .. }, ..) => { PipelineData::Value(Value::Range { val, .. }, ..) => {
let new_vals: Result<Vec<Value>, ShellError> = val let new_vals: Result<Vec<Value>, ShellError> = val

View File

@ -221,7 +221,7 @@ pub fn send_request(
Value::Record { val, .. } if body_type == BodyType::Form => { Value::Record { val, .. } if body_type == BodyType::Form => {
let mut data: Vec<(String, String)> = Vec::with_capacity(val.len()); let mut data: Vec<(String, String)> = Vec::with_capacity(val.len());
for (col, val) in val { for (col, val) in *val {
data.push((col, val.coerce_into_string()?)) data.push((col, val.coerce_into_string()?))
} }
@ -335,7 +335,7 @@ pub fn request_add_custom_headers(
match &headers { match &headers {
Value::Record { val, .. } => { Value::Record { val, .. } => {
for (k, v) in val { for (k, v) in &**val {
custom_headers.insert(k.to_string(), v.clone()); custom_headers.insert(k.to_string(), v.clone());
} }
} }
@ -345,7 +345,7 @@ pub fn request_add_custom_headers(
// single row([key1 key2]; [val1 val2]) // single row([key1 key2]; [val1 val2])
match &table[0] { match &table[0] {
Value::Record { val, .. } => { Value::Record { val, .. } => {
for (k, v) in val { for (k, v) in &**val {
custom_headers.insert(k.to_string(), v.clone()); custom_headers.insert(k.to_string(), v.clone());
} }
} }

View File

@ -69,7 +69,7 @@ fn to_url(input: PipelineData, head: Span) -> Result<PipelineData, ShellError> {
match value { match value {
Value::Record { ref val, .. } => { Value::Record { ref val, .. } => {
let mut row_vec = vec![]; let mut row_vec = vec![];
for (k, v) in val { for (k, v) in &**val {
match v.coerce_string() { match v.coerce_string() {
Ok(s) => { Ok(s) => {
row_vec.push((k.clone(), s)); row_vec.push((k.clone(), s));

View File

@ -413,7 +413,7 @@ fn handle_table_command(
} }
PipelineData::Value(Value::Record { val, .. }, ..) => { PipelineData::Value(Value::Record { val, .. }, ..) => {
input.data = PipelineData::Empty; input.data = PipelineData::Empty;
handle_record(input, cfg, val) handle_record(input, cfg, *val)
} }
PipelineData::Value(Value::LazyRecord { val, .. }, ..) => { PipelineData::Value(Value::LazyRecord { val, .. }, ..) => {
input.data = val.collect()?.into_pipeline_data(); input.data = val.collect()?.into_pipeline_data();

View File

@ -378,8 +378,8 @@ fn get_argument_for_color_value(
) -> Option<Argument> { ) -> Option<Argument> {
match color { match color {
Value::Record { val, .. } => { Value::Record { val, .. } => {
let record_exp: Vec<RecordItem> = val let record_exp: Vec<RecordItem> = (**val)
.into_iter() .iter()
.map(|(k, v)| { .map(|(k, v)| {
RecordItem::Pair( RecordItem::Pair(
Expression { Expression {

View File

@ -18,7 +18,7 @@ pub fn try_build_table(
let span = value.span(); let span = value.span();
match value { match value {
Value::List { vals, .. } => try_build_list(vals, ctrlc, config, span, style_computer), Value::List { vals, .. } => try_build_list(vals, ctrlc, config, span, style_computer),
Value::Record { val, .. } => try_build_map(val, span, style_computer, ctrlc, config), Value::Record { val, .. } => try_build_map(*val, span, style_computer, ctrlc, config),
val if matches!(val, Value::String { .. }) => { val if matches!(val, Value::String { .. }) => {
nu_value_to_string_clean(&val, config, style_computer).0 nu_value_to_string_clean(&val, config, style_computer).0
} }

View File

@ -38,7 +38,7 @@ pub(super) fn create_hooks(value: &Value) -> Result<Hooks, ShellError> {
Value::Record { val, .. } => { Value::Record { val, .. } => {
let mut hooks = Hooks::new(); let mut hooks = Hooks::new();
for (col, val) in val { for (col, val) in &**val {
match col.as_str() { match col.as_str() {
"pre_prompt" => hooks.pre_prompt = Some(val.clone()), "pre_prompt" => hooks.pre_prompt = Some(val.clone()),
"pre_execution" => hooks.pre_execution = Some(val.clone()), "pre_execution" => hooks.pre_execution = Some(val.clone()),

View File

@ -23,7 +23,7 @@ impl Matcher for Pattern {
Pattern::Record(field_patterns) => match value { Pattern::Record(field_patterns) => match value {
Value::Record { val, .. } => { Value::Record { val, .. } => {
'top: for field_pattern in field_patterns { 'top: for field_pattern in field_patterns {
for (col, val) in val { for (col, val) in &**val {
if col == &field_pattern.0 { if col == &field_pattern.0 {
// We have found the field // We have found the field
let result = field_pattern.1.match_value(val, matches); let result = field_pattern.1.match_value(val, matches);

View File

@ -76,7 +76,7 @@ pub trait Eval {
RecordItem::Spread(_, inner) => { RecordItem::Spread(_, inner) => {
match Self::eval::<D>(state, mut_state, inner)? { match Self::eval::<D>(state, mut_state, inner)? {
Value::Record { val: inner_val, .. } => { Value::Record { val: inner_val, .. } => {
for (col_name, val) in inner_val { for (col_name, val) in *inner_val {
if let Some(orig_span) = col_names.get(&col_name) { if let Some(orig_span) = col_names.get(&col_name) {
return Err(ShellError::ColumnDefinedTwice { return Err(ShellError::ColumnDefinedTwice {
col_name, col_name,

View File

@ -538,7 +538,7 @@ impl FromValue for Vec<Value> {
impl FromValue for Record { impl FromValue for Record {
fn from_value(v: Value) -> Result<Self, ShellError> { fn from_value(v: Value) -> Result<Self, ShellError> {
match v { match v {
Value::Record { val, .. } => Ok(val), Value::Record { val, .. } => Ok(*val),
v => Err(ShellError::CantConvert { v => Err(ShellError::CantConvert {
to_type: "Record".into(), to_type: "Record".into(),
from_type: v.get_type().to_string(), from_type: v.get_type().to_string(),

View File

@ -106,7 +106,7 @@ pub enum Value {
internal_span: Span, internal_span: Span,
}, },
Record { Record {
val: Record, val: Box<Record>,
// note: spans are being refactored out of Value // note: spans are being refactored out of Value
// please use .span() instead of matching this span value // please use .span() instead of matching this span value
#[serde(rename = "span")] #[serde(rename = "span")]
@ -534,7 +534,7 @@ impl Value {
/// Unwraps the inner [`Record`] value or returns an error if this `Value` is not a record /// Unwraps the inner [`Record`] value or returns an error if this `Value` is not a record
pub fn into_record(self) -> Result<Record, ShellError> { pub fn into_record(self) -> Result<Record, ShellError> {
if let Value::Record { val, .. } = self { if let Value::Record { val, .. } = self {
Ok(val) Ok(*val)
} else { } else {
self.cant_convert_to("record") self.cant_convert_to("record")
} }
@ -1994,7 +1994,7 @@ impl Value {
pub fn record(val: Record, span: Span) -> Value { pub fn record(val: Record, span: Span) -> Value {
Value::Record { Value::Record {
val, val: Box::new(val),
internal_span: span, internal_span: span,
} }
} }

View File

@ -52,17 +52,19 @@ fn colorize_value(value: &mut Value, config: &Config, style_computer: &StyleComp
// Take ownership of the record and reassign to &mut // Take ownership of the record and reassign to &mut
// We do this to have owned keys through `.into_iter` // We do this to have owned keys through `.into_iter`
let record = std::mem::take(val); let record = std::mem::take(val);
*val = record *val = Box::new(
.into_iter() record
.map(|(mut header, mut val)| { .into_iter()
colorize_value(&mut val, config, style_computer); .map(|(mut header, mut val)| {
colorize_value(&mut val, config, style_computer);
if let Some(color) = style.color_style { if let Some(color) = style.color_style {
header = color.paint(header).to_string(); header = color.paint(header).to_string();
} }
(header, val) (header, val)
}) })
.collect::<Record>(); .collect::<Record>(),
);
} }
Value::List { vals, .. } => { Value::List { vals, .. } => {
for val in vals { for val in vals {

View File

@ -90,7 +90,7 @@ fn build_table(
fn convert_nu_value_to_table_value(value: Value, config: &Config) -> TableValue { fn convert_nu_value_to_table_value(value: Value, config: &Config) -> TableValue {
match value { match value {
Value::Record { val, .. } => build_vertical_map(val, config), Value::Record { val, .. } => build_vertical_map(*val, config),
Value::List { vals, .. } => { Value::List { vals, .. } => {
let rebuild_array_as_map = is_valid_record(&vals) && count_columns_in_record(&vals) > 0; let rebuild_array_as_map = is_valid_record(&vals) && count_columns_in_record(&vals) > 0;
if rebuild_array_as_map { if rebuild_array_as_map {