Boxes record for smaller Value enum. (#12252)


# Description
Boxes `Record` inside `Value` to reduce memory usage; the size of `Value` goes from `72` to `56` bytes with this change.
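
For context, a Rust enum is sized by its largest variant, so keeping the relatively large `Record` payload inline forces every `Value` to reserve space for it; putting it behind a `Box` leaves only a pointer inline. A minimal sketch of the effect, using hypothetical stand-in types rather than the real `nu-protocol` definitions (exact sizes depend on the target and compiler):

```rust
#![allow(dead_code)]
use std::mem::size_of;

// Hypothetical stand-in for a large inline payload such as `Record`.
struct BigPayload([u64; 6]); // 48 bytes on a typical 64-bit target

enum Inline {
    Int(i64),
    Big(BigPayload), // the enum must reserve room for its largest variant
}

enum Boxed {
    Int(i64),
    Big(Box<BigPayload>), // only a pointer is stored inline
}

fn main() {
    // On a typical 64-bit target this prints something like 56 and 16:
    // boxing the big variant shrinks the enum to a pointer plus discriminant.
    println!("inline: {} bytes", size_of::<Inline>());
    println!("boxed:  {} bytes", size_of::<Boxed>());
}
```

The trade-off is an extra heap allocation and a pointer indirection whenever a record is created or accessed, which is presumably what the new record benchmarks added in this PR (`create`, `flat_access`, `nest_access`) are meant to keep an eye on.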
# User-Facing Changes

# Tests + Formatting

# After Submitting
Filip Andersson 2024-03-26 16:17:44 +01:00 committed by GitHub
parent 24d2c8dd8e
commit b70766e6f5
36 changed files with 174 additions and 78 deletions

View File

@@ -2,8 +2,9 @@ use nu_cli::{eval_source, evaluate_commands};
use nu_parser::parse;
use nu_plugin::{Encoder, EncodingType, PluginCallResponse, PluginOutput};
use nu_protocol::{
engine::EngineState, eval_const::create_nu_constant, PipelineData, Span, Spanned, Value,
NU_VARIABLE_ID,
engine::{EngineState, Stack},
eval_const::create_nu_constant,
PipelineData, Span, Spanned, Value, NU_VARIABLE_ID,
};
use nu_std::load_standard_library;
use nu_utils::{get_default_config, get_default_env};
@@ -54,6 +55,61 @@ fn setup_engine() -> EngineState {
engine_state
}
fn bench_command(bencher: divan::Bencher, scaled_command: String) {
bench_command_with_custom_stack_and_engine(
bencher,
scaled_command,
Stack::new(),
setup_engine(),
)
}
fn bench_command_with_custom_stack_and_engine(
bencher: divan::Bencher,
scaled_command: String,
stack: nu_protocol::engine::Stack,
mut engine: EngineState,
) {
load_standard_library(&mut engine).unwrap();
let commands = Spanned {
span: Span::unknown(),
item: scaled_command,
};
bencher
.with_inputs(|| engine.clone())
.bench_values(|mut engine| {
evaluate_commands(
&commands,
&mut engine,
&mut stack.clone(),
PipelineData::empty(),
None,
)
.unwrap();
})
}
fn setup_stack_and_engine_from_command(command: &str) -> (Stack, EngineState) {
let mut engine = setup_engine();
let commands = Spanned {
span: Span::unknown(),
item: command.to_string(),
};
let mut stack = Stack::new();
evaluate_commands(
&commands,
&mut engine,
&mut stack,
PipelineData::empty(),
None,
)
.unwrap();
(stack, engine)
}
// FIXME: All benchmarks live in this 1 file to speed up build times when benchmarking.
// When the *_benchmarks functions were in different files, `cargo bench` would build
// an executable for every single one - incredibly slowly. Would be nice to figure out
@@ -70,30 +126,68 @@ fn load_standard_lib(bencher: divan::Bencher) {
}
#[divan::bench_group]
mod eval_commands {
mod record {
use super::*;
fn bench_command(bencher: divan::Bencher, scaled_command: String) {
let mut engine = setup_engine();
load_standard_library(&mut engine).unwrap();
let commands = Spanned {
span: Span::unknown(),
item: scaled_command,
};
bencher
.with_inputs(|| engine.clone())
.bench_values(|mut engine| {
evaluate_commands(
&commands,
&mut engine,
&mut nu_protocol::engine::Stack::new(),
PipelineData::empty(),
None,
)
.unwrap();
})
fn create_flat_record_string(n: i32) -> String {
let mut s = String::from("let record = {");
for i in 0..n {
s.push_str(&format!("col_{}: {}", i, i));
if i < n - 1 {
s.push_str(", ");
}
}
s.push('}');
s
}
fn create_nested_record_string(depth: i32) -> String {
let mut s = String::from("let record = {");
for _ in 0..depth {
s.push_str("col: {");
}
s.push_str("col_final: 0");
for _ in 0..depth {
s.push('}');
}
s.push('}');
s
}
#[divan::bench(args = [1, 10, 100, 1000])]
fn create(bencher: divan::Bencher, n: i32) {
bench_command(bencher, create_flat_record_string(n));
}
#[divan::bench(args = [1, 10, 100, 1000])]
fn flat_access(bencher: divan::Bencher, n: i32) {
let (stack, engine) = setup_stack_and_engine_from_command(&create_flat_record_string(n));
bench_command_with_custom_stack_and_engine(
bencher,
"$record.col_0 | ignore".to_string(),
stack,
engine,
);
}
#[divan::bench(args = [1, 2, 4, 8, 16, 32, 64, 128])]
fn nest_access(bencher: divan::Bencher, depth: i32) {
let (stack, engine) =
setup_stack_and_engine_from_command(&create_nested_record_string(depth));
let nested_access = ".col".repeat(depth as usize);
bench_command_with_custom_stack_and_engine(
bencher,
format!("$record{} | ignore", nested_access),
stack,
engine,
);
}
}
#[divan::bench_group]
mod eval_commands {
use super::*;
#[divan::bench(args = [100, 1_000, 10_000])]
fn interleave(bencher: divan::Bencher, n: i32) {

View File

@@ -319,7 +319,7 @@ fn recursive_value(val: Value, sublevels: Vec<Vec<u8>>) -> Value {
let span = val.span();
match val {
Value::Record { val, .. } => {
for item in val {
for item in *val {
// Check if index matches with sublevel
if item.0.as_bytes().to_vec() == next_sublevel {
// If matches try to fetch recursively the next

View File

@@ -17,7 +17,7 @@ pub fn eval_env_change_hook(
if let Some(hook) = env_change_hook {
match hook {
Value::Record { val, .. } => {
for (env_name, hook_value) in &val {
for (env_name, hook_value) in &*val {
let before = engine_state
.previous_env_vars
.get(env_name)
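
Most of the call-site churn in this PR follows the pattern in the two hunks above: `val` is now a `Box<Record>`, so code that consumed the record by value derefs the box (`*val`), while code that only iterated by reference borrows through it (`&*val`, or `&**val` where matching on a borrowed `Value` binds `val` as `&Box<Record>`). A minimal sketch of those access patterns, using a hypothetical alias in place of the real `nu_protocol::Record`:

```rust
// Hypothetical stand-in for nu_protocol::Record: ordered (column, value) pairs.
type Record = Vec<(String, i64)>;

fn main() {
    let val: Box<Record> = Box::new(vec![("a".into(), 1), ("b".into(), 2)]);

    // Borrowing iteration: deref the Box, then borrow the record.
    for (k, v) in &*val {
        println!("{k}: {v}");
    }

    // Consuming iteration: move the record out of the Box with `*val`.
    for (k, v) in *val {
        println!("{k}: {v}");
    }
}
```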

View File

@@ -164,7 +164,7 @@ impl NuDataFrame {
conversion::insert_record(&mut column_values, record, &maybe_schema)?
}
Value::Record { val: record, .. } => {
conversion::insert_record(&mut column_values, record, &maybe_schema)?
conversion::insert_record(&mut column_values, *record, &maybe_schema)?
}
_ => {
let key = "0".to_string();

View File

@@ -319,7 +319,7 @@ fn describe_value(
record!(
"type" => Value::string("record", head),
"lazy" => Value::bool(false, head),
"columns" => Value::record(val, head),
"columns" => Value::record(*val, head),
),
head,
)
@@ -410,7 +410,7 @@ fn describe_value(
)?);
}
record.push("columns", Value::record(val, head));
record.push("columns", Value::record(*val, head));
} else {
let cols = val.column_names();
record.push("length", Value::int(cols.len() as i64, head));

View File

@@ -238,7 +238,7 @@ impl<'a> std::fmt::Debug for DebuggableValue<'a> {
Value::Record { val, .. } => {
write!(f, "{{")?;
let mut first = true;
for (col, value) in val.into_iter() {
for (col, value) in (&**val).into_iter() {
if !first {
write!(f, ", ")?;
}

View File

@@ -182,7 +182,7 @@ fn run_histogram(
match v {
// parse record, and fill valid value to actual input.
Value::Record { val, .. } => {
for (c, v) in val {
for (c, v) in *val {
if &c == col_name {
if let Ok(v) = HashableValue::from_value(v, head_span) {
inputs.push(v);

View File

@@ -135,7 +135,7 @@ fn into_record(
.collect(),
span,
),
Value::Record { val, .. } => Value::record(val, span),
Value::Record { val, .. } => Value::record(*val, span),
Value::Error { .. } => input,
other => Value::error(
ShellError::TypeMismatch {

View File

@@ -58,7 +58,7 @@ impl Command for LoadEnv {
}
None => match input {
PipelineData::Value(Value::Record { val, .. }, ..) => {
for (env_var, rhs) in val {
for (env_var, rhs) in *val {
let env_var_ = env_var.as_str();
if ["FILE_PWD", "CURRENT_FILE"].contains(&env_var_) {
return Err(ShellError::AutomaticEnvVarSetManually {

View File

@@ -95,7 +95,7 @@ fn with_env(
// single row([[X W]; [Y Z]])
match &table[0] {
Value::Record { val, .. } => {
for (k, v) in val {
for (k, v) in &**val {
env.insert(k.to_string(), v.clone());
}
}
@@ -123,7 +123,7 @@ fn with_env(
}
// when get object by `open x.json` or `from json`
Value::Record { val, .. } => {
for (k, v) in val {
for (k, v) in &**val {
env.insert(k.clone(), v.clone());
}
}

View File

@@ -112,7 +112,7 @@ fn default(
record.push(column.item.clone(), value.clone());
}
Value::record(record, span)
Value::record(*record, span)
}
_ => item,
}

View File

@@ -129,7 +129,7 @@ fn drop_cols(
} => {
let len = record.len().saturating_sub(columns);
record.truncate(len);
Ok(Value::record(record, span).into_pipeline_data_with_metadata(metadata))
Ok(Value::record(*record, span).into_pipeline_data_with_metadata(metadata))
}
// Propagate errors
Value::Error { error, .. } => Err(*error),

View File

@@ -170,7 +170,7 @@ fn flat_value(columns: &[CellPath], item: Value, all: bool) -> Vec<Value> {
match value {
Value::Record { val, .. } => {
if need_flatten {
for (col, val) in val {
for (col, val) in *val {
if out.contains_key(&col) {
out.insert(format!("{column}_{col}"), val);
} else {
@@ -178,9 +178,9 @@ fn flat_value(columns: &[CellPath], item: Value, all: bool) -> Vec<Value> {
}
}
} else if out.contains_key(&column) {
out.insert(format!("{column}_{column}"), Value::record(val, span));
out.insert(format!("{column}_{column}"), Value::record(*val, span));
} else {
out.insert(column, Value::record(val, span));
out.insert(column, Value::record(*val, span));
}
}
Value::List { vals, .. } => {

View File

@@ -228,7 +228,7 @@ fn rename(
}
}
Value::record(record, span)
Value::record(*record, span)
}
// Propagate errors by explicitly matching them before the final case.
Value::Error { .. } => item.clone(),

View File

@@ -149,7 +149,7 @@ impl Command for Sort {
// Records have two sorting methods, toggled by presence or absence of -v
PipelineData::Value(Value::Record { val, .. }, ..) => {
let sort_by_value = call.has_flag(engine_state, stack, "values")?;
let record = sort_record(val, span, sort_by_value, reverse, insensitive, natural);
let record = sort_record(*val, span, sort_by_value, reverse, insensitive, natural);
Ok(record.into_pipeline_data())
}
// Other values are sorted here

View File

@@ -111,7 +111,7 @@ pub fn get_values<'a>(
for item in input {
match item {
Value::Record { val, .. } => {
for (k, v) in val {
for (k, v) in &**val {
if let Some(vec) = output.get_mut(k) {
vec.push(v.clone());
} else {

View File

@@ -417,7 +417,7 @@ mod tests {
content_tag(
"nu",
indexmap! {},
&vec![
&[
content_tag("dev", indexmap! {}, &[content_string("Andrés")]),
content_tag("dev", indexmap! {}, &[content_string("JT")]),
content_tag("dev", indexmap! {}, &[content_string("Yehuda")])

View File

@@ -135,7 +135,7 @@ pub fn value_to_json_value(v: &Value) -> Result<nu_json::Value, ShellError> {
}
Value::Record { val, .. } => {
let mut m = nu_json::Map::new();
for (k, v) in val {
for (k, v) in &**val {
m.insert(k.clone(), value_to_json_value(v)?);
}
nu_json::Value::Object(m)

View File

@@ -252,7 +252,7 @@ pub fn value_to_string(
)),
Value::Record { val, .. } => {
let mut collection = vec![];
for (col, val) in val {
for (col, val) in &**val {
collection.push(if needs_quotes(col) {
format!(
"{idt_po}\"{}\": {}",

View File

@@ -60,7 +60,7 @@ fn helper(engine_state: &EngineState, v: &Value) -> Result<toml::Value, ShellErr
Value::String { val, .. } | Value::Glob { val, .. } => toml::Value::String(val.clone()),
Value::Record { val, .. } => {
let mut m = toml::map::Map::new();
for (k, v) in val {
for (k, v) in &**val {
m.insert(k.clone(), helper(engine_state, v)?);
}
toml::Value::Table(m)

View File

@@ -331,7 +331,7 @@ impl Job {
// content: null}, {tag: a}. See to_xml_entry for more
let attrs = match attrs {
Value::Record { val, .. } => val,
Value::Nothing { .. } => Record::new(),
Value::Nothing { .. } => Box::new(Record::new()),
_ => {
return Err(ShellError::CantConvert {
to_type: "XML".into(),
@@ -355,7 +355,7 @@ impl Job {
}
};
self.write_tag(entry_span, tag, tag_span, attrs, content)
self.write_tag(entry_span, tag, tag_span, *attrs, content)
}
}

View File

@@ -57,7 +57,7 @@ pub fn value_to_yaml_value(v: &Value) -> Result<serde_yaml::Value, ShellError> {
}
Value::Record { val, .. } => {
let mut m = serde_yaml::Mapping::new();
for (k, v) in val {
for (k, v) in &**val {
m.insert(
serde_yaml::Value::String(k.clone()),
value_to_yaml_value(v)?,

View File

@@ -186,7 +186,7 @@ pub fn highlight_search_in_table(
)?;
if has_match {
matches.push(Value::record(record, record_span));
matches.push(Value::record(*record, record_span));
}
}

View File

@@ -29,7 +29,7 @@ fn helper_for_tables(
for val in values {
match val {
Value::Record { val, .. } => {
for (key, value) in val {
for (key, value) in &**val {
column_values
.entry(key.clone())
.and_modify(|v: &mut Vec<Value>| v.push(value.clone()))
@@ -90,7 +90,7 @@ pub fn calculate(
*val = mf(slice::from_ref(val), span, name)?;
Ok(())
})?;
Ok(Value::record(record, span))
Ok(Value::record(*record, span))
}
PipelineData::Value(Value::Range { val, .. }, ..) => {
let new_vals: Result<Vec<Value>, ShellError> = val

View File

@@ -221,7 +221,7 @@ pub fn send_request(
Value::Record { val, .. } if body_type == BodyType::Form => {
let mut data: Vec<(String, String)> = Vec::with_capacity(val.len());
for (col, val) in val {
for (col, val) in *val {
data.push((col, val.coerce_into_string()?))
}
@@ -335,7 +335,7 @@ pub fn request_add_custom_headers(
match &headers {
Value::Record { val, .. } => {
for (k, v) in val {
for (k, v) in &**val {
custom_headers.insert(k.to_string(), v.clone());
}
}
@@ -345,7 +345,7 @@ pub fn request_add_custom_headers(
// single row([key1 key2]; [val1 val2])
match &table[0] {
Value::Record { val, .. } => {
for (k, v) in val {
for (k, v) in &**val {
custom_headers.insert(k.to_string(), v.clone());
}
}

View File

@@ -69,7 +69,7 @@ fn to_url(input: PipelineData, head: Span) -> Result<PipelineData, ShellError> {
match value {
Value::Record { ref val, .. } => {
let mut row_vec = vec![];
for (k, v) in val {
for (k, v) in &**val {
match v.coerce_string() {
Ok(s) => {
row_vec.push((k.clone(), s));

View File

@@ -413,7 +413,7 @@ fn handle_table_command(
}
PipelineData::Value(Value::Record { val, .. }, ..) => {
input.data = PipelineData::Empty;
handle_record(input, cfg, val)
handle_record(input, cfg, *val)
}
PipelineData::Value(Value::LazyRecord { val, .. }, ..) => {
input.data = val.collect()?.into_pipeline_data();

View File

@@ -378,8 +378,8 @@ fn get_argument_for_color_value(
) -> Option<Argument> {
match color {
Value::Record { val, .. } => {
let record_exp: Vec<RecordItem> = val
.into_iter()
let record_exp: Vec<RecordItem> = (**val)
.iter()
.map(|(k, v)| {
RecordItem::Pair(
Expression {

View File

@@ -18,7 +18,7 @@ pub fn try_build_table(
let span = value.span();
match value {
Value::List { vals, .. } => try_build_list(vals, ctrlc, config, span, style_computer),
Value::Record { val, .. } => try_build_map(val, span, style_computer, ctrlc, config),
Value::Record { val, .. } => try_build_map(*val, span, style_computer, ctrlc, config),
val if matches!(val, Value::String { .. }) => {
nu_value_to_string_clean(&val, config, style_computer).0
}

View File

@@ -38,7 +38,7 @@ pub(super) fn create_hooks(value: &Value) -> Result<Hooks, ShellError> {
Value::Record { val, .. } => {
let mut hooks = Hooks::new();
for (col, val) in val {
for (col, val) in &**val {
match col.as_str() {
"pre_prompt" => hooks.pre_prompt = Some(val.clone()),
"pre_execution" => hooks.pre_execution = Some(val.clone()),

View File

@@ -23,7 +23,7 @@ impl Matcher for Pattern {
Pattern::Record(field_patterns) => match value {
Value::Record { val, .. } => {
'top: for field_pattern in field_patterns {
for (col, val) in val {
for (col, val) in &**val {
if col == &field_pattern.0 {
// We have found the field
let result = field_pattern.1.match_value(val, matches);

View File

@@ -76,7 +76,7 @@ pub trait Eval {
RecordItem::Spread(_, inner) => {
match Self::eval::<D>(state, mut_state, inner)? {
Value::Record { val: inner_val, .. } => {
for (col_name, val) in inner_val {
for (col_name, val) in *inner_val {
if let Some(orig_span) = col_names.get(&col_name) {
return Err(ShellError::ColumnDefinedTwice {
col_name,

View File

@@ -538,7 +538,7 @@ impl FromValue for Vec<Value> {
impl FromValue for Record {
fn from_value(v: Value) -> Result<Self, ShellError> {
match v {
Value::Record { val, .. } => Ok(val),
Value::Record { val, .. } => Ok(*val),
v => Err(ShellError::CantConvert {
to_type: "Record".into(),
from_type: v.get_type().to_string(),

View File

@@ -106,7 +106,7 @@ pub enum Value {
internal_span: Span,
},
Record {
val: Record,
val: Box<Record>,
// note: spans are being refactored out of Value
// please use .span() instead of matching this span value
#[serde(rename = "span")]
@@ -534,7 +534,7 @@ impl Value {
/// Unwraps the inner [`Record`] value or returns an error if this `Value` is not a record
pub fn into_record(self) -> Result<Record, ShellError> {
if let Value::Record { val, .. } = self {
Ok(val)
Ok(*val)
} else {
self.cant_convert_to("record")
}
@@ -1994,7 +1994,7 @@ impl Value {
pub fn record(val: Record, span: Span) -> Value {
Value::Record {
val,
val: Box::new(val),
internal_span: span,
}
}

View File

@@ -52,7 +52,8 @@ fn colorize_value(value: &mut Value, config: &Config, style_computer: &StyleComp
// Take ownership of the record and reassign to &mut
// We do this to have owned keys through `.into_iter`
let record = std::mem::take(val);
*val = record
*val = Box::new(
record
.into_iter()
.map(|(mut header, mut val)| {
colorize_value(&mut val, config, style_computer);
@@ -62,7 +63,8 @@ fn colorize_value(value: &mut Value, config: &Config, style_computer: &StyleComp
}
(header, val)
})
.collect::<Record>();
.collect::<Record>(),
);
}
Value::List { vals, .. } => {
for val in vals {

View File

@@ -90,7 +90,7 @@ fn build_table(
fn convert_nu_value_to_table_value(value: Value, config: &Config) -> TableValue {
match value {
Value::Record { val, .. } => build_vertical_map(val, config),
Value::Record { val, .. } => build_vertical_map(*val, config),
Value::List { vals, .. } => {
let rebuild_array_as_map = is_valid_record(&vals) && count_columns_in_record(&vals) > 0;
if rebuild_array_as_map {