Mirror of https://github.com/nushell/nushell.git
Make EngineState clone cheaper with Arc on all of the heavy objects (#12229)
# Description

This makes many of the larger objects in `EngineState` into `Arc`, and uses `Arc::make_mut` to do clone-on-write if the reference is not unique. This is generally very cheap, giving us the best of both worlds - allowing us to mutate without cloning if we have an exclusive reference, and cloning if we don't.

This started as more of a curiosity for me after remembering that `Arc::make_mut` exists and can make using `Arc` for mostly immutable data that sometimes needs to be changed very convenient, and also after hearing someone complain about memory usage on Discord - this is a somewhat significant win for that.

The exact objects that were wrapped in `Arc`:

- `files`, `file_contents` - the strings and byte buffers
- `decls` - the whole `Vec`, but mostly to avoid lots of individual `malloc()` calls on Clone rather than for memory usage
- `blocks` - the blocks themselves, rather than the outer `Vec`
- `modules` - the modules themselves, rather than the outer `Vec`
- `env_vars`, `previous_env_vars` - the entire maps
- `config`

The changes required were relatively minimal, but this is a breaking API change. In particular, blocks are added as `Arc`s, to allow the parser cache functionality to work.

With my normal nu config, running on Linux, this saves me about 15 MiB of process memory usage when running interactively (65 MiB → 50 MiB).

This also makes quick command executions cheaper, particularly since every REPL loop now involves a clone of the engine state so that we can recover from a panic. It also reduces memory usage where the engine state needs to be cloned and sent to another thread or kept within an iterator.

# User-Facing Changes

Shouldn't be any, since it's all internal stuff, but it does change some public interfaces, so it's a breaking change.
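As a rough illustration of the clone-on-write behavior this relies on, here is a minimal standalone sketch (the `State` type and its field are hypothetical stand-ins, not the actual `EngineState` definition):

```rust
use std::collections::HashMap;
use std::sync::Arc;

// Hypothetical stand-in for a state type whose heavy fields live behind Arc:
// cloning it only bumps reference counts instead of deep-copying the maps.
#[derive(Clone)]
struct State {
    env_vars: Arc<HashMap<String, String>>,
}

fn main() {
    let mut a = State {
        env_vars: Arc::new(HashMap::new()),
    };

    // Cheap clone: both values share the same allocation.
    let b = a.clone();
    assert!(Arc::ptr_eq(&a.env_vars, &b.env_vars));

    // Arc::make_mut clones the map here only because `b` still shares it
    // (clone-on-write); with a unique reference it would mutate in place.
    Arc::make_mut(&mut a.env_vars).insert("FOO".into(), "bar".into());
    assert!(!Arc::ptr_eq(&a.env_vars, &b.env_vars));
    assert!(b.env_vars.is_empty());
}
```

The diff below applies the same pattern to fields like `env_vars`, `previous_env_vars`, and `config`: wrap the field in `Arc`, then go through `Arc::make_mut` at the few places that still need to mutate it.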
This commit is contained in:
Parent: a29efe28f7
Commit: cf321ab510
@@ -229,7 +229,7 @@ pub fn find_non_whitespace_index(contents: &[u8], start: usize) -> usize {
     }
 }
 
-pub fn is_passthrough_command(working_set_file_contents: &[(Vec<u8>, usize, usize)]) -> bool {
+pub fn is_passthrough_command(working_set_file_contents: &[(Arc<Vec<u8>>, usize, usize)]) -> bool {
     for (contents, _, _) in working_set_file_contents {
         let last_pipe_pos_rev = contents.iter().rev().position(|x| x == &b'|');
         let last_pipe_pos = last_pipe_pos_rev.map(|x| contents.len() - x).unwrap_or(0);
@@ -124,9 +124,9 @@ impl NuCompleter {
 
         let output = parse(&mut working_set, Some("completer"), line.as_bytes(), false);
 
-        for pipeline in output.pipelines.into_iter() {
-            for pipeline_element in pipeline.elements {
-                let flattened = flatten_pipeline_element(&working_set, &pipeline_element);
+        for pipeline in output.pipelines.iter() {
+            for pipeline_element in &pipeline.elements {
+                let flattened = flatten_pipeline_element(&working_set, pipeline_element);
                 let mut spans: Vec<String> = vec![];
 
                 for (flat_idx, flat) in flattened.iter().enumerate() {
@@ -1,3 +1,5 @@
+use std::sync::Arc;
+
 use crate::util::eval_source;
 use log::info;
 use log::trace;
@@ -117,7 +119,7 @@ pub fn evaluate_file(
         std::process::exit(1);
     }
 
-    for block in &mut working_set.delta.blocks {
+    for block in working_set.delta.blocks.iter_mut().map(Arc::make_mut) {
         if block.signature.name == "main" {
             block.signature.name = source_filename.to_string_lossy().to_string();
         } else if block.signature.name.starts_with("main ") {
@@ -1,3 +1,5 @@
+use std::sync::Arc;
+
 use crate::util::get_guaranteed_cwd;
 use miette::Result;
 use nu_engine::{eval_block, eval_block_with_early_return};
@@ -36,8 +38,7 @@ pub fn eval_env_change_hook(
                         "env_change",
                     )?;
 
-                    engine_state
-                        .previous_env_vars
+                    Arc::make_mut(&mut engine_state.previous_env_vars)
                         .insert(env_name.to_string(), after);
                 }
             }
@@ -7,6 +7,7 @@ use nu_protocol::{
     PipelineIterator, ShellError, Signature, Span, SyntaxShape, Type, Value,
 };
 use std::collections::HashSet;
+use std::sync::Arc;
 
 #[derive(Clone)]
 pub struct UpdateCells;
@@ -102,7 +103,7 @@ impl Command for UpdateCells {
 
         let metadata = input.metadata();
         let ctrlc = engine_state.ctrlc.clone();
-        let block: Block = engine_state.get_block(block.block_id).clone();
+        let block: Arc<Block> = engine_state.get_block(block.block_id).clone();
         let eval_block_fn = get_eval_block(&engine_state);
 
         let span = call.head;
@@ -140,7 +141,7 @@ struct UpdateCellIterator {
     columns: Option<HashSet<String>>,
     engine_state: EngineState,
     stack: Stack,
-    block: Block,
+    block: Arc<Block>,
     eval_block_fn: EvalBlockFn,
     span: Span,
 }
@@ -6,6 +6,7 @@ use nu_protocol::{
     Example, PipelineData, Signature, Span, Type, Value,
 };
 use std::collections::HashSet;
+use std::sync::Arc;
 
 pub fn check_example_input_and_output_types_match_command_signature(
     example: &Example,
@@ -76,7 +77,9 @@ fn eval_pipeline_without_terminal_expression(
     let (mut block, delta) = parse(src, engine_state);
     if block.pipelines.len() == 1 {
         let n_expressions = block.pipelines[0].elements.len();
-        block.pipelines[0].elements.truncate(&n_expressions - 1);
+        Arc::make_mut(&mut block).pipelines[0]
+            .elements
+            .truncate(&n_expressions - 1);
 
         if !block.pipelines[0].elements.is_empty() {
             let empty_input = PipelineData::empty();
@@ -90,7 +93,7 @@ fn eval_pipeline_without_terminal_expression(
     }
 }
 
-pub fn parse(contents: &str, engine_state: &EngineState) -> (Block, StateDelta) {
+pub fn parse(contents: &str, engine_state: &EngineState) -> (Arc<Block>, StateDelta) {
     let mut working_set = StateWorkingSet::new(engine_state);
     let output = nu_parser::parse(&mut working_set, None, contents.as_bytes(), false);
 
@@ -102,7 +105,7 @@ pub fn parse(contents: &str, engine_state: &EngineState) -> (Block, StateDelta)
 }
 
 pub fn eval_block(
-    block: Block,
+    block: Arc<Block>,
     input: PipelineData,
     cwd: &std::path::Path,
     engine_state: &mut Box<EngineState>,
@@ -53,9 +53,9 @@ impl Command for Ast {
         if to_json {
             // Get the block as json
             let serde_block_str = if minify {
-                serde_json::to_string(&block_output)
+                serde_json::to_string(&*block_output)
             } else {
-                serde_json::to_string_pretty(&block_output)
+                serde_json::to_string_pretty(&*block_output)
             };
             let block_json = match serde_block_str {
                 Ok(json) => json,
@@ -46,7 +46,7 @@ impl Command for ViewFiles {
         for (file, start, end) in engine_state.files() {
             records.push(Value::record(
                 record! {
-                    "filename" => Value::string(file, call.head),
+                    "filename" => Value::string(&**file, call.head),
                     "start" => Value::int(*start as i64, call.head),
                     "end" => Value::int(*end as i64, call.head),
                     "size" => Value::int(*end as i64 - *start as i64, call.head),
@@ -1,3 +1,5 @@
+use std::sync::Arc;
+
 use nu_protocol::ast::{Call, Expr, Expression, RecordItem};
 use nu_protocol::engine::{Command, EngineState, Stack, StateWorkingSet};
 use nu_protocol::{
@@ -97,7 +99,7 @@ impl Command for FromNuon {
                 ty: Type::Nothing,
             }
         } else {
            let mut pipeline = block.pipelines.remove(0);
-            let mut pipeline = block.pipelines.remove(0);
+            let mut pipeline = Arc::make_mut(&mut block).pipelines.remove(0);
 
             if let Some(expr) = pipeline.elements.get(1) {
                 return Err(ShellError::GenericError {
@@ -18,6 +18,7 @@ use std::io::{BufRead, BufReader, Read, Write};
 use std::path::{Path, PathBuf};
 use std::process::{Command as CommandSys, Stdio};
 use std::sync::mpsc;
+use std::sync::Arc;
 use std::thread;
 
 #[derive(Clone)]
@@ -442,7 +443,7 @@ impl ExternalCommand {
                 let mut stack = stack.clone();
 
                 // Turn off color as we pass data through
-                engine_state.config.use_ansi_coloring = false;
+                Arc::make_mut(&mut engine_state.config).use_ansi_coloring = false;
 
                 // Pipe input into the external command's stdin
                 if let Some(mut stdin_write) = child.as_mut().stdin.take() {
@@ -1,5 +1,6 @@
 use std::collections::HashMap;
 use std::path::{Path, PathBuf};
+use std::sync::Arc;
 
 use nu_protocol::ast::{Call, Expr};
 use nu_protocol::engine::{EngineState, Stack, StateWorkingSet, PWD_ENV};
@@ -68,7 +69,8 @@ pub fn convert_env_values(engine_state: &mut EngineState, stack: &Stack) -> Opti
     }
 
     if let Ok(last_overlay_name) = &stack.last_overlay_name() {
-        if let Some(env_vars) = engine_state.env_vars.get_mut(last_overlay_name) {
+        if let Some(env_vars) = Arc::make_mut(&mut engine_state.env_vars).get_mut(last_overlay_name)
+        {
             for (k, v) in new_scope {
                 env_vars.insert(k, v);
             }
@@ -1,3 +1,5 @@
+use std::sync::Arc;
+
 use nu_engine::eval_block;
 use nu_parser::parse;
 use nu_protocol::debugger::WithoutDebug;
@@ -88,7 +90,8 @@ fn eval_source2(
     //
     // So we LITERALLY ignore all expressions except the LAST.
     if block.len() > 1 {
-        block.pipelines.drain(..block.pipelines.len() - 1);
+        let range = ..block.pipelines.len() - 1;
+        Arc::make_mut(&mut block).pipelines.drain(range);
     }
 
     let stack = &mut stack.push_redirection(
@@ -287,7 +287,7 @@ impl LanguageServer {
         for (file_path, file_start, file_end) in working_set.files() {
             if span.start >= *file_start && span.start < *file_end {
                 return Some(GotoDefinitionResponse::Scalar(Location {
-                    uri: Url::from_file_path(file_path).ok()?,
+                    uri: Url::from_file_path(&**file_path).ok()?,
                     range: Self::span_to_range(span, file, *file_start),
                 }));
             }
@@ -18,8 +18,11 @@ use nu_protocol::{
     span, Alias, BlockId, DeclId, Module, ModuleId, ParseError, PositionalArg,
     ResolvedImportPattern, Span, Spanned, SyntaxShape, Type, Value, VarId,
 };
-use std::collections::{HashMap, HashSet};
 use std::path::{Path, PathBuf};
+use std::{
+    collections::{HashMap, HashSet},
+    sync::Arc,
+};
 
 pub const LIB_DIRS_VAR: &str = "NU_LIB_DIRS";
 #[cfg(feature = "plugin")]
@@ -1903,7 +1906,7 @@ fn parse_module_file(
     // Restore the currently parsed directory back
     working_set.currently_parsed_cwd = prev_currently_parsed_cwd;
 
-    let _ = working_set.add_block(block);
+    let _ = working_set.add_block(Arc::new(block));
     let module_id = working_set.add_module(&module_name, module, module_comments);
 
     Some(module_id)
@@ -2153,7 +2156,7 @@ pub fn parse_module(
     let (block, module, inner_comments) =
         parse_module_block(working_set, block_span, module_name.as_bytes());
 
-    let block_id = working_set.add_block(block);
+    let block_id = working_set.add_block(Arc::new(block));
 
     module_comments.extend(inner_comments);
     let module_id = working_set.add_module(&module_name, module, module_comments);
@@ -2962,7 +2965,7 @@ pub fn parse_let(working_set: &mut StateWorkingSet, spans: &[Span]) -> Pipeline
 
                     let output_type = rvalue_block.output_type();
 
-                    let block_id = working_set.add_block(rvalue_block);
+                    let block_id = working_set.add_block(Arc::new(rvalue_block));
 
                     let rvalue = Expression {
                         expr: Expr::Block(block_id),
@@ -3219,7 +3222,7 @@ pub fn parse_mut(working_set: &mut StateWorkingSet, spans: &[Span]) -> Pipeline
 
                     let output_type = rvalue_block.output_type();
 
-                    let block_id = working_set.add_block(rvalue_block);
+                    let block_id = working_set.add_block(Arc::new(rvalue_block));
 
                     let rvalue = Expression {
                         expr: Expr::Block(block_id),
@@ -3516,8 +3519,6 @@ pub fn parse_where(working_set: &mut StateWorkingSet, lite_command: &LiteCommand
 
 #[cfg(feature = "plugin")]
 pub fn parse_register(working_set: &mut StateWorkingSet, lite_command: &LiteCommand) -> Pipeline {
-    use std::sync::Arc;
-
     use nu_plugin::{get_signature, PersistentPlugin, PluginDeclaration};
     use nu_protocol::{
         engine::Stack, IntoSpanned, PluginIdentity, PluginSignature, RegisteredPlugin,
@@ -31,7 +31,10 @@ use crate::parse_keywords::{
 
 use itertools::Itertools;
 use log::trace;
-use std::collections::{HashMap, HashSet};
+use std::{
+    collections::{HashMap, HashSet},
+    sync::Arc,
+};
 use std::{num::ParseIntError, str};
 
 #[cfg(feature = "plugin")]
@@ -2096,7 +2099,7 @@ pub fn parse_full_cell_path(
 
                 let ty = output.output_type();
 
-                let block_id = working_set.add_block(output);
+                let block_id = working_set.add_block(Arc::new(output));
                 tokens.next();
 
                 (
@@ -3146,7 +3149,7 @@ pub fn parse_row_condition(working_set: &mut StateWorkingSet, spans: &[Span]) ->
                 default_value: None,
             });
 
-            working_set.add_block(block)
+            working_set.add_block(Arc::new(block))
         }
     };
 
@@ -4136,7 +4139,7 @@ pub fn parse_block_expression(working_set: &mut StateWorkingSet, span: Span) ->
 
     working_set.exit_scope();
 
-    let block_id = working_set.add_block(output);
+    let block_id = working_set.add_block(Arc::new(output));
 
     Expression {
         expr: Expr::Block(block_id),
@@ -4477,7 +4480,7 @@ pub fn parse_closure_expression(
 
     working_set.exit_scope();
 
-    let block_id = working_set.add_block(output);
+    let block_id = working_set.add_block(Arc::new(output));
 
     Expression {
         expr: Expr::Closure(block_id),
@@ -5177,7 +5180,7 @@ pub fn parse_expression(working_set: &mut StateWorkingSet, spans: &[Span]) -> Ex
         let ty = output.ty.clone();
         block.pipelines = vec![Pipeline::from_vec(vec![output])];
 
-        let block_id = working_set.add_block(block);
+        let block_id = working_set.add_block(Arc::new(block));
 
         let mut env_vars = vec![];
         for sh in shorthand {
@@ -6176,7 +6179,7 @@ fn wrap_expr_with_collect(working_set: &mut StateWorkingSet, expr: &Expression)
         ..Default::default()
     };
 
-    let block_id = working_set.add_block(block);
+    let block_id = working_set.add_block(Arc::new(block));
 
     output.push(Argument::Positional(Expression {
         expr: Expr::Closure(block_id),
@@ -6221,7 +6224,7 @@ pub fn parse(
     fname: Option<&str>,
     contents: &[u8],
     scoped: bool,
-) -> Block {
+) -> Arc<Block> {
     let name = match fname {
         Some(fname) => {
             // use the canonical name for this filename
@@ -6246,7 +6249,7 @@ pub fn parse(
                 working_set.error(err)
             }
 
-            parse_block(working_set, &output, new_span, scoped, false)
+            Arc::new(parse_block(working_set, &output, new_span, scoped, false))
         }
     };
 
@@ -6261,7 +6264,10 @@ pub fn parse(
         &mut seen_blocks,
         &mut captures,
     ) {
-        Ok(_) => output.captures = captures.into_iter().map(|(var_id, _)| var_id).collect(),
+        Ok(_) => {
+            Arc::make_mut(&mut output).captures =
+                captures.into_iter().map(|(var_id, _)| var_id).collect();
+        }
         Err(err) => working_set.error(err),
    }
 
@@ -1,3 +1,5 @@
+use std::sync::Arc;
+
 use serde::{Deserialize, Serialize};
 
 use super::{Argument, Expr, ExternalArgument, RecordItem};
@@ -325,7 +327,7 @@ impl Expression {
                 expr.replace_span(working_set, replaced, new_span);
             }
             Expr::Block(block_id) => {
-                let mut block = working_set.get_block(*block_id).clone();
+                let mut block = (**working_set.get_block(*block_id)).clone();
 
                 for pipeline in block.pipelines.iter_mut() {
                     for element in pipeline.elements.iter_mut() {
@@ -333,10 +335,10 @@ impl Expression {
                     }
                 }
 
-                *block_id = working_set.add_block(block);
+                *block_id = working_set.add_block(Arc::new(block));
             }
             Expr::Closure(block_id) => {
-                let mut block = working_set.get_block(*block_id).clone();
+                let mut block = (**working_set.get_block(*block_id)).clone();
 
                 for pipeline in block.pipelines.iter_mut() {
                     for element in pipeline.elements.iter_mut() {
@@ -344,7 +346,7 @@ impl Expression {
                     }
                 }
 
-                *block_id = working_set.add_block(block);
+                *block_id = working_set.add_block(Arc::new(block));
             }
             Expr::Binary(_) => {}
             Expr::Bool(_) => {}
@@ -429,7 +431,7 @@ impl Expression {
                 }
             }
             Expr::RowCondition(block_id) | Expr::Subexpression(block_id) => {
-                let mut block = working_set.get_block(*block_id).clone();
+                let mut block = (**working_set.get_block(*block_id)).clone();
 
                 for pipeline in block.pipelines.iter_mut() {
                     for element in pipeline.elements.iter_mut() {
@@ -437,7 +439,7 @@
                     }
                 }
 
-                *block_id = working_set.add_block(block);
+                *block_id = working_set.add_block(Arc::new(block));
             }
             Expr::Table(headers, cells) => {
                 for header in headers {
@@ -64,23 +64,29 @@ impl Clone for IsDebugging {
 /// will refer to the corresponding IDs rather than their definitions directly. At runtime, this means
 /// less copying and smaller structures.
 ///
+/// Many of the larger objects in this structure are stored within `Arc` to decrease the cost of
+/// cloning `EngineState`. While `Arc`s are generally immutable, they can be modified using
+/// `Arc::make_mut`, which automatically clones to a new allocation if there are other copies of
+/// the `Arc` already in use, but will let us modify the `Arc` directly if we have the only
+/// reference to it.
+///
 /// Note that the runtime stack is not part of this global state. Runtime stacks are handled differently,
 /// but they also rely on using IDs rather than full definitions.
 #[derive(Clone)]
 pub struct EngineState {
-    files: Vec<(String, usize, usize)>,
-    file_contents: Vec<(Vec<u8>, usize, usize)>,
+    files: Vec<(Arc<String>, usize, usize)>,
+    file_contents: Vec<(Arc<Vec<u8>>, usize, usize)>,
     pub(super) virtual_paths: Vec<(String, VirtualPath)>,
     vars: Vec<Variable>,
-    decls: Vec<Box<dyn Command + 'static>>,
-    pub(super) blocks: Vec<Block>,
-    pub(super) modules: Vec<Module>,
+    decls: Arc<Vec<Box<dyn Command + 'static>>>,
+    pub(super) blocks: Vec<Arc<Block>>,
+    pub(super) modules: Vec<Arc<Module>>,
     usage: Usage,
     pub scope: ScopeFrame,
     pub ctrlc: Option<Arc<AtomicBool>>,
-    pub env_vars: EnvVars,
-    pub previous_env_vars: HashMap<String, Value>,
-    pub config: Config,
+    pub env_vars: Arc<EnvVars>,
+    pub previous_env_vars: Arc<HashMap<String, Value>>,
+    pub config: Arc<Config>,
     pub pipeline_externals_state: Arc<(AtomicU32, AtomicU32)>,
     pub repl_state: Arc<Mutex<ReplState>>,
     pub table_decl_id: Option<usize>,
@@ -122,9 +128,11 @@ impl EngineState {
                 Variable::new(Span::new(0, 0), Type::Any, false),
                 Variable::new(Span::new(0, 0), Type::Any, false),
             ],
-            decls: vec![],
+            decls: Arc::new(vec![]),
             blocks: vec![],
-            modules: vec![Module::new(DEFAULT_OVERLAY_NAME.as_bytes().to_vec())],
+            modules: vec![Arc::new(Module::new(
+                DEFAULT_OVERLAY_NAME.as_bytes().to_vec(),
+            ))],
             usage: Usage::new(),
             // make sure we have some default overlay:
             scope: ScopeFrame::with_empty_overlay(
@@ -133,11 +141,13 @@ impl EngineState {
                 false,
             ),
             ctrlc: None,
-            env_vars: [(DEFAULT_OVERLAY_NAME.to_string(), HashMap::new())]
-                .into_iter()
-                .collect(),
-            previous_env_vars: HashMap::new(),
-            config: Config::default(),
+            env_vars: Arc::new(
+                [(DEFAULT_OVERLAY_NAME.to_string(), HashMap::new())]
+                    .into_iter()
+                    .collect(),
+            ),
+            previous_env_vars: Arc::new(HashMap::new()),
+            config: Arc::new(Config::default()),
             pipeline_externals_state: Arc::new((AtomicU32::new(0), AtomicU32::new(0))),
             repl_state: Arc::new(Mutex::new(ReplState {
                 buffer: "".to_string(),
@@ -175,12 +185,16 @@ impl EngineState {
         self.files.extend(delta.files);
         self.file_contents.extend(delta.file_contents);
         self.virtual_paths.extend(delta.virtual_paths);
-        self.decls.extend(delta.decls);
         self.vars.extend(delta.vars);
         self.blocks.extend(delta.blocks);
         self.modules.extend(delta.modules);
         self.usage.merge_with(delta.usage);
 
+        // Avoid potentially cloning the Arc if we aren't adding anything
+        if !delta.decls.is_empty() {
+            Arc::make_mut(&mut self.decls).extend(delta.decls);
+        }
+
         let first = delta.scope.remove(0);
 
         for (delta_name, delta_overlay) in first.clone().overlays {
@@ -268,7 +282,7 @@ impl EngineState {
 
         for mut scope in stack.env_vars.drain(..) {
             for (overlay_name, mut env) in scope.drain() {
-                if let Some(env_vars) = self.env_vars.get_mut(&overlay_name) {
+                if let Some(env_vars) = Arc::make_mut(&mut self.env_vars).get_mut(&overlay_name) {
                     // Updating existing overlay
                     for (k, v) in env.drain() {
                         if k == "config" {
@@ -276,7 +290,7 @@ impl EngineState {
                             // Instead, mutate a clone of it with into_config(), and put THAT in env_vars.
                             let mut new_record = v.clone();
                             let (config, error) = new_record.into_config(&self.config);
-                            self.config = config;
+                            self.config = Arc::new(config);
                             config_updated = true;
                             env_vars.insert(k, new_record);
                             if let Some(e) = error {
@@ -288,7 +302,7 @@ impl EngineState {
                     }
                 } else {
                     // Pushing a new overlay
-                    self.env_vars.insert(overlay_name, env);
+                    Arc::make_mut(&mut self.env_vars).insert(overlay_name, env);
                 }
             }
         }
@@ -422,10 +436,10 @@ impl EngineState {
     pub fn add_env_var(&mut self, name: String, val: Value) {
         let overlay_name = String::from_utf8_lossy(self.last_overlay_name(&[])).to_string();
 
-        if let Some(env_vars) = self.env_vars.get_mut(&overlay_name) {
+        if let Some(env_vars) = Arc::make_mut(&mut self.env_vars).get_mut(&overlay_name) {
             env_vars.insert(name, val);
         } else {
-            self.env_vars
+            Arc::make_mut(&mut self.env_vars)
                 .insert(overlay_name, [(name, val)].into_iter().collect());
         }
     }
@@ -752,7 +766,7 @@ impl EngineState {
             self.update_plugin_gc_configs(&conf.plugin_gc);
         }
 
-        self.config = conf;
+        self.config = Arc::new(conf);
     }
 
     /// Fetch the configuration for a plugin
@@ -867,7 +881,7 @@ impl EngineState {
             .collect()
     }
 
-    pub fn get_block(&self, block_id: BlockId) -> &Block {
+    pub fn get_block(&self, block_id: BlockId) -> &Arc<Block> {
         self.blocks
             .get(block_id)
             .expect("internal error: missing block")
@@ -878,7 +892,7 @@ impl EngineState {
     /// Prefer to use [`.get_block()`] in most cases - `BlockId`s that don't exist are normally a
     /// compiler error. This only exists to stop plugins from crashing the engine if they send us
    /// something invalid.
-    pub fn try_get_block(&self, block_id: BlockId) -> Option<&Block> {
+    pub fn try_get_block(&self, block_id: BlockId) -> Option<&Arc<Block>> {
         self.blocks.get(block_id)
     }
 
@@ -902,7 +916,7 @@ impl EngineState {
         }
     }
 
-    pub fn files(&self) -> impl Iterator<Item = &(String, usize, usize)> {
+    pub fn files(&self) -> impl Iterator<Item = &(Arc<String>, usize, usize)> {
         self.files.iter()
     }
 
@@ -911,9 +925,10 @@ impl EngineState {
         let next_span_end = next_span_start + contents.len();
 
         self.file_contents
-            .push((contents, next_span_start, next_span_end));
+            .push((Arc::new(contents), next_span_start, next_span_end));
 
-        self.files.push((filename, next_span_start, next_span_end));
+        self.files
+            .push((Arc::new(filename), next_span_start, next_span_end));
 
         self.num_files() - 1
     }
@@ -953,7 +968,7 @@ impl EngineState {
             .unwrap_or_default()
     }
 
-    pub fn get_file_contents(&self) -> &[(Vec<u8>, usize, usize)] {
+    pub fn get_file_contents(&self) -> &[(Arc<Vec<u8>>, usize, usize)] {
         &self.file_contents
     }
 
@@ -1051,8 +1066,8 @@ mod engine_state_tests {
         engine_state.merge_delta(delta)?;
 
         assert_eq!(engine_state.num_files(), 2);
-        assert_eq!(&engine_state.files[0].0, "test.nu");
-        assert_eq!(&engine_state.files[1].0, "child.nu");
+        assert_eq!(&*engine_state.files[0].0, "test.nu");
+        assert_eq!(&*engine_state.files[1].0, "child.nu");
 
         Ok(())
     }
@@ -12,13 +12,13 @@ use crate::RegisteredPlugin;
 /// can be applied to the global state to update it to contain both previous state and the state held
 /// within the delta.
 pub struct StateDelta {
-    pub(super) files: Vec<(String, usize, usize)>,
-    pub(crate) file_contents: Vec<(Vec<u8>, usize, usize)>,
+    pub(super) files: Vec<(Arc<String>, usize, usize)>,
+    pub(crate) file_contents: Vec<(Arc<Vec<u8>>, usize, usize)>,
     pub(super) virtual_paths: Vec<(String, VirtualPath)>,
     pub(super) vars: Vec<Variable>, // indexed by VarId
     pub(super) decls: Vec<Box<dyn Command>>, // indexed by DeclId
-    pub blocks: Vec<Block>, // indexed by BlockId
-    pub(super) modules: Vec<Module>, // indexed by ModuleId
+    pub blocks: Vec<Arc<Block>>, // indexed by BlockId
+    pub(super) modules: Vec<Arc<Module>>, // indexed by ModuleId
     pub(super) usage: Usage,
     pub scope: Vec<ScopeFrame>,
     #[cfg(feature = "plugin")]
@@ -131,7 +131,7 @@ impl StateDelta {
         self.scope.pop();
     }
 
-    pub fn get_file_contents(&self) -> &[(Vec<u8>, usize, usize)] {
+    pub fn get_file_contents(&self) -> &[(Arc<Vec<u8>>, usize, usize)] {
         &self.file_contents
     }
 }
@@ -254,7 +254,7 @@ impl<'a> StateWorkingSet<'a> {
         }
     }
 
-    pub fn add_block(&mut self, block: Block) -> BlockId {
+    pub fn add_block(&mut self, block: Arc<Block>) -> BlockId {
         self.delta.blocks.push(block);
 
         self.num_blocks() - 1
@@ -263,7 +263,7 @@ impl<'a> StateWorkingSet<'a> {
     pub fn add_module(&mut self, name: &str, module: Module, comments: Vec<Span>) -> ModuleId {
         let name = name.as_bytes().to_vec();
 
-        self.delta.modules.push(module);
+        self.delta.modules.push(Arc::new(module));
         let module_id = self.num_modules() - 1;
 
         if !comments.is_empty() {
@@ -296,7 +296,7 @@ impl<'a> StateWorkingSet<'a> {
         self.permanent_state.next_span_start()
     }
 
-    pub fn files(&'a self) -> impl Iterator<Item = &(String, usize, usize)> {
+    pub fn files(&'a self) -> impl Iterator<Item = &(Arc<String>, usize, usize)> {
         self.permanent_state.files().chain(self.delta.files.iter())
     }
 
@@ -320,7 +320,7 @@ impl<'a> StateWorkingSet<'a> {
     pub fn add_file(&mut self, filename: String, contents: &[u8]) -> FileId {
         // First, look for the file to see if we already have it
         for (idx, (fname, file_start, file_end)) in self.files().enumerate() {
-            if fname == &filename {
+            if **fname == filename {
                 let prev_contents = self.get_span_contents(Span::new(*file_start, *file_end));
                 if prev_contents == contents {
                     return idx;
@@ -331,13 +331,15 @@ impl<'a> StateWorkingSet<'a> {
         let next_span_start = self.next_span_start();
         let next_span_end = next_span_start + contents.len();
 
-        self.delta
-            .file_contents
-            .push((contents.to_vec(), next_span_start, next_span_end));
+        self.delta.file_contents.push((
+            Arc::new(contents.to_vec()),
+            next_span_start,
+            next_span_end,
+        ));
 
         self.delta
             .files
-            .push((filename, next_span_start, next_span_end));
+            .push((Arc::new(filename), next_span_start, next_span_end));
 
         self.num_files() - 1
     }
@@ -353,7 +355,7 @@ impl<'a> StateWorkingSet<'a> {
         let (file_id, ..) = self
             .files()
             .enumerate()
-            .find(|(_, (fname, _, _))| fname == filename)?;
+            .find(|(_, (fname, _, _))| **fname == filename)?;
 
         Some(self.get_span_for_file(file_id))
     }
@@ -626,8 +628,8 @@ impl<'a> StateWorkingSet<'a> {
     pub fn list_env(&self) -> Vec<String> {
         let mut env_vars = vec![];
 
-        for env_var in self.permanent_state.env_vars.clone().into_iter() {
-            env_vars.push(env_var.0)
+        for env_var in self.permanent_state.env_vars.iter() {
+            env_vars.push(env_var.0.clone());
         }
 
         env_vars
@@ -742,7 +744,7 @@ impl<'a> StateWorkingSet<'a> {
         output
     }
 
-    pub fn get_block(&self, block_id: BlockId) -> &Block {
+    pub fn get_block(&self, block_id: BlockId) -> &Arc<Block> {
         let num_permanent_blocks = self.permanent_state.num_blocks();
         if block_id < num_permanent_blocks {
             self.permanent_state.get_block(block_id)
@@ -774,6 +776,7 @@ impl<'a> StateWorkingSet<'a> {
             self.delta
                 .blocks
                 .get_mut(block_id - num_permanent_blocks)
+                .map(Arc::make_mut)
                 .expect("internal error: missing block")
         }
     }
@@ -957,7 +960,7 @@ impl<'a> StateWorkingSet<'a> {
         build_usage(&comment_lines)
     }
 
-    pub fn find_block_by_span(&self, span: Span) -> Option<Block> {
+    pub fn find_block_by_span(&self, span: Span) -> Option<Arc<Block>> {
         for block in &self.delta.blocks {
             if Some(span) == block.span {
                 return Some(block.clone());
@@ -1063,7 +1066,7 @@ impl<'a> miette::SourceCode for &StateWorkingSet<'a> {
             }
 
             let data = span_contents.data();
-            if filename == "<cli>" {
+            if **filename == "<cli>" {
                 if debugging {
                     let success_cli = "Successfully read CLI span";
                     dbg!(success_cli, String::from_utf8_lossy(data));
@@ -1081,7 +1084,7 @@ impl<'a> miette::SourceCode for &StateWorkingSet<'a> {
                     dbg!(success_file);
                 }
                 return Ok(Box::new(miette::MietteSpanContents::new_named(
-                    filename.clone(),
+                    (**filename).clone(),
                     data,
                     retranslated,
                     span_contents.line(),
@@ -11,6 +11,7 @@ use std::fs::File;
 use std::io::Write;
 use std::panic::{catch_unwind, AssertUnwindSafe};
 use std::path::Path;
+use std::sync::Arc;
 
 pub(crate) const NUSHELL_FOLDER: &str = "nushell";
 const CONFIG_FILE: &str = "config.nu";
@@ -210,7 +211,7 @@ pub(crate) fn setup_config(
         eprintln!(
             "A panic occurred while reading configuration files, using default configuration."
         );
-        engine_state.config = Config::default()
+        engine_state.config = Arc::new(Config::default())
     }
 }
 
@@ -165,7 +165,7 @@ pub fn goto_def(engine_state: &mut EngineState, file_path: &str, location: &Valu
         "{}",
         json!(
             {
-                "file": file.0,
+                "file": &**file.0,
                 "start": span.start - file.1,
                 "end": span.end - file.1
             }
@@ -185,7 +185,7 @@ pub fn goto_def(engine_state: &mut EngineState, file_path: &str, location: &Valu
         "{}",
         json!(
             {
-                "file": file.0,
+                "file": &**file.0,
                 "start": var.declaration_span.start - file.1,
                 "end": var.declaration_span.end - file.1
            }