Merge branch 'main' into env_conversion_on_access

commit 85489b91d1
Bahex, 2024-12-17 03:30:58 +03:00, committed by GitHub
GPG Key ID: B5690EEEBB952194 (no known key found for this signature in database)
37 changed files with 384 additions and 480 deletions


@@ -174,14 +174,6 @@ pub fn complete_item(
 ) -> Vec<FileSuggestion> {
     let cleaned_partial = surround_remove(partial);
     let isdir = cleaned_partial.ends_with(is_separator);
-    #[cfg(windows)]
-    let cleaned_partial = if let Some(absolute_partial) =
-        stack.pwd_per_drive.expand_pwd(Path::new(&cleaned_partial))
-    {
-        absolute_partial.display().to_string()
-    } else {
-        cleaned_partial
-    };
     let expanded_partial = expand_ndots(Path::new(&cleaned_partial));
     let should_collapse_dots = expanded_partial != Path::new(&cleaned_partial);
     let mut partial = expanded_partial.to_string_lossy().to_string();


@@ -832,12 +832,6 @@ fn do_auto_cd(
     engine_state: &mut EngineState,
     span: Span,
 ) {
-    #[cfg(windows)]
-    let path = if let Some(abs_path) = stack.pwd_per_drive.expand_pwd(path.as_path()) {
-        abs_path
-    } else {
-        path
-    };
     let path = {
         if !path.exists() {
             report_shell_error(


@@ -25,7 +25,7 @@ impl Command for EachWhile {
             )])
             .required(
                 "closure",
-                SyntaxShape::Closure(Some(vec![SyntaxShape::Any, SyntaxShape::Int])),
+                SyntaxShape::Closure(Some(vec![SyntaxShape::Any])),
                 "the closure to run",
             )
             .category(Category::Filters)


@@ -87,7 +87,7 @@ impl Command for Cd {
                 });
             }
         } else {
-            let path = stack.expand_path_with(path_no_whitespace, &cwd, true);
+            let path = nu_path::expand_path_with(path_no_whitespace, &cwd, true);
             if !path.exists() {
                 return Err(ShellError::DirectoryNotFound {
                     dir: path_no_whitespace.to_string(),


@@ -14,7 +14,7 @@ impl Command for All {
             .input_output_types(vec![(Type::List(Box::new(Type::Any)), Type::Bool)])
             .required(
                 "predicate",
-                SyntaxShape::Closure(Some(vec![SyntaxShape::Any, SyntaxShape::Int])),
+                SyntaxShape::Closure(Some(vec![SyntaxShape::Any])),
                 "A closure that must evaluate to a boolean.",
             )
             .category(Category::Filters)
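The same signature change is applied across the filter commands in this commit (`each while`, `all`, `any`, the `where`/filter closure form, `par-each`, `skip until`, `skip while`, `take until`, `take while`): the declared closure shape now lists only the element argument, without the optional index. A minimal usage sketch (hypothetical session, not part of the diff):

    [1 2 3] | all {|x| $x > 0}            # => true
    [1 2 3 -1] | any {|x| $x < 0}         # => true
    [1 2 3 4] | take while {|x| $x < 3}   # => [1, 2]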


@@ -14,7 +14,7 @@ impl Command for Any {
             .input_output_types(vec![(Type::List(Box::new(Type::Any)), Type::Bool)])
             .required(
                 "predicate",
-                SyntaxShape::Closure(Some(vec![SyntaxShape::Any, SyntaxShape::Int])),
+                SyntaxShape::Closure(Some(vec![SyntaxShape::Any])),
                 "A closure that must evaluate to a boolean.",
             )
             .category(Category::Filters)


@@ -30,7 +30,7 @@ a variable. On the other hand, the "row condition" syntax is not supported."#
             ])
             .required(
                 "closure",
-                SyntaxShape::Closure(Some(vec![SyntaxShape::Any, SyntaxShape::Int])),
+                SyntaxShape::Closure(Some(vec![SyntaxShape::Any])),
                 "Predicate closure.",
             )
             .category(Category::Filters)


@@ -38,7 +38,7 @@ impl Command for ParEach {
             )
             .required(
                 "closure",
-                SyntaxShape::Closure(Some(vec![SyntaxShape::Any, SyntaxShape::Int])),
+                SyntaxShape::Closure(Some(vec![SyntaxShape::Any])),
                 "The closure to run.",
             )
             .allow_variants_without_examples(true)


@@ -24,11 +24,7 @@ impl Command for Reduce {
             )
             .required(
                 "closure",
-                SyntaxShape::Closure(Some(vec![
-                    SyntaxShape::Any,
-                    SyntaxShape::Any,
-                    SyntaxShape::Int,
-                ])),
+                SyntaxShape::Closure(Some(vec![SyntaxShape::Any, SyntaxShape::Any])),
                 "Reducing function.",
             )
             .allow_variants_without_examples(true)
@@ -88,6 +84,15 @@ impl Command for Reduce {
                 "Concatenate a string with itself, using a range to determine the number of times.",
                 result: Some(Value::test_string("StrStrStr")),
             },
+            Example {
+                example: r#"[{a: 1} {b: 2} {c: 3}] | reduce {|it| merge $it}"#,
+                description: "Merge multiple records together, making use of the fact that the accumulated value is also supplied as pipeline input to the closure.",
+                result: Some(Value::test_record(record!(
+                    "a" => Value::test_int(1),
+                    "b" => Value::test_int(2),
+                    "c" => Value::test_int(3),
+                ))),
+            }
         ]
     }
@@ -135,8 +140,8 @@ mod test {
     #[test]
     fn test_examples() {
-        use crate::test_examples;
-        test_examples(Reduce {})
+        use crate::{test_examples_with_commands, Merge};
+        test_examples_with_commands(Reduce {}, &[&Merge])
     }
 }
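Since the accumulated value is now also piped into the closure, commands that read pipeline input (such as `merge` in the new example above) can be used directly. A rough sketch under that assumption:

    [1 2 3 4] | reduce {|it| $in + $it}         # => 10; $in is the accumulator
    [1 2 3 4] | reduce {|it, acc| $acc + $it}   # the two-argument form is unchanged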


@@ -20,7 +20,7 @@ impl Command for SkipUntil {
             ])
             .required(
                 "predicate",
-                SyntaxShape::Closure(Some(vec![SyntaxShape::Any, SyntaxShape::Int])),
+                SyntaxShape::Closure(Some(vec![SyntaxShape::Any])),
                 "The predicate that skipped element must not match.",
             )
             .category(Category::Filters)


@@ -20,7 +20,7 @@ impl Command for SkipWhile {
             ])
             .required(
                 "predicate",
-                SyntaxShape::Closure(Some(vec![SyntaxShape::Any, SyntaxShape::Int])),
+                SyntaxShape::Closure(Some(vec![SyntaxShape::Any])),
                 "The predicate that skipped element must match.",
             )
             .category(Category::Filters)


@@ -17,7 +17,7 @@ impl Command for TakeUntil {
             )])
             .required(
                 "predicate",
-                SyntaxShape::Closure(Some(vec![SyntaxShape::Any, SyntaxShape::Int])),
+                SyntaxShape::Closure(Some(vec![SyntaxShape::Any])),
                 "The predicate that element(s) must not match.",
             )
             .category(Category::Filters)


@@ -20,7 +20,7 @@ impl Command for TakeWhile {
             ])
             .required(
                 "predicate",
-                SyntaxShape::Closure(Some(vec![SyntaxShape::Any, SyntaxShape::Int])),
+                SyntaxShape::Closure(Some(vec![SyntaxShape::Any])),
                 "The predicate that element(s) must match.",
             )
             .category(Category::Filters)


@@ -204,12 +204,45 @@ fn from_csv(
 #[cfg(test)]
 mod test {
+    use nu_cmd_lang::eval_pipeline_without_terminal_expression;
     use super::*;
+    use crate::{Metadata, MetadataSet};
     #[test]
     fn test_examples() {
         use crate::test_examples;
         test_examples(FromCsv {})
     }
+
+    #[test]
+    fn test_content_type_metadata() {
+        let mut engine_state = Box::new(EngineState::new());
+        let delta = {
+            let mut working_set = StateWorkingSet::new(&engine_state);
+            working_set.add_decl(Box::new(FromCsv {}));
+            working_set.add_decl(Box::new(Metadata {}));
+            working_set.add_decl(Box::new(MetadataSet {}));
+            working_set.render()
+        };
+        engine_state
+            .merge_delta(delta)
+            .expect("Error merging delta");
+        let cmd = r#""a,b\n1,2" | metadata set --content-type 'text/csv' --datasource-ls | from csv | metadata | $in"#;
+        let result = eval_pipeline_without_terminal_expression(
+            cmd,
+            std::env::temp_dir().as_ref(),
+            &mut engine_state,
+        );
+        assert_eq!(
+            Value::test_record(record!("source" => Value::test_string("ls"))),
+            result.expect("There should be a result")
+        )
+    }
 }


@@ -93,9 +93,10 @@ pub(super) fn from_delimited_data(
     input: PipelineData,
     name: Span,
 ) -> Result<PipelineData, ShellError> {
+    let metadata = input.metadata().map(|md| md.with_content_type(None));
     match input {
         PipelineData::Empty => Ok(PipelineData::Empty),
-        PipelineData::Value(value, metadata) => {
+        PipelineData::Value(value, ..) => {
             let string = value.into_string()?;
             let byte_stream = ByteStream::read_string(string, name, Signals::empty());
             Ok(PipelineData::ListStream(
@@ -109,7 +110,7 @@ pub(super) fn from_delimited_data(
             dst_span: name,
             src_span: list_stream.span(),
         }),
-        PipelineData::ByteStream(byte_stream, metadata) => Ok(PipelineData::ListStream(
+        PipelineData::ByteStream(byte_stream, ..) => Ok(PipelineData::ListStream(
            from_delimited_stream(config, byte_stream, name)?,
            metadata,
        )),
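The delimited converters (`from csv`, `from tsv`) and the other `from *` commands below now take the incoming metadata, clear its content type, and re-attach it to the structured output. A hypothetical Nushell session showing the observable effect:

    "a,b\n1,2" | metadata set --content-type 'text/csv' --datasource-ls | from csv | metadata
    # => record reporting source "ls"; the text/csv content type is no longer attached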


@@ -70,23 +70,22 @@ impl Command for FromJson {
         let span = call.head;
         let strict = call.has_flag(engine_state, stack, "strict")?;
+        let metadata = input.metadata().map(|md| md.with_content_type(None));
         // TODO: turn this into a structured underline of the nu_json error
         if call.has_flag(engine_state, stack, "objects")? {
             // Return a stream of JSON values, one for each non-empty line
             match input {
-                PipelineData::Value(Value::String { val, .. }, metadata) => {
-                    Ok(PipelineData::ListStream(
-                        read_json_lines(
-                            Cursor::new(val),
-                            span,
-                            strict,
-                            engine_state.signals().clone(),
-                        ),
-                        metadata,
-                    ))
-                }
-                PipelineData::ByteStream(stream, metadata)
+                PipelineData::Value(Value::String { val, .. }, ..) => Ok(PipelineData::ListStream(
+                    read_json_lines(
+                        Cursor::new(val),
+                        span,
+                        strict,
+                        engine_state.signals().clone(),
+                    ),
+                    metadata,
+                )),
+                PipelineData::ByteStream(stream, ..)
                     if stream.type_() != ByteStreamType::Binary =>
                 {
                     if let Some(reader) = stream.reader() {
@@ -107,7 +106,7 @@ impl Command for FromJson {
             }
         } else {
             // Return a single JSON value
-            let (string_input, span, metadata) = input.collect_string_strict(span)?;
+            let (string_input, span, ..) = input.collect_string_strict(span)?;
             if string_input.is_empty() {
                 return Ok(Value::nothing(span).into_pipeline_data());
@@ -267,6 +266,10 @@ fn convert_string_to_value_strict(string_input: &str, span: Span) -> Result<Valu
 #[cfg(test)]
 mod test {
+    use nu_cmd_lang::eval_pipeline_without_terminal_expression;
+    use crate::{Metadata, MetadataSet};
     use super::*;
     #[test]
@@ -275,4 +278,33 @@ mod test {
         test_examples(FromJson {})
     }
+
+    #[test]
+    fn test_content_type_metadata() {
+        let mut engine_state = Box::new(EngineState::new());
+        let delta = {
+            let mut working_set = StateWorkingSet::new(&engine_state);
+            working_set.add_decl(Box::new(FromJson {}));
+            working_set.add_decl(Box::new(Metadata {}));
+            working_set.add_decl(Box::new(MetadataSet {}));
+            working_set.render()
+        };
+        engine_state
+            .merge_delta(delta)
+            .expect("Error merging delta");
+        let cmd = r#"'{"a":1,"b":2}' | metadata set --content-type 'application/json' --datasource-ls | from json | metadata | $in"#;
+        let result = eval_pipeline_without_terminal_expression(
+            cmd,
+            std::env::temp_dir().as_ref(),
+            &mut engine_state,
+        );
+        assert_eq!(
+            Value::test_record(record!("source" => Value::test_string("ls"))),
+            result.expect("There should be a result")
+        )
+    }
 }


@@ -113,7 +113,8 @@ MessagePack: https://msgpack.org/
             objects,
             signals: engine_state.signals().clone(),
         };
-        match input {
+        let metadata = input.metadata().map(|md| md.with_content_type(None));
+        let out = match input {
             // Deserialize from a byte buffer
             PipelineData::Value(Value::Binary { val: bytes, .. }, _) => {
                 read_msgpack(Cursor::new(bytes), opts)
@@ -136,7 +137,8 @@ MessagePack: https://msgpack.org/
                 dst_span: call.head,
                 src_span: input.span().unwrap_or(call.head),
             }),
-        }
+        };
+        out.map(|pd| pd.set_metadata(metadata))
     }
 }
@@ -510,6 +512,10 @@ fn assert_eof(input: &mut impl io::Read, span: Span) -> Result<(), ShellError> {
 #[cfg(test)]
 mod test {
+    use nu_cmd_lang::eval_pipeline_without_terminal_expression;
+    use crate::{Metadata, MetadataSet, ToMsgpack};
     use super::*;
     #[test]
@@ -518,4 +524,34 @@ mod test {
         test_examples(FromMsgpack {})
     }
+
+    #[test]
+    fn test_content_type_metadata() {
+        let mut engine_state = Box::new(EngineState::new());
+        let delta = {
+            let mut working_set = StateWorkingSet::new(&engine_state);
+            working_set.add_decl(Box::new(ToMsgpack {}));
+            working_set.add_decl(Box::new(FromMsgpack {}));
+            working_set.add_decl(Box::new(Metadata {}));
+            working_set.add_decl(Box::new(MetadataSet {}));
+            working_set.render()
+        };
+        engine_state
+            .merge_delta(delta)
+            .expect("Error merging delta");
+        let cmd = r#"{a: 1 b: 2} | to msgpack | metadata set --datasource-ls | from msgpack | metadata | $in"#;
+        let result = eval_pipeline_without_terminal_expression(
+            cmd,
+            std::env::temp_dir().as_ref(),
+            &mut engine_state,
+        );
+        assert_eq!(
+            Value::test_record(record!("source" => Value::test_string("ls"))),
+            result.expect("There should be a result")
+        )
+    }
 }


@@ -43,7 +43,8 @@ impl Command for FromMsgpackz {
             objects,
             signals: engine_state.signals().clone(),
         };
-        match input {
+        let metadata = input.metadata().map(|md| md.with_content_type(None));
+        let out = match input {
             // Deserialize from a byte buffer
             PipelineData::Value(Value::Binary { val: bytes, .. }, _) => {
                 let reader = brotli::Decompressor::new(Cursor::new(bytes), BUFFER_SIZE);
@@ -68,6 +69,7 @@ impl Command for FromMsgpackz {
                 dst_span: call.head,
                 src_span: span,
             }),
-        }
+        };
+        out.map(|pd| pd.set_metadata(metadata))
     }
 }


@@ -49,7 +49,8 @@ impl Command for FromNuon {
         let (string_input, _span, metadata) = input.collect_string_strict(head)?;
         match nuon::from_nuon(&string_input, Some(head)) {
-            Ok(result) => Ok(result.into_pipeline_data_with_metadata(metadata)),
+            Ok(result) => Ok(result
+                .into_pipeline_data_with_metadata(metadata.map(|md| md.with_content_type(None)))),
             Err(err) => Err(ShellError::GenericError {
                 error: "error when loading nuon text".into(),
                 msg: "could not load nuon text".into(),
@@ -63,6 +64,10 @@ impl Command for FromNuon {
 #[cfg(test)]
 mod test {
+    use nu_cmd_lang::eval_pipeline_without_terminal_expression;
+    use crate::{Metadata, MetadataSet};
     use super::*;
     #[test]
@@ -71,4 +76,33 @@ mod test {
         test_examples(FromNuon {})
     }
+
+    #[test]
+    fn test_content_type_metadata() {
+        let mut engine_state = Box::new(EngineState::new());
+        let delta = {
+            let mut working_set = StateWorkingSet::new(&engine_state);
+            working_set.add_decl(Box::new(FromNuon {}));
+            working_set.add_decl(Box::new(Metadata {}));
+            working_set.add_decl(Box::new(MetadataSet {}));
+            working_set.render()
+        };
+        engine_state
+            .merge_delta(delta)
+            .expect("Error merging delta");
+        let cmd = r#"'[[a, b]; [1, 2]]' | metadata set --content-type 'application/x-nuon' --datasource-ls | from nuon | metadata | $in"#;
+        let result = eval_pipeline_without_terminal_expression(
+            cmd,
+            std::env::temp_dir().as_ref(),
+            &mut engine_state,
+        );
+        assert_eq!(
+            Value::test_record(record!("source" => Value::test_string("ls"))),
+            result.expect("There should be a result")
+        )
+    }
 }


@@ -46,7 +46,8 @@ impl Command for FromOds {
             vec![]
         };
-        from_ods(input, head, sel_sheets)
+        let metadata = input.metadata().map(|md| md.with_content_type(None));
+        from_ods(input, head, sel_sheets).map(|pd| pd.set_metadata(metadata))
     }
     fn examples(&self) -> Vec<Example> {


@@ -29,7 +29,8 @@ impl Command for FromToml {
         let span = call.head;
         let (mut string_input, span, metadata) = input.collect_string_strict(span)?;
         string_input.push('\n');
-        Ok(convert_string_to_value(string_input, span)?.into_pipeline_data_with_metadata(metadata))
+        Ok(convert_string_to_value(string_input, span)?
+            .into_pipeline_data_with_metadata(metadata.map(|md| md.with_content_type(None))))
     }
     fn examples(&self) -> Vec<Example> {
@@ -144,8 +145,11 @@ pub fn convert_string_to_value(string_input: String, span: Span) -> Result<Value
 #[cfg(test)]
 mod tests {
+    use crate::{Metadata, MetadataSet};
     use super::*;
     use chrono::TimeZone;
+    use nu_cmd_lang::eval_pipeline_without_terminal_expression;
     use toml::value::Datetime;
     #[test]
@@ -331,4 +335,33 @@ mod tests {
         assert_eq!(result, reference_date);
     }
+
+    #[test]
+    fn test_content_type_metadata() {
+        let mut engine_state = Box::new(EngineState::new());
+        let delta = {
+            let mut working_set = StateWorkingSet::new(&engine_state);
+            working_set.add_decl(Box::new(FromToml {}));
+            working_set.add_decl(Box::new(Metadata {}));
+            working_set.add_decl(Box::new(MetadataSet {}));
+            working_set.render()
+        };
+        engine_state
+            .merge_delta(delta)
+            .expect("Error merging delta");
+        let cmd = r#""[a]\nb = 1\nc = 1" | metadata set --content-type 'text/x-toml' --datasource-ls | from toml | metadata | $in"#;
+        let result = eval_pipeline_without_terminal_expression(
+            cmd,
+            std::env::temp_dir().as_ref(),
+            &mut engine_state,
+        );
+        assert_eq!(
+            Value::test_record(record!("source" => Value::test_string("ls"))),
+            result.expect("There should be a result")
+        )
+    }
 }


@@ -165,6 +165,10 @@ fn from_tsv(
 #[cfg(test)]
 mod test {
+    use nu_cmd_lang::eval_pipeline_without_terminal_expression;
+    use crate::{Metadata, MetadataSet};
     use super::*;
     #[test]
@@ -173,4 +177,33 @@ mod test {
         test_examples(FromTsv {})
     }
+
+    #[test]
+    fn test_content_type_metadata() {
+        let mut engine_state = Box::new(EngineState::new());
+        let delta = {
+            let mut working_set = StateWorkingSet::new(&engine_state);
+            working_set.add_decl(Box::new(FromTsv {}));
+            working_set.add_decl(Box::new(Metadata {}));
+            working_set.add_decl(Box::new(MetadataSet {}));
+            working_set.render()
+        };
+        engine_state
+            .merge_delta(delta)
+            .expect("Error merging delta");
+        let cmd = r#""a\tb\n1\t2" | metadata set --content-type 'text/tab-separated-values' --datasource-ls | from tsv | metadata | $in"#;
+        let result = eval_pipeline_without_terminal_expression(
+            cmd,
+            std::env::temp_dir().as_ref(),
+            &mut engine_state,
+        );
+        assert_eq!(
+            Value::test_record(record!("source" => Value::test_string("ls"))),
+            result.expect("There should be a result")
+        )
+    }
 }


@@ -47,7 +47,8 @@ impl Command for FromXlsx {
             vec![]
         };
-        from_xlsx(input, head, sel_sheets)
+        let metadata = input.metadata().map(|md| md.with_content_type(None));
+        from_xlsx(input, head, sel_sheets).map(|pd| pd.set_metadata(metadata))
     }
     fn examples(&self) -> Vec<Example> {


@@ -206,7 +206,9 @@ fn from_xml(input: PipelineData, info: &ParsingInfo) -> Result<PipelineData, She
     let (concat_string, span, metadata) = input.collect_string_strict(info.span)?;
     match from_xml_string_to_value(&concat_string, info) {
-        Ok(x) => Ok(x.into_pipeline_data_with_metadata(metadata)),
+        Ok(x) => {
+            Ok(x.into_pipeline_data_with_metadata(metadata.map(|md| md.with_content_type(None))))
+        }
         Err(err) => Err(process_xml_parse_error(err, span)),
     }
 }
@@ -322,10 +324,14 @@ fn make_cant_convert_error(help: impl Into<String>, span: Span) -> ShellError {
 #[cfg(test)]
 mod tests {
+    use crate::Metadata;
+    use crate::MetadataSet;
     use super::*;
     use indexmap::indexmap;
     use indexmap::IndexMap;
+    use nu_cmd_lang::eval_pipeline_without_terminal_expression;
     fn string(input: impl Into<String>) -> Value {
         Value::test_string(input)
@@ -480,4 +486,36 @@ mod tests {
         test_examples(FromXml {})
     }
+
+    #[test]
+    fn test_content_type_metadata() {
+        let mut engine_state = Box::new(EngineState::new());
+        let delta = {
+            let mut working_set = StateWorkingSet::new(&engine_state);
+            working_set.add_decl(Box::new(FromXml {}));
+            working_set.add_decl(Box::new(Metadata {}));
+            working_set.add_decl(Box::new(MetadataSet {}));
+            working_set.render()
+        };
+        engine_state
+            .merge_delta(delta)
+            .expect("Error merging delta");
+        let cmd = r#"'<?xml version="1.0" encoding="UTF-8"?>
+<note>
+<remember>Event</remember>
+</note>' | metadata set --content-type 'application/xml' --datasource-ls | from xml | metadata | $in"#;
+        let result = eval_pipeline_without_terminal_expression(
+            cmd,
+            std::env::temp_dir().as_ref(),
+            &mut engine_state,
+        );
+        assert_eq!(
+            Value::test_record(record!("source" => Value::test_string("ls"))),
+            result.expect("There should be a result")
+        )
+    }
 }


@@ -235,14 +235,19 @@ fn from_yaml(input: PipelineData, head: Span) -> Result<PipelineData, ShellError
     let (concat_string, span, metadata) = input.collect_string_strict(head)?;
     match from_yaml_string_to_value(&concat_string, head, span) {
-        Ok(x) => Ok(x.into_pipeline_data_with_metadata(metadata)),
+        Ok(x) => {
+            Ok(x.into_pipeline_data_with_metadata(metadata.map(|md| md.with_content_type(None))))
+        }
         Err(other) => Err(other),
     }
 }
 #[cfg(test)]
 mod test {
+    use crate::{Metadata, MetadataSet};
     use super::*;
+    use nu_cmd_lang::eval_pipeline_without_terminal_expression;
     use nu_protocol::Config;
     #[test]
@@ -395,4 +400,33 @@ mod test {
             assert!(result.ok().unwrap() == test_case.expected.ok().unwrap());
         }
     }
+
+    #[test]
+    fn test_content_type_metadata() {
+        let mut engine_state = Box::new(EngineState::new());
+        let delta = {
+            let mut working_set = StateWorkingSet::new(&engine_state);
+            working_set.add_decl(Box::new(FromYaml {}));
+            working_set.add_decl(Box::new(Metadata {}));
+            working_set.add_decl(Box::new(MetadataSet {}));
+            working_set.render()
+        };
+        engine_state
+            .merge_delta(delta)
+            .expect("Error merging delta");
+        let cmd = r#""a: 1\nb: 2" | metadata set --content-type 'application/yaml' --datasource-ls | from yaml | metadata | $in"#;
+        let result = eval_pipeline_without_terminal_expression(
+            cmd,
+            std::env::temp_dir().as_ref(),
+            &mut engine_state,
+        );
+        assert_eq!(
+            Value::test_record(record!("source" => Value::test_string("ls"))),
+            result.expect("There should be a result")
+        )
+    }
 }


@@ -195,9 +195,6 @@ pub fn env_to_strings(
         }
     }
-    #[cfg(windows)]
-    stack.pwd_per_drive.get_env_vars(&mut env_vars_str);
     Ok(env_vars_str)
 }


@@ -194,11 +194,6 @@ pub fn redirect_env(engine_state: &EngineState, caller_stack: &mut Stack, callee
         caller_stack.add_env_var(var, value);
     }
-    #[cfg(windows)]
-    {
-        caller_stack.pwd_per_drive = callee_stack.pwd_per_drive.clone();
-    }
     // set config to callee config, to capture any updates to that
     caller_stack.config.clone_from(&callee_stack.config);
 }


@@ -1,6 +1,6 @@
 use std::{borrow::Cow, fs::File, sync::Arc};
-use nu_path::AbsolutePathBuf;
+use nu_path::{expand_path_with, AbsolutePathBuf};
 use nu_protocol::{
     ast::{Bits, Block, Boolean, CellPath, Comparison, Math, Operator},
     debugger::DebugContext,
@@ -879,7 +879,7 @@ fn literal_value(
                 Value::string(path, span)
             } else {
                 let cwd = ctx.engine_state.cwd(Some(ctx.stack))?;
-                let path = ctx.stack.expand_path_with(path, cwd, true);
+                let path = expand_path_with(path, cwd, true);
                 Value::string(path.to_string_lossy(), span)
             }
@@ -899,7 +899,7 @@ fn literal_value(
                     .cwd(Some(ctx.stack))
                     .map(AbsolutePathBuf::into_std_path_buf)
                     .unwrap_or_default();
-                let path = ctx.stack.expand_path_with(path, cwd, true);
+                let path = expand_path_with(path, cwd, true);
                 Value::string(path.to_string_lossy(), span)
             }
@@ -1414,8 +1414,7 @@ enum RedirectionStream {
 /// Open a file for redirection
 fn open_file(ctx: &EvalContext<'_>, path: &Value, append: bool) -> Result<Arc<File>, ShellError> {
     let path_expanded =
-        ctx.stack
-            .expand_path_with(path.as_str()?, ctx.engine_state.cwd(Some(ctx.stack))?, true);
+        expand_path_with(path.as_str()?, ctx.engine_state.cwd(Some(ctx.stack))?, true);
     let mut options = File::options();
     if append {
         options.append(true);
@@ -1495,3 +1494,26 @@ fn eval_iterate(
         eval_iterate(ctx, dst, stream, end_index)
     }
 }
+
+/// Redirect environment from the callee stack to the caller stack
+fn redirect_env(engine_state: &EngineState, caller_stack: &mut Stack, callee_stack: &Stack) {
+    // TODO: make this more efficient
+    // Grab all environment variables from the callee
+    let caller_env_vars = caller_stack.get_env_var_names(engine_state);
+
+    // remove env vars that are present in the caller but not in the callee
+    // (the callee hid them)
+    for var in caller_env_vars.iter() {
+        if !callee_stack.has_env_var(engine_state, var) {
+            caller_stack.remove_env_var(engine_state, var);
+        }
+    }
+
+    // add new env vars from callee to caller
+    for (var, value) in callee_stack.get_stack_env_vars() {
+        caller_stack.add_env_var(var, value);
+    }
+
+    // set config to callee config, to capture any updates to that
+    caller_stack.config.clone_from(&callee_stack.config);
+}


@@ -6,8 +6,6 @@ pub mod expansions;
 pub mod form;
 mod helpers;
 mod path;
-#[cfg(windows)]
-pub mod pwd_per_drive;
 mod tilde;
 mod trailing_slash;
@@ -15,7 +13,5 @@ pub use components::components;
 pub use expansions::{canonicalize_with, expand_path_with, expand_to_real_path, locate_in_dirs};
 pub use helpers::{cache_dir, data_dir, home_dir, nu_config_dir};
 pub use path::*;
-#[cfg(windows)]
-pub use pwd_per_drive::DriveToPwdMap;
 pub use tilde::expand_tilde;
 pub use trailing_slash::{has_trailing_slash, strip_trailing_slash};


@@ -1,331 +0,0 @@
/// Usage for pwd_per_drive on windows
///
/// let mut map = DriveToPwdMap::new();
///
/// Upon change PWD, call map.set_pwd() with absolute path
///
/// Call map.expand_pwd() with relative path to get absolution path
///
/// ```
/// use std::path::{Path, PathBuf};
/// use nu_path::DriveToPwdMap;
///
/// let mut map = DriveToPwdMap::new();
///
/// // Set PWD for drive C
/// assert!(map.set_pwd(Path::new(r"C:\Users\Home")).is_ok());
///
/// // Expand a relative path
/// let expanded = map.expand_pwd(Path::new("c:test"));
/// assert_eq!(expanded, Some(PathBuf::from(r"C:\Users\Home\test")));
///
/// // Will NOT expand an absolute path
/// let expanded = map.expand_pwd(Path::new(r"C:\absolute\path"));
/// assert_eq!(expanded, None);
///
/// // Expand with no drive letter
/// let expanded = map.expand_pwd(Path::new(r"\no_drive"));
/// assert_eq!(expanded, None);
///
/// // Expand with no PWD set for the drive
/// let expanded = map.expand_pwd(Path::new("D:test"));
/// assert!(expanded.is_some());
/// let abs_path = expanded.unwrap().as_path().to_str().expect("OK").to_string();
/// assert!(abs_path.starts_with(r"D:\"));
/// assert!(abs_path.ends_with(r"\test"));
///
/// // Get env vars for child process
/// use std::collections::HashMap;
/// let mut env = HashMap::<String, String>::new();
/// map.get_env_vars(&mut env);
/// assert_eq!(env.get("=C:").unwrap(), r"C:\Users\Home");
/// ```
use std::collections::HashMap;
use std::path::{Path, PathBuf};
#[derive(Debug, PartialEq)]
pub enum PathError {
InvalidDriveLetter,
InvalidPath,
}
/// Helper to check if input path is relative path
/// with drive letter, it can be expanded with PWD-per-drive.
fn need_expand(path: &Path) -> bool {
if let Some(path_str) = path.to_str() {
let chars: Vec<char> = path_str.chars().collect();
if chars.len() >= 2 {
return chars[1] == ':' && (chars.len() == 2 || (chars[2] != '/' && chars[2] != '\\'));
}
}
false
}
#[derive(Clone, Debug)]
pub struct DriveToPwdMap {
map: [Option<String>; 26], // Fixed-size array for A-Z
}
impl Default for DriveToPwdMap {
fn default() -> Self {
Self::new()
}
}
impl DriveToPwdMap {
pub fn new() -> Self {
Self {
map: Default::default(),
}
}
pub fn env_var_for_drive(drive_letter: char) -> String {
let drive_letter = drive_letter.to_ascii_uppercase();
format!("={}:", drive_letter)
}
/// Collect PWD-per-drive as env vars (for child process)
pub fn get_env_vars(&self, env: &mut HashMap<String, String>) {
for (drive_index, drive_letter) in ('A'..='Z').enumerate() {
if let Some(pwd) = self.map[drive_index].clone() {
if pwd.len() > 3 {
let env_var_for_drive = Self::env_var_for_drive(drive_letter);
env.insert(env_var_for_drive, pwd);
}
}
}
}
/// Set the PWD for the drive letter in the absolute path.
/// Return PathError for error.
pub fn set_pwd(&mut self, path: &Path) -> Result<(), PathError> {
if let (Some(drive_letter), Some(path_str)) =
(Self::extract_drive_letter(path), path.to_str())
{
if drive_letter.is_ascii_alphabetic() {
let drive_letter = drive_letter.to_ascii_uppercase();
// Make sure saved drive letter is upper case
let mut c = path_str.chars();
match c.next() {
None => Err(PathError::InvalidDriveLetter),
Some(_) => {
let drive_index = drive_letter as usize - 'A' as usize;
let normalized_pwd = drive_letter.to_string() + c.as_str();
self.map[drive_index] = Some(normalized_pwd);
Ok(())
}
}
} else {
Err(PathError::InvalidDriveLetter)
}
} else {
Err(PathError::InvalidPath)
}
}
/// Get the PWD for drive, if not yet, ask GetFullPathNameW() or omnipath,
/// or else return default r"X:\".
fn get_pwd(&self, drive_letter: char) -> Result<String, PathError> {
if drive_letter.is_ascii_alphabetic() {
let drive_letter = drive_letter.to_ascii_uppercase();
let drive_index = drive_letter as usize - 'A' as usize;
Ok(self.map[drive_index].clone().unwrap_or_else(|| {
if let Some(sys_pwd) = get_full_path_name_w(&format!("{}:", drive_letter)) {
sys_pwd
} else {
format!(r"{}:\", drive_letter)
}
}))
} else {
Err(PathError::InvalidDriveLetter)
}
}
/// Expand a relative path using the PWD-per-drive, return PathBuf
/// of absolute path.
/// Return None if path is not valid or can't get drive letter.
pub fn expand_pwd(&self, path: &Path) -> Option<PathBuf> {
if need_expand(path) {
let path_str = path.to_str()?;
if let Some(drive_letter) = Self::extract_drive_letter(path) {
if let Ok(pwd) = self.get_pwd(drive_letter) {
// Combine current PWD with the relative path
let mut base = PathBuf::from(Self::ensure_trailing_delimiter(&pwd));
// need_expand() and extract_drive_letter() all ensure path_str.len() >= 2
base.push(&path_str[2..]); // Join PWD with path parts after "C:"
return Some(base);
}
}
}
None // Invalid path or has no drive letter
}
/// Helper to extract the drive letter from a path, keep case
/// (e.g., `C:test` -> `C`, `d:\temp` -> `d`)
fn extract_drive_letter(path: &Path) -> Option<char> {
path.to_str()
.and_then(|s| s.chars().next())
.filter(|c| c.is_ascii_alphabetic())
}
/// Ensure a path has a trailing `\\` or '/'
fn ensure_trailing_delimiter(path: &str) -> String {
if !path.ends_with('\\') && !path.ends_with('/') {
format!(r"{}\", path)
} else {
path.to_string()
}
}
}
fn get_full_path_name_w(path_str: &str) -> Option<String> {
use omnipath::sys_absolute;
if let Ok(path_sys_abs) = sys_absolute(Path::new(path_str)) {
Some(path_sys_abs.to_str()?.to_string())
} else {
None
}
}
/// Test for Drive2PWD map
#[cfg(test)]
mod tests {
use super::*;
/// Test or demo usage of PWD-per-drive
/// In doctest, there's no get_full_path_name_w available so can't foresee
/// possible result, here can have more accurate test assert
#[test]
fn test_usage_for_pwd_per_drive() {
let mut map = DriveToPwdMap::new();
// Set PWD for drive E
assert!(map.set_pwd(Path::new(r"E:\Users\Home")).is_ok());
// Expand a relative path
let expanded = map.expand_pwd(Path::new("e:test"));
assert_eq!(expanded, Some(PathBuf::from(r"E:\Users\Home\test")));
// Will NOT expand an absolute path
let expanded = map.expand_pwd(Path::new(r"E:\absolute\path"));
assert_eq!(expanded, None);
// Expand with no drive letter
let expanded = map.expand_pwd(Path::new(r"\no_drive"));
assert_eq!(expanded, None);
// Expand with no PWD set for the drive
let expanded = map.expand_pwd(Path::new("F:test"));
if let Some(sys_abs) = get_full_path_name_w("F:") {
assert_eq!(
expanded,
Some(PathBuf::from(format!(
"{}test",
DriveToPwdMap::ensure_trailing_delimiter(&sys_abs)
)))
);
} else {
assert_eq!(expanded, Some(PathBuf::from(r"F:\test")));
}
}
#[test]
fn test_get_env_vars() {
let mut map = DriveToPwdMap::new();
map.set_pwd(Path::new(r"I:\Home")).unwrap();
map.set_pwd(Path::new(r"j:\User")).unwrap();
let mut env = HashMap::<String, String>::new();
map.get_env_vars(&mut env);
assert_eq!(
env.get(&DriveToPwdMap::env_var_for_drive('I')).unwrap(),
r"I:\Home"
);
assert_eq!(
env.get(&DriveToPwdMap::env_var_for_drive('J')).unwrap(),
r"J:\User"
);
}
#[test]
fn test_expand_pwd() {
let mut drive_map = DriveToPwdMap::new();
// Set PWD for drive 'M:'
assert_eq!(drive_map.set_pwd(Path::new(r"M:\Users")), Ok(()));
// or 'm:'
assert_eq!(drive_map.set_pwd(Path::new(r"m:\Users\Home")), Ok(()));
// Expand a relative path on "M:"
let expanded = drive_map.expand_pwd(Path::new(r"M:test"));
assert_eq!(expanded, Some(PathBuf::from(r"M:\Users\Home\test")));
// or on "m:"
let expanded = drive_map.expand_pwd(Path::new(r"m:test"));
assert_eq!(expanded, Some(PathBuf::from(r"M:\Users\Home\test")));
// Expand an absolute path
let expanded = drive_map.expand_pwd(Path::new(r"m:\absolute\path"));
assert_eq!(expanded, None);
// Expand with no drive letter
let expanded = drive_map.expand_pwd(Path::new(r"\no_drive"));
assert_eq!(expanded, None);
// Expand with no PWD set for the drive
let expanded = drive_map.expand_pwd(Path::new("N:test"));
if let Some(pwd_on_drive) = get_full_path_name_w("N:") {
assert_eq!(
expanded,
Some(PathBuf::from(format!(
r"{}test",
DriveToPwdMap::ensure_trailing_delimiter(&pwd_on_drive)
)))
);
} else {
assert_eq!(expanded, Some(PathBuf::from(r"N:\test")));
}
}
#[test]
fn test_set_and_get_pwd() {
let mut drive_map = DriveToPwdMap::new();
// Set PWD for drive 'O'
assert!(drive_map.set_pwd(Path::new(r"O:\Users")).is_ok());
// Or for drive 'o'
assert!(drive_map.set_pwd(Path::new(r"o:\Users\Example")).is_ok());
// Get PWD for drive 'O'
assert_eq!(drive_map.get_pwd('O'), Ok(r"O:\Users\Example".to_string()));
// or 'o'
assert_eq!(drive_map.get_pwd('o'), Ok(r"O:\Users\Example".to_string()));
// Get PWD for drive P (not set yet, but system might already
// have PWD on this drive)
if let Some(pwd_on_drive) = get_full_path_name_w("P:") {
assert_eq!(drive_map.get_pwd('P'), Ok(pwd_on_drive));
} else {
assert_eq!(drive_map.get_pwd('P'), Ok(r"P:\".to_string()));
}
}
#[test]
fn test_set_pwd_invalid_path() {
let mut drive_map = DriveToPwdMap::new();
// Invalid path (no drive letter)
let result = drive_map.set_pwd(Path::new(r"\InvalidPath"));
assert!(result.is_err());
assert_eq!(result.unwrap_err(), PathError::InvalidPath);
}
#[test]
fn test_get_pwd_invalid_drive() {
let drive_map = DriveToPwdMap::new();
// Get PWD for a drive not set (e.g., Z)
assert_eq!(drive_map.get_pwd('Z'), Ok(r"Z:\".to_string()));
// Invalid drive letter (non-alphabetic)
assert_eq!(drive_map.get_pwd('1'), Err(PathError::InvalidDriveLetter));
}
}


@@ -9,7 +9,6 @@ use nu_utils::IgnoreCaseExt;
 use std::{
     collections::{HashMap, HashSet},
     fs::File,
-    path::{Path, PathBuf},
     sync::Arc,
 };
@@ -54,8 +53,6 @@ pub struct Stack {
     /// Locally updated config. Use [`.get_config()`](Self::get_config) to access correctly.
     pub config: Option<Arc<Config>>,
     pub(crate) out_dest: StackOutDest,
-    #[cfg(windows)]
-    pub pwd_per_drive: nu_path::DriveToPwdMap,
 }
 impl Default for Stack {
@@ -85,8 +82,6 @@ impl Stack {
             parent_deletions: vec![],
             config: None,
             out_dest: StackOutDest::new(),
-            #[cfg(windows)]
-            pwd_per_drive: nu_path::DriveToPwdMap::new(),
         }
     }
@@ -107,8 +102,6 @@ impl Stack {
             parent_deletions: vec![],
             config: parent.config.clone(),
             out_dest: parent.out_dest.clone(),
-            #[cfg(windows)]
-            pwd_per_drive: parent.pwd_per_drive.clone(),
             parent_stack: Some(parent),
         }
     }
@@ -135,10 +128,6 @@ impl Stack {
         unique_stack.env_hidden = child.env_hidden;
         unique_stack.active_overlays = child.active_overlays;
         unique_stack.config = child.config;
-        #[cfg(windows)]
-        {
-            unique_stack.pwd_per_drive = child.pwd_per_drive.clone();
-        }
         unique_stack
     }
@@ -330,8 +319,6 @@ impl Stack {
            parent_deletions: vec![],
            config: self.config.clone(),
            out_dest: self.out_dest.clone(),
-           #[cfg(windows)]
-           pwd_per_drive: self.pwd_per_drive.clone(),
        }
    }
@@ -365,8 +352,6 @@ impl Stack {
            parent_deletions: vec![],
            config: self.config.clone(),
            out_dest: self.out_dest.clone(),
-           #[cfg(windows)]
-           pwd_per_drive: self.pwd_per_drive.clone(),
        }
    }
@@ -776,29 +761,9 @@ impl Stack {
         let path = nu_path::strip_trailing_slash(path);
         let value = Value::string(path.to_string_lossy(), Span::unknown());
         self.add_env_var("PWD".into(), value);
-
-        // Sync with PWD-per-drive
-        #[cfg(windows)]
-        {
-            let _ = self.pwd_per_drive.set_pwd(&path);
-        }
         Ok(())
         }
     }
-
-    // Helper stub/proxy for nu_path::expand_path_with::<P, Q>(path, relative_to, expand_tilde)
-    // Facilitates file system commands to easily gain the ability to expand PWD-per-drive
-    pub fn expand_path_with<P, Q>(&self, path: P, relative_to: Q, expand_tilde: bool) -> PathBuf
-    where
-        P: AsRef<Path>,
-        Q: AsRef<Path>,
-    {
-        #[cfg(windows)]
-        if let Some(absolute_path) = self.pwd_per_drive.expand_pwd(path.as_ref()) {
-            return absolute_path;
-        }
-        nu_path::expand_path_with::<P, Q>(path, relative_to, expand_tilde)
-    }
 }
 #[cfg(test)]


@@ -101,8 +101,9 @@ export def intersperse [ # -> list<any>
 # Returns a list of intermediate steps performed by `reduce`
 # (`fold`). It takes two arguments, an initial value to seed the
 # initial state and a closure that takes two arguments, the first
-# being the internal state and the second the list element in the
-# current iteration.
+# being the list element in the current iteration and the second
+# the internal state.
+# The internal state is also provided as pipeline input.
 #
 # # Example
 # ```
@@ -123,7 +124,8 @@ export def scan [ # -> list<any>
     --noinit(-n) # remove the initial value from the result
 ] {
     reduce --fold [$init] {|it, acc|
-        $acc ++ [(do $fn ($acc | last) $it)]
+        let acc_last = $acc | last
+        $acc ++ [($acc_last | do $fn $it $acc_last)]
     }
     | if $noinit {
         $in | skip
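A small usage sketch of `scan` under the new closure argument order (element first, then the accumulated state, which is also available as pipeline input); it assumes the std `iter` module is imported:

    [1 2 3] | iter scan 0 {|it, acc| $acc + $it}                     # => [0, 1, 3, 6]
    [a b c d] | iter scan "" {|it, acc| [$acc, $it] | str join} -n   # => [a, ab, abc, abcd]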


@@ -49,7 +49,10 @@ def iter_scan [] {
     let scanned = ([1 2 3] | iter scan 0 {|x, y| $x + $y})
     assert equal $scanned [0, 1, 3, 6]
-    let scanned = ([a b c d] | iter scan "" {|x, y| [$x, $y] | str join} -n)
+    let scanned = ([a b c d] | iter scan "" {|it, acc| [$acc, $it] | str join} -n)
+    assert equal $scanned ["a" "ab" "abc" "abcd"]
+
+    let scanned = ([a b c d] | iter scan "" {|it, acc| append $it | str join} -n)
     assert equal $scanned ["a" "ab" "abc" "abcd"]
 }


@@ -20,6 +20,8 @@ $env.config.color_config = {
     row_index: green_bold
     record: white
     list: white
+    closure: green_bold
+    glob: cyan_bold
     block: white
     hints: dark_gray
     search_result: { bg: red fg: white }


@@ -17,11 +17,6 @@ $env.PROMPT_COMMAND = $env.PROMPT_COMMAND? | default {||
     $path_segment | str replace --all (char path_sep) $"($separator_color)(char path_sep)($path_color)"
 }
-$env.PROMPT_INDICATOR = $env.PROMPT_INDICATOR? | default "> "
-$env.PROMPT_INDICATOR_VI_NORMAL = $env.PROMPT_INDICATOR_VI_NORMAL? | default "> "
-$env.PROMPT_INDICATOR_VI_INSERT = $env.PROMPT_INDICATOR_VI_INSERT? | default ": "
-$env.PROMPT_MULTILINE_INDICATOR = $env.PROMPT_MULTILINE_INDICATOR? | default "::: "
 $env.PROMPT_COMMAND_RIGHT = $env.PROMPT_COMMAND_RIGHT? | default {||
     # create a right prompt in magenta with green separators and am/pm underlined
     let time_segment = ([


@@ -320,9 +320,29 @@ fn main() -> Result<()> {
         "NU_VERSION".to_string(),
         Value::string(env!("CARGO_PKG_VERSION"), Span::unknown()),
     );
     // Add SHLVL if interactive
     if engine_state.is_interactive {
+        engine_state.add_env_var("PROMPT_INDICATOR".to_string(), Value::test_string("> "));
+        engine_state.add_env_var(
+            "PROMPT_INDICATOR_VI_NORMAL".to_string(),
+            Value::test_string("> "),
+        );
+        engine_state.add_env_var(
+            "PROMPT_INDICATOR_VI_INSERT".to_string(),
+            Value::test_string(": "),
+        );
+        engine_state.add_env_var(
+            "PROMPT_MULTILINE_INDICATOR".to_string(),
+            Value::test_string("::: "),
+        );
+        engine_state.add_env_var(
+            "TRANSIENT_PROMPT_MULTILINE_INDICATOR".to_string(),
+            Value::test_string(""),
+        );
+        engine_state.add_env_var(
+            "TRANSIENT_PROMPT_COMMAND_RIGHT".to_string(),
+            Value::test_string(""),
+        );
+
         let mut shlvl = engine_state
             .get_env_var("SHLVL")
             .map(|x| x.as_str().unwrap_or("0").parse::<i64>().unwrap_or(0))


@@ -12,30 +12,6 @@ use nu_protocol::{
 };
 use nu_utils::perf;
-
-#[cfg(windows)]
-fn init_pwd_per_drive(engine_state: &EngineState, stack: &mut Stack) {
-    use nu_path::DriveToPwdMap;
-    use std::path::Path;
-
-    // Read environment for PWD-per-drive
-    for drive_letter in 'A'..='Z' {
-        let env_var = DriveToPwdMap::env_var_for_drive(drive_letter);
-        if let Some(env_pwd) = engine_state.get_env_var(&env_var) {
-            if let Ok(pwd_str) = nu_engine::env_to_string(&env_var, env_pwd, engine_state, stack) {
-                trace!("Get Env({}) {}", env_var, pwd_str);
-                let _ = stack.pwd_per_drive.set_pwd(Path::new(&pwd_str));
-                stack.remove_env_var(engine_state, &env_var);
-            }
-        }
-    }
-
-    if let Ok(abs_pwd) = engine_state.cwd(None) {
-        if let Some(abs_pwd_str) = abs_pwd.to_str() {
-            let _ = stack.pwd_per_drive.set_pwd(Path::new(abs_pwd_str));
-        }
-    }
-}
-
 pub(crate) fn run_commands(
     engine_state: &mut EngineState,
     parsed_nu_cli_args: command::NushellCliArgs,
@@ -50,8 +26,6 @@ pub(crate) fn run_commands(
     let create_scaffold = nu_path::nu_config_dir().map_or(false, |p| !p.exists());
     let mut stack = Stack::new();
-    #[cfg(windows)]
-    init_pwd_per_drive(engine_state, &mut stack);
     // if the --no-config-file(-n) option is NOT passed, load the plugin file,
     // load the default env file or custom (depending on parsed_nu_cli_args.env_file),
@@ -141,8 +115,6 @@ pub(crate) fn run_file(
 ) {
     trace!("run_file");
     let mut stack = Stack::new();
-    #[cfg(windows)]
-    init_pwd_per_drive(engine_state, &mut stack);
     // if the --no-config-file(-n) option is NOT passed, load the plugin file,
     // load the default env file or custom (depending on parsed_nu_cli_args.env_file),
@@ -210,9 +182,6 @@ pub(crate) fn run_repl(
 ) -> Result<(), miette::ErrReport> {
     trace!("run_repl");
     let mut stack = Stack::new();
-    #[cfg(windows)]
-    init_pwd_per_drive(engine_state, &mut stack);
     let start_time = std::time::Instant::now();
     if parsed_nu_cli_args.no_config_file.is_none() {