Mirror of https://github.com/nushell/nushell.git (synced 2024-11-21 16:03:19 +01:00)

Remove it expansion (#2701)

* Remove it-expansion, take 2
* Cleanup
* silly update to test CI

This commit is contained in:
parent 502c9ea706
commit 6951fb440c
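The diff below removes the implicit "it-expansion" step, in which a bare `$it` in a pipeline was rewritten behind the scenes (via `expand_it_usage`) before evaluation. A minimal before/after sketch of the user-facing change, built from pipelines that appear in the updated docs and tests in this commit; `$it` remains available inside explicit blocks such as `each`:

```shell
# before: a bare $it could be used at the end of a pipeline
> open Cargo.toml | get package.version | echo $it

# after: the value simply flows out of the pipeline
> open Cargo.toml | get package.version

# $it is still bound row-by-row inside a block
> echo [1 2 3] | each { echo $it 10 | math sum } | to json
```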
@ -219,15 +219,15 @@ We can pipeline this into a command that gets the contents of one of the columns
name │ nu
readme │ README.md
repository │ https://github.com/nushell/nushell
version │ 0.15.1
version │ 0.21.0
───────────────┴────────────────────────────────────
```

Finally, we can use commands outside of Nu once we have the data we want:

```shell
> open Cargo.toml | get package.version | echo $it
0.15.1
> open Cargo.toml | get package.version
0.21.0
```

Here we use the variable `$it` to refer to the value being piped to the external command.
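The stale `$it`-with-externals pattern shown above is rewritten throughout the test suite as well; a representative sketch of the substitution (taken from the `cd` tests later in this diff):

```shell
# before
> pwd | echo $it

# after: use an explicit invocation instead
> echo $(pwd)
```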
@ -393,13 +393,10 @@ pub async fn cli(mut context: EvaluationContext) -> Result<(), Box<dyn Error>> {
match nu_parser::lite_parse(&prompt_line, 0).map_err(ShellError::from) {
Ok(result) => {
let mut prompt_block =
nu_parser::classify_block(&result, context.registry());
let prompt_block = nu_parser::classify_block(&result, context.registry());
let env = context.get_env();
prompt_block.block.expand_it_usage();
match run_block(
&prompt_block.block,
&mut context,
@ -862,8 +859,7 @@ pub async fn parse_and_eval(line: &str, ctx: &mut EvaluationContext) -> Result<S
let lite_result = nu_parser::lite_parse(&line, 0)?;
// TODO ensure the command whose examples we're testing is actually in the pipeline
let mut classified_block = nu_parser::classify_block(&lite_result, ctx.registry());
classified_block.block.expand_it_usage();
let classified_block = nu_parser::classify_block(&lite_result, ctx.registry());
let input_stream = InputStream::empty();
let env = ctx.get_env();
@ -904,7 +900,7 @@ pub async fn process_line(
debug!("=== Parsed ===");
debug!("{:#?}", result);
let mut classified_block = nu_parser::classify_block(&result, ctx.registry());
let classified_block = nu_parser::classify_block(&result, ctx.registry());
debug!("{:#?}", classified_block);
//println!("{:#?}", pipeline);
@ -1021,8 +1017,6 @@ pub async fn process_line(
InputStream::empty()
};
classified_block.block.expand_it_usage();
trace!("{:#?}", classified_block);
let env = ctx.get_env();
match run_block(
@ -4,7 +4,7 @@ use crate::prelude::*;
use nu_data::config;
use nu_errors::ShellError;
use nu_parser::SignatureRegistry;
use nu_protocol::hir::{ClassifiedCommand, Expression, NamedValue, SpannedExpression, Variable};
use nu_protocol::hir::{ClassifiedCommand, Expression, NamedValue, SpannedExpression};
use nu_protocol::{
hir::Block, CommandAction, NamedType, PositionalType, ReturnSuccess, Signature, SyntaxShape,
UntaggedValue, Value,
@ -243,7 +243,7 @@ fn find_expr_shapes(
Expression::Block(b) => find_block_shapes(&b, registry),
Expression::Path(path) => match &path.head.expr {
Expression::Invocation(b) => find_block_shapes(&b, registry),
Expression::Variable(Variable::Other(var, _)) => {
Expression::Variable(var, _) => {
let mut result = HashMap::new();
result.insert(var.to_string(), (spanned_expr.span, None));
Ok(result)
@ -219,33 +219,15 @@ fn spawn(
UntaggedValue::Primitive(Primitive::Nothing) => continue,
UntaggedValue::Primitive(Primitive::String(s))
| UntaggedValue::Primitive(Primitive::Line(s)) => {
if let Err(e) = stdin_write.write(s.as_bytes()) {
let message = format!("Unable to write to stdin (error = {})", e);
let _ = stdin_write_tx.send(Ok(Value {
value: UntaggedValue::Error(ShellError::labeled_error(
message,
"application may have closed before completing pipeline",
&stdin_name_tag,
)),
tag: stdin_name_tag,
}));
return Err(());
if stdin_write.write(s.as_bytes()).is_err() {
// Other side has closed, so exit
return Ok(());
}
}
UntaggedValue::Primitive(Primitive::Binary(b)) => {
if let Err(e) = stdin_write.write(b) {
let message = format!("Unable to write to stdin (error = {})", e);
let _ = stdin_write_tx.send(Ok(Value {
value: UntaggedValue::Error(ShellError::labeled_error(
message,
"application may have closed before completing pipeline",
&stdin_name_tag,
)),
tag: stdin_name_tag,
}));
return Err(());
if stdin_write.write(b).is_err() {
// Other side has closed, so exit
return Ok(());
}
}
unsupported => {
@ -97,7 +97,7 @@ pub async fn process_row(
&block,
Arc::make_mut(&mut context),
input_stream,
Scope::append_it(scope, input),
Scope::append_var(scope, "$it", input),
)
.await?
.to_output_stream())
@ -187,7 +187,7 @@ async fn process_row(
let for_block = input.clone();
let input_stream = once(async { Ok(for_block) }).to_input_stream();
let scope = Scope::append_it(scope, input.clone());
let scope = Scope::append_var(scope, "$it", input.clone());
let mut stream = run_block(
&default_block,
@ -50,7 +50,7 @@ impl WholeStreamCommand for Command {
},
Example {
description: "restrict the flattening by passing column names",
example: "echo [[origin, crate, versions]; [World, $(echo [[name]; ['nu-cli']]), ['0.21', '0.22']]] | flatten versions | last | = $it.versions",
example: "echo [[origin, crate, versions]; [World, $(echo [[name]; ['nu-cli']]), ['0.21', '0.22']]] | flatten versions | last | get versions",
result: Some(vec![Value::from("0.22")]),
}
]
@ -83,7 +83,7 @@ async fn format_command(
let result = evaluate_baseline_expr(
&full_column_path.0,
&registry,
Scope::append_it(scope.clone(), value.clone()),
Scope::append_var(scope.clone(), "$it", value.clone()),
)
.await;
@ -121,7 +121,7 @@ async fn if_command(
let then_case = then_case.clone();
let else_case = else_case.clone();
let registry = registry.clone();
let scope = Scope::append_it(scope.clone(), input);
let scope = Scope::append_var(scope.clone(), "$it", input);
let mut context = context.clone();
async move {
@ -87,7 +87,7 @@ async fn process_row(
let for_block = input.clone();
let input_stream = once(async { Ok(for_block) }).to_input_stream();
let scope = Scope::append_it(scope, input.clone());
let scope = Scope::append_var(scope, "$it", input.clone());
let result = run_block(&block, Arc::make_mut(&mut context), input_stream, scope).await;
@ -140,7 +140,7 @@ async fn process_row(
value: UntaggedValue::Primitive(Primitive::Nothing),
..
} => match scope
.it()
.var("$it")
.unwrap_or_else(|| UntaggedValue::nothing().into_untagged_value())
.insert_data_at_column_path(&field, value.clone())
{
@ -37,7 +37,7 @@ impl WholeStreamCommand for IntoInt {
fn examples(&self) -> Vec<Example> {
vec![Example {
description: "Convert filesize to integer",
example: "echo 1kb | into-int $it | = $it / 1024",
example: "into-int 1kb | each { = $it / 1024 }",
result: Some(vec![UntaggedValue::int(1).into()]),
}]
}
@ -85,7 +85,7 @@ impl WholeStreamCommand for SubCommand {
.take_while(move |item| {
let condition = condition.clone();
let registry = registry.clone();
let scope = Scope::append_it(scope.clone(), item.clone());
let scope = Scope::append_var(scope.clone(), "$it", item.clone());
trace!("ITEM = {:?}", item);
async move {
@ -84,7 +84,7 @@ impl WholeStreamCommand for SubCommand {
.take_while(move |item| {
let condition = condition.clone();
let registry = registry.clone();
let scope = Scope::append_it(scope.clone(), item.clone());
let scope = Scope::append_var(scope.clone(), "$it", item.clone());
trace!("ITEM = {:?}", item);
async move {
@ -213,7 +213,8 @@ pub async fn fetch(
)),
};
let res = std::fs::read(location)?;
let res = std::fs::read(location)
.map_err(|_| ShellError::labeled_error("Can't open filename given", "can't open", span))?;
// If no encoding is provided we try to guess the encoding to read the file with
let encoding = if encoding_choice.is_none() {
@ -87,7 +87,7 @@ async fn process_row(
let row_clone = row.clone();
let input_stream = once(async { Ok(row_clone) }).to_input_stream();
let scope = Scope::append_it(scope, row);
let scope = Scope::append_var(scope, "$it", row);
Ok(run_block(&block, Arc::make_mut(&mut context), input_stream, scope).await?)
}
@ -145,7 +145,7 @@ async fn reduce(
UntaggedValue::table(&values).into_untagged_value()
};
let scope = Scope::append_var(scope, "$acc".into(), f);
let scope = Scope::append_var(scope, "$acc", f);
process_row(block, scope, context, row).await
}
})
@ -173,7 +173,7 @@ async fn reduce(
UntaggedValue::table(&values).into_untagged_value()
};
let scope = Scope::append_var(scope, "$acc".into(), f);
let scope = Scope::append_var(scope, "$acc", f);
process_row(block, scope, context, row).await
}
})
@ -84,7 +84,7 @@ impl WholeStreamCommand for SubCommand {
.skip_while(move |item| {
let condition = condition.clone();
let registry = registry.clone();
let scope = Scope::append_it(scope.clone(), item.clone());
let scope = Scope::append_var(scope.clone(), "$it", item.clone());
trace!("ITEM = {:?}", item);
async move {
@ -85,7 +85,7 @@ impl WholeStreamCommand for SubCommand {
let item = item.clone();
let condition = condition.clone();
let registry = registry.clone();
let scope = Scope::append_it(scope.clone(), item.clone());
let scope = Scope::append_var(scope.clone(), "$it", item.clone());
trace!("ITEM = {:?}", item);
async move {
@ -92,7 +92,7 @@ async fn process_row(
let for_block = input.clone();
let input_stream = once(async { Ok(for_block) }).to_input_stream();
let scope = Scope::append_it(scope, input.clone());
let scope = Scope::append_var(scope, "$it", input.clone());
let result = run_block(&block, Arc::make_mut(&mut context), input_stream, scope).await;
@ -149,7 +149,7 @@ async fn process_row(
value: UntaggedValue::Primitive(Primitive::Nothing),
..
} => match scope
.it()
.var("$it")
.unwrap_or_else(|| UntaggedValue::nothing().into_untagged_value())
.replace_data_at_column_path(&field, replacement.clone())
{
@ -106,7 +106,7 @@ async fn where_command(
.filter_map(move |input| {
let condition = condition.clone();
let registry = registry.clone();
let scope = Scope::append_it(scope.clone(), input.clone());
let scope = Scope::append_var(scope.clone(), "$it", input.clone());
async move {
//FIXME: should we use the scope that's brought in as well?
@ -38,7 +38,7 @@ impl<'s> Flatten<'s> {
.collect(),
Expression::Command => vec![LocationType::Command.spanned(e.span)],
Expression::Path(path) => self.expression(&path.head),
Expression::Variable(_) => vec![LocationType::Variable.spanned(e.span)],
Expression::Variable(_, _) => vec![LocationType::Variable.spanned(e.span)],
Expression::Boolean(_)
| Expression::FilePath(_)
@ -32,7 +32,7 @@ pub(crate) async fn evaluate_baseline_expr(
Expression::Synthetic(hir::Synthetic::String(s)) => {
Ok(UntaggedValue::string(s).into_untagged_value())
}
Expression::Variable(var) => evaluate_reference(&var, scope, tag),
Expression::Variable(var, _) => evaluate_reference(&var, scope, tag),
Expression::Command => unimplemented!(),
Expression::Invocation(block) => evaluate_invocation(block, registry, scope).await,
Expression::ExternalCommand(_) => unimplemented!(),
@ -199,38 +199,36 @@ fn evaluate_literal(literal: &hir::Literal, span: Span) -> Value {
}
}
fn evaluate_reference(
name: &hir::Variable,
scope: Arc<Scope>,
tag: Tag,
) -> Result<Value, ShellError> {
fn evaluate_reference(name: &str, scope: Arc<Scope>, tag: Tag) -> Result<Value, ShellError> {
match name {
hir::Variable::It(_) => match scope.it() {
"$nu" => crate::evaluate::variables::nu(&scope.env(), tag),
"$true" => Ok(Value {
value: UntaggedValue::boolean(true),
tag,
}),
"$false" => Ok(Value {
value: UntaggedValue::boolean(false),
tag,
}),
"$it" => match scope.var("$it") {
Some(v) => Ok(v),
None => Err(ShellError::labeled_error(
"$it variable not in scope",
"not in scope (are you missing an 'each'?)",
"Variable not in scope",
"missing '$it' (note: $it is only available inside of a block)",
tag.span,
)),
},
hir::Variable::Other(name, _) => match name {
x if x == "$nu" => crate::evaluate::variables::nu(&scope.env(), tag),
x if x == "$true" => Ok(Value {
value: UntaggedValue::boolean(true),
tag,
}),
x if x == "$false" => Ok(Value {
value: UntaggedValue::boolean(false),
tag,
}),
x => match scope.var(x) {
Some(v) => Ok(v),
None => Err(ShellError::labeled_error(
"Variable not in scope",
"unknown variable",
tag.span,
)),
},
x => match scope.var(x) {
Some(v) => Ok(v),
None => Err(ShellError::labeled_error(
"Variable not in scope",
"unknown variable",
tag.span,
)),
},
}
}
@ -244,7 +242,7 @@ async fn evaluate_invocation(
let mut context = EvaluationContext::basic()?;
context.registry = registry.clone();
let input = match scope.it() {
let input = match scope.var("$it") {
Some(it) => InputStream::one(it),
None => InputStream::empty(),
};
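With the dedicated `hir::Variable::It` variant gone, `$it` is looked up by name like any other variable, so it only resolves inside a block that binds it. A rough behavioural sketch (the error text comes from the hunk above; the exact rendering of the message is an assumption):

```shell
# inside a block, $it is bound to each input value as before
> echo [1 2 3] | each { = $it + 10 }

# at the top level nothing binds $it, so evaluation now reports:
# Variable not in scope: missing '$it' (note: $it is only available inside of a block)
> echo $it
```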
@ -200,8 +200,7 @@ fn parse_line(line: &str, ctx: &mut EvaluationContext) -> Result<ClassifiedBlock
let lite_result = nu_parser::lite_parse(&line, 0)?;
// TODO ensure the command whose examples we're testing is actually in the pipeline
let mut classified_block = nu_parser::classify_block(&lite_result, ctx.registry());
classified_block.block.expand_it_usage();
let classified_block = nu_parser::classify_block(&lite_result, ctx.registry());
Ok(classified_block)
}
@ -737,15 +737,14 @@ impl Shell for FilesystemShell {
let mut codec = MaybeTextCodec::new(with_encoding);
match codec.decode(&mut bytes_mut).map_err(|e| {
ShellError::unexpected(format!("AsyncRead failed in open function: {:?}", e))
match codec.decode(&mut bytes_mut).map_err(|_| {
ShellError::labeled_error("Error opening file", "error opening file", name)
})? {
Some(sb) => Ok(futures::stream::iter(vec![Ok(sb)].into_iter()).boxed()),
None => Ok(futures::stream::iter(vec![].into_iter()).boxed()),
}
} else {
// We don't know that this is a finite file, so treat it as a stream
let f = std::fs::File::open(&path).map_err(|e| {
ShellError::labeled_error(
format!("Error opening file: {:?}", e),
@ -755,8 +754,8 @@ impl Shell for FilesystemShell {
})?;
let async_reader = futures::io::AllowStdIo::new(f);
let sob_stream = FramedRead::new(async_reader, MaybeTextCodec::new(with_encoding))
.map_err(|e| {
ShellError::unexpected(format!("AsyncRead failed in open function: {:?}", e))
.map_err(move |_| {
ShellError::labeled_error("Error opening file", "error opening file", name)
})
.into_stream();
@ -21,7 +21,6 @@ fn adds_a_row_to_the_end() {
|
||||
| lines
|
||||
| append "pollo loco"
|
||||
| nth 3
|
||||
| echo $it
|
||||
"#
|
||||
));
|
||||
|
||||
|
@ -71,7 +71,7 @@ fn cal_sees_pipeline_year() {
|
||||
let actual = nu!(
|
||||
cwd: ".", pipeline(
|
||||
r#"
|
||||
echo 1020 | cal --full-year $it | get monday | first 3 | to json
|
||||
cal --full-year 1020 | get monday | first 3 | to json
|
||||
"#
|
||||
));
|
||||
|
||||
|
@ -10,7 +10,7 @@ fn filesystem_change_from_current_directory_using_relative_path() {
|
||||
cwd: dirs.root(),
|
||||
r#"
|
||||
cd cd_test_1
|
||||
pwd | echo $it
|
||||
echo $(pwd)
|
||||
"#
|
||||
);
|
||||
|
||||
@ -25,7 +25,7 @@ fn filesystem_change_from_current_directory_using_absolute_path() {
|
||||
cwd: dirs.test(),
|
||||
r#"
|
||||
cd "{}"
|
||||
pwd | echo $it
|
||||
echo $(pwd)
|
||||
"#,
|
||||
dirs.formats()
|
||||
);
|
||||
@ -44,7 +44,7 @@ fn filesystem_switch_back_to_previous_working_directory() {
|
||||
r#"
|
||||
cd {}
|
||||
cd -
|
||||
pwd | echo $it
|
||||
echo $(pwd)
|
||||
"#,
|
||||
dirs.test()
|
||||
);
|
||||
@ -62,7 +62,7 @@ fn filesytem_change_from_current_directory_using_relative_path_and_dash() {
|
||||
cwd: dirs.test(),
|
||||
r#"
|
||||
cd odin/-
|
||||
pwd | echo $it
|
||||
echo $(pwd)
|
||||
"#
|
||||
);
|
||||
|
||||
@ -80,7 +80,7 @@ fn filesystem_change_current_directory_to_parent_directory() {
|
||||
cwd: dirs.test(),
|
||||
r#"
|
||||
cd ..
|
||||
pwd | echo $it
|
||||
echo $(pwd)
|
||||
"#
|
||||
);
|
||||
|
||||
@ -97,7 +97,7 @@ fn filesystem_change_current_directory_to_two_parents_up_using_multiple_dots() {
|
||||
cwd: dirs.test().join("foo/bar"),
|
||||
r#"
|
||||
cd ...
|
||||
pwd | echo $it
|
||||
echo $(pwd)
|
||||
"#
|
||||
);
|
||||
|
||||
@ -116,7 +116,7 @@ fn filesystem_change_current_directory_to_parent_directory_after_delete_cwd() {
|
||||
rm {}/foo/bar
|
||||
echo ","
|
||||
cd ..
|
||||
pwd | echo $it
|
||||
echo $(pwd)
|
||||
"#,
|
||||
dirs.test()
|
||||
);
|
||||
@ -135,7 +135,7 @@ fn filesystem_change_to_home_directory() {
|
||||
cwd: dirs.test(),
|
||||
r#"
|
||||
cd ~
|
||||
pwd | echo $it
|
||||
echo $(pwd)
|
||||
"#
|
||||
);
|
||||
|
||||
@ -152,7 +152,7 @@ fn filesystem_change_to_a_directory_containing_spaces() {
|
||||
cwd: dirs.test(),
|
||||
r#"
|
||||
cd "robalino turner katz"
|
||||
pwd | echo $it
|
||||
echo $(pwd)
|
||||
"#
|
||||
);
|
||||
|
||||
@ -219,7 +219,7 @@ fn filesystem_change_directory_to_symlink_relative() {
|
||||
cwd: dirs.test().join("boo"),
|
||||
r#"
|
||||
cd ../foo_link
|
||||
pwd | echo $it
|
||||
echo $(pwd)
|
||||
"#
|
||||
);
|
||||
|
||||
@ -249,7 +249,7 @@ fn valuesystem_change_from_current_path_using_relative_path() {
|
||||
r#"
|
||||
enter sample.toml
|
||||
cd bin
|
||||
pwd | echo $it
|
||||
pwd
|
||||
exit
|
||||
"#
|
||||
);
|
||||
@ -283,7 +283,7 @@ fn valuesystem_change_from_current_path_using_absolute_path() {
|
||||
enter sample.toml
|
||||
cd bin
|
||||
cd /dependencies
|
||||
pwd | echo $it
|
||||
pwd
|
||||
exit
|
||||
"#
|
||||
);
|
||||
@ -319,7 +319,7 @@ fn valuesystem_switch_back_to_previous_working_path() {
|
||||
cd dependencies
|
||||
cd /bin
|
||||
cd -
|
||||
pwd | echo $it
|
||||
pwd
|
||||
exit
|
||||
"#
|
||||
);
|
||||
@ -353,7 +353,7 @@ fn valuesystem_change_from_current_path_using_relative_path_and_dash() {
|
||||
cd package/-
|
||||
cd /bin
|
||||
cd -
|
||||
pwd | echo $it
|
||||
pwd
|
||||
exit
|
||||
"#
|
||||
);
|
||||
@ -380,7 +380,7 @@ fn valuesystem_change_current_path_to_parent_path() {
|
||||
enter sample.toml
|
||||
cd package/emberenios
|
||||
cd ..
|
||||
pwd | echo $it
|
||||
pwd
|
||||
exit
|
||||
"#
|
||||
);
|
||||
@ -405,7 +405,7 @@ fn valuesystem_change_to_a_path_containing_spaces() {
|
||||
r#"
|
||||
enter sample.toml
|
||||
cd "pa que te"
|
||||
pwd | echo $it
|
||||
pwd
|
||||
exit
|
||||
"#
|
||||
);
|
||||
|
@ -26,7 +26,6 @@ fn discards_rows_where_given_column_is_empty() {
|
||||
| get amigos
|
||||
| compact rusty_luck
|
||||
| count
|
||||
| echo $it
|
||||
"#
|
||||
));
|
||||
|
||||
@ -43,7 +42,6 @@ fn discards_empty_rows_by_default() {
|
||||
| from json
|
||||
| compact
|
||||
| count
|
||||
| echo $it
|
||||
"#
|
||||
));
|
||||
|
||||
|
@ -27,7 +27,6 @@ fn adds_row_data_if_column_missing() {
|
||||
| default rusty_luck 1
|
||||
| where rusty_luck == 1
|
||||
| count
|
||||
| echo $it
|
||||
"#
|
||||
));
|
||||
|
||||
|
@ -4,7 +4,7 @@ use nu_test_support::{nu, pipeline};
|
||||
fn drop_rows() {
|
||||
let actual = nu!(
|
||||
cwd: "tests/fixtures/formats",
|
||||
r#"echo '[{"foo": 3}, {"foo": 8}, {"foo": 4}]' | from json | drop 2 | get foo | math sum | echo $it"#
|
||||
r#"echo '[{"foo": 3}, {"foo": 8}, {"foo": 4}]' | from json | drop 2 | get foo | math sum "#
|
||||
);
|
||||
|
||||
assert_eq!(actual.out, "3");
|
||||
|
@ -5,7 +5,7 @@ fn each_works_separately() {
|
||||
let actual = nu!(
|
||||
cwd: "tests/fixtures/formats", pipeline(
|
||||
r#"
|
||||
echo [1 2 3] | each { echo $it 10 | math sum } | to json | echo $it
|
||||
echo [1 2 3] | each { echo $it 10 | math sum } | to json
|
||||
"#
|
||||
));
|
||||
|
||||
|
@ -5,7 +5,7 @@ fn echo_range_is_lazy() {
|
||||
let actual = nu!(
|
||||
cwd: "tests/fixtures/formats", pipeline(
|
||||
r#"
|
||||
echo 1..10000000000 | first 3 | echo $it | to json
|
||||
echo 1..10000000000 | first 3 | to json
|
||||
"#
|
||||
));
|
||||
|
||||
|
@ -75,7 +75,7 @@ fn passing_a_block_will_set_contents_on_empty_cells_and_leave_non_empty_ones_unt
|
||||
]
|
||||
| empty? LVL { = 9 }
|
||||
| empty? HP {
|
||||
get LVL | = $it * 1000
|
||||
= $it.LVL * 1000
|
||||
}
|
||||
| math sum
|
||||
| get HP
|
||||
|
@ -18,7 +18,6 @@ fn gets_first_rows_by_amount() {
|
||||
ls
|
||||
| first 3
|
||||
| count
|
||||
| echo $it
|
||||
"#
|
||||
));
|
||||
|
||||
@ -42,7 +41,6 @@ fn gets_all_rows_if_amount_higher_than_all_rows() {
|
||||
ls
|
||||
| first 99
|
||||
| count
|
||||
| echo $it
|
||||
"#
|
||||
));
|
||||
|
||||
@ -61,7 +59,6 @@ fn gets_first_row_when_no_amount_given() {
|
||||
ls
|
||||
| first
|
||||
| count
|
||||
| echo $it
|
||||
"#
|
||||
));
|
||||
|
||||
|
@ -10,7 +10,6 @@ fn creates_the_resulting_string_from_the_given_fields() {
|
||||
open cargo_sample.toml
|
||||
| get package
|
||||
| format "{name} has license {license}"
|
||||
| echo $it
|
||||
"#
|
||||
));
|
||||
|
||||
@ -24,7 +23,6 @@ fn given_fields_can_be_column_paths() {
|
||||
r#"
|
||||
open cargo_sample.toml
|
||||
| format "{package.name} is {package.description}"
|
||||
| echo $it
|
||||
"#
|
||||
));
|
||||
|
||||
@ -38,7 +36,6 @@ fn can_use_variables() {
|
||||
r#"
|
||||
open cargo_sample.toml
|
||||
| format "{$it.package.name} is {$it.package.description}"
|
||||
| echo $it
|
||||
"#
|
||||
));
|
||||
|
||||
|
@ -17,7 +17,6 @@ fn fetches_a_row() {
|
||||
r#"
|
||||
open sample.toml
|
||||
| get nu_party_venue
|
||||
| echo $it
|
||||
"#
|
||||
));
|
||||
|
||||
@ -44,7 +43,6 @@ fn fetches_by_index() {
|
||||
r#"
|
||||
open sample.toml
|
||||
| get package.authors.2
|
||||
| echo $it
|
||||
"#
|
||||
));
|
||||
|
||||
@ -67,7 +65,6 @@ fn fetches_by_column_path() {
|
||||
r#"
|
||||
open sample.toml
|
||||
| get package.name
|
||||
| echo $it
|
||||
"#
|
||||
));
|
||||
|
||||
@ -93,7 +90,6 @@ fn column_paths_are_either_double_quoted_or_regular_unquoted_words_separated_by_
|
||||
open sample.toml
|
||||
| get package."9999"
|
||||
| count
|
||||
| echo $it
|
||||
"#
|
||||
));
|
||||
|
||||
@ -127,7 +123,6 @@ fn fetches_more_than_one_column_path() {
|
||||
open sample.toml
|
||||
| get fortune_tellers.2.name fortune_tellers.0.name fortune_tellers.1.name
|
||||
| nth 2
|
||||
| echo $it
|
||||
"#
|
||||
));
|
||||
|
||||
@ -250,7 +245,7 @@ fn errors_fetching_by_index_out_of_bounds() {
|
||||
fn quoted_column_access() {
|
||||
let actual = nu!(
|
||||
cwd: "tests/fixtures/formats",
|
||||
r#"echo '[{"foo bar": {"baz": 4}}]' | from json | get "foo bar".baz | echo $it"#
|
||||
r#"echo '[{"foo bar": {"baz": 4}}]' | from json | get "foo bar".baz "#
|
||||
);
|
||||
|
||||
assert_eq!(actual.out, "4");
|
||||
|
@ -22,7 +22,6 @@ fn groups() {
|
||||
| group-by rusty_at
|
||||
| get "10/11/2013"
|
||||
| count
|
||||
| echo $it
|
||||
"#
|
||||
));
|
||||
|
||||
|
@ -22,7 +22,6 @@ fn summarizes_by_column_given() {
|
||||
| histogram rusty_at countries
|
||||
| where rusty_at == "Ecuador"
|
||||
| get countries
|
||||
| echo $it
|
||||
"#
|
||||
));
|
||||
|
||||
@ -55,7 +54,6 @@ fn summarizes_by_values() {
|
||||
| histogram
|
||||
| where value == "Estados Unidos"
|
||||
| get count
|
||||
| echo $it
|
||||
"#
|
||||
));
|
||||
|
||||
|
@ -8,7 +8,6 @@ fn sets_the_column_from_a_block_run_output() {
|
||||
open cargo_sample.toml
|
||||
| insert dev-dependencies.newdep "1"
|
||||
| get dev-dependencies.newdep
|
||||
| echo $it
|
||||
"#
|
||||
));
|
||||
|
||||
@ -24,7 +23,6 @@ fn sets_the_column_from_a_block_full_stream_output() {
|
||||
| insert content { open --raw cargo_sample.toml | lines | first 5 }
|
||||
| get content.1
|
||||
| str contains "nu"
|
||||
| echo $it
|
||||
"#
|
||||
));
|
||||
|
||||
@ -40,7 +38,6 @@ fn sets_the_column_from_an_invocation() {
|
||||
| insert content $(open --raw cargo_sample.toml | lines | first 5)
|
||||
| get content.1
|
||||
| str contains "nu"
|
||||
| echo $it
|
||||
"#
|
||||
));
|
||||
|
||||
|
@ -5,7 +5,7 @@ fn into_int_filesize() {
|
||||
let actual = nu!(
|
||||
cwd: ".", pipeline(
|
||||
r#"
|
||||
into-int 1kb | = $it / 1024
|
||||
into-int 1kb | each {= $it / 1024 }
|
||||
"#
|
||||
));
|
||||
|
||||
@ -17,7 +17,7 @@ fn into_int_int() {
|
||||
let actual = nu!(
|
||||
cwd: ".", pipeline(
|
||||
r#"
|
||||
into-int 1024 | = $it / 1024
|
||||
into-int 1024 | each {= $it / 1024 }
|
||||
"#
|
||||
));
|
||||
|
||||
|
@ -23,7 +23,6 @@ fn rows() {
|
||||
| keep 3
|
||||
| get lucky_code
|
||||
| math sum
|
||||
| echo $it
|
||||
"#
|
||||
));
|
||||
|
||||
|
@ -43,7 +43,6 @@ fn condition_is_met() {
|
||||
| str to-int "31/04/2020"
|
||||
| get "31/04/2020"
|
||||
| math sum
|
||||
| echo $it
|
||||
"#
|
||||
));
|
||||
|
||||
|
@ -42,7 +42,6 @@ fn condition_is_met() {
|
||||
| str to-int "31/04/2020"
|
||||
| get "31/04/2020"
|
||||
| math sum
|
||||
| echo $it
|
||||
"#
|
||||
));
|
||||
|
||||
|
@ -6,7 +6,7 @@ use nu_test_support::{nu, pipeline};
|
||||
fn gets_the_last_row() {
|
||||
let actual = nu!(
|
||||
cwd: "tests/fixtures/formats",
|
||||
"ls | sort-by name | last 1 | get name | str trim | echo $it"
|
||||
"ls | sort-by name | last 1 | get name | str trim"
|
||||
);
|
||||
|
||||
assert_eq!(actual.out, "utf16.ini");
|
||||
@ -28,7 +28,6 @@ fn gets_last_rows_by_amount() {
|
||||
ls
|
||||
| last 3
|
||||
| count
|
||||
| echo $it
|
||||
"#
|
||||
));
|
||||
|
||||
@ -47,7 +46,6 @@ fn gets_last_row_when_no_amount_given() {
|
||||
ls
|
||||
| last
|
||||
| count
|
||||
| echo $it
|
||||
"#
|
||||
));
|
||||
|
||||
|
@ -13,7 +13,6 @@ fn lines() {
|
||||
| split column "="
|
||||
| get Column1
|
||||
| str trim
|
||||
| echo $it
|
||||
"#
|
||||
));
|
||||
|
||||
|
@ -16,7 +16,6 @@ fn lists_regular_files() {
|
||||
r#"
|
||||
ls
|
||||
| count
|
||||
| echo $it
|
||||
"#
|
||||
));
|
||||
|
||||
@ -39,7 +38,6 @@ fn lists_regular_files_using_asterisk_wildcard() {
|
||||
r#"
|
||||
ls *.txt
|
||||
| count
|
||||
| echo $it
|
||||
"#
|
||||
));
|
||||
|
||||
@ -62,7 +60,6 @@ fn lists_regular_files_using_question_mark_wildcard() {
|
||||
r#"
|
||||
ls *.??.txt
|
||||
| count
|
||||
| echo $it
|
||||
"#
|
||||
));
|
||||
|
||||
@ -90,9 +87,8 @@ fn lists_all_files_in_directories_from_stream() {
|
||||
cwd: dirs.test(), pipeline(
|
||||
r#"
|
||||
echo dir_a dir_b
|
||||
| ls $it
|
||||
| each { ls $it }
|
||||
| count
|
||||
| echo $it
|
||||
"#
|
||||
));
|
||||
|
||||
@ -110,7 +106,6 @@ fn does_not_fail_if_glob_matches_empty_directory() {
|
||||
r#"
|
||||
ls dir_a
|
||||
| count
|
||||
| echo $it
|
||||
"#
|
||||
));
|
||||
|
||||
@ -147,7 +142,7 @@ fn list_files_from_two_parents_up_using_multiple_dots() {
|
||||
let actual = nu!(
|
||||
cwd: dirs.test().join("foo/bar"),
|
||||
r#"
|
||||
ls ... | count | echo $it
|
||||
ls ... | count
|
||||
"#
|
||||
);
|
||||
|
||||
@ -171,7 +166,6 @@ fn lists_hidden_file_when_explicitly_specified() {
|
||||
r#"
|
||||
ls .testdotfile
|
||||
| count
|
||||
| echo $it
|
||||
"#
|
||||
));
|
||||
|
||||
@ -206,7 +200,6 @@ fn lists_all_hidden_files_when_glob_contains_dot() {
|
||||
r#"
|
||||
ls **/.*
|
||||
| count
|
||||
| echo $it
|
||||
"#
|
||||
));
|
||||
|
||||
@ -244,7 +237,6 @@ fn lists_all_hidden_files_when_glob_does_not_contain_dot() {
|
||||
r#"
|
||||
ls **/*
|
||||
| count
|
||||
| echo $it
|
||||
"#
|
||||
));
|
||||
|
||||
@ -268,7 +260,6 @@ fn lists_files_including_starting_with_dot() {
|
||||
r#"
|
||||
ls -a
|
||||
| count
|
||||
| echo $it
|
||||
"#
|
||||
));
|
||||
|
||||
|
@ -8,7 +8,6 @@ fn can_average_numbers() {
|
||||
open sgml_description.json
|
||||
| get glossary.GlossDiv.GlossList.GlossEntry.Sections
|
||||
| math avg
|
||||
| echo $it
|
||||
"#
|
||||
));
|
||||
|
||||
@ -19,7 +18,7 @@ fn can_average_numbers() {
|
||||
fn can_average_bytes() {
|
||||
let actual = nu!(
|
||||
cwd: "tests/fixtures/formats",
|
||||
"ls | sort-by name | skip 1 | first 2 | get size | math avg | format \"{$it}\" | echo $it"
|
||||
"ls | sort-by name | skip 1 | first 2 | get size | math avg | format \"{$it}\" "
|
||||
);
|
||||
|
||||
assert_eq!(actual.out, "1.6 KB");
|
||||
|
@ -7,7 +7,6 @@ fn median_numbers_with_even_rows() {
|
||||
r#"
|
||||
echo [10 6 19 21 4]
|
||||
| math median
|
||||
| echo $it
|
||||
"#
|
||||
));
|
||||
|
||||
@ -21,7 +20,6 @@ fn median_numbers_with_odd_rows() {
|
||||
r#"
|
||||
echo [3 8 9 12 12 15]
|
||||
| math median
|
||||
| echo $it
|
||||
"#
|
||||
));
|
||||
|
||||
@ -35,7 +33,6 @@ fn median_mixed_numbers() {
|
||||
r#"
|
||||
echo [-11.5 -13.5 10]
|
||||
| math median
|
||||
| echo $it
|
||||
"#
|
||||
));
|
||||
|
||||
|
@ -209,18 +209,6 @@ fn duration_math_with_negative() {
|
||||
assert_eq!(actual.out, "-6day");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn it_math() {
|
||||
let actual = nu!(
|
||||
cwd: "tests/fixtures/formats", pipeline(
|
||||
r#"
|
||||
echo 1020 | = $it + 10
|
||||
"#
|
||||
));
|
||||
|
||||
assert_eq!(actual.out, "1030");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn compound_comparison() {
|
||||
let actual = nu!(
|
||||
|
@ -26,7 +26,6 @@ fn all() {
|
||||
| get meals
|
||||
| get calories
|
||||
| math sum
|
||||
| echo $it
|
||||
"#
|
||||
));
|
||||
|
||||
@ -36,7 +35,7 @@ fn all() {
|
||||
|
||||
#[test]
|
||||
fn outputs_zero_with_no_input() {
|
||||
let actual = nu!(cwd: ".", "math sum | echo $it");
|
||||
let actual = nu!(cwd: ".", "math sum");
|
||||
|
||||
assert_eq!(actual.out, "0");
|
||||
}
|
||||
|
@ -34,7 +34,6 @@ fn row() {
|
||||
| where country in ["Guayaquil Ecuador" "New Zealand"]
|
||||
| get luck
|
||||
| math sum
|
||||
| echo $it
|
||||
"#
|
||||
));
|
||||
|
||||
|
@ -71,7 +71,6 @@ fn show_created_paths() {
|
||||
r#"
|
||||
mkdir -s dir_1 dir_2 dir_3
|
||||
| count
|
||||
| echo $it
|
||||
"#
|
||||
));
|
||||
|
||||
|
@ -27,7 +27,6 @@ fn moves_a_column_before() {
|
||||
| get chars
|
||||
| str trim
|
||||
| str collect
|
||||
| echo $it
|
||||
"#
|
||||
));
|
||||
|
||||
@ -62,7 +61,6 @@ fn moves_columns_before() {
|
||||
| get chars_2 chars_1
|
||||
| str trim
|
||||
| str collect
|
||||
| echo $it
|
||||
"#
|
||||
));
|
||||
|
||||
@ -98,7 +96,6 @@ fn moves_a_column_after() {
|
||||
| get chars_1 chars_2
|
||||
| str trim
|
||||
| str collect
|
||||
| echo $it
|
||||
"#
|
||||
));
|
||||
|
||||
@ -132,7 +129,6 @@ fn moves_columns_after() {
|
||||
| get
|
||||
| nth 1 2
|
||||
| str collect
|
||||
| echo $it
|
||||
"#
|
||||
));
|
||||
|
||||
|
@ -10,7 +10,6 @@ fn selects_a_row() {
|
||||
| sort-by name
|
||||
| nth 0
|
||||
| get name
|
||||
| echo $it
|
||||
"#
|
||||
));
|
||||
|
||||
@ -30,10 +29,9 @@ fn selects_many_rows() {
|
||||
| get name
|
||||
| nth 1 0
|
||||
| count
|
||||
| echo $it
|
||||
"#
|
||||
));
|
||||
|
||||
assert_eq!(actual.out, "2");
|
||||
});
|
||||
}
|
||||
}
|
||||
|
@ -21,7 +21,6 @@ fn parses_csv() {
|
||||
open nu.zion.csv
|
||||
| where author == "Andres N. Robalino"
|
||||
| get source
|
||||
| echo $it
|
||||
"#
|
||||
));
|
||||
|
||||
@ -59,7 +58,7 @@ fn parses_csv() {
|
||||
fn parses_bson() {
|
||||
let actual = nu!(
|
||||
cwd: "tests/fixtures/formats",
|
||||
"open sample.bson | get root | nth 0 | get b | echo $it"
|
||||
"open sample.bson | get root | nth 0 | get b"
|
||||
);
|
||||
|
||||
assert_eq!(actual.out, "hello");
|
||||
@ -76,7 +75,6 @@ fn parses_more_bson_complexity() {
|
||||
| nth 6
|
||||
| get b
|
||||
| get '$binary_subtype'
|
||||
| echo $it
|
||||
"#
|
||||
));
|
||||
|
||||
@ -142,7 +140,6 @@ fn parses_sqlite() {
|
||||
| get table_values
|
||||
| nth 2
|
||||
| get x
|
||||
| echo $it
|
||||
"#
|
||||
));
|
||||
|
||||
@ -153,7 +150,7 @@ fn parses_sqlite() {
|
||||
fn parses_toml() {
|
||||
let actual = nu!(
|
||||
cwd: "tests/fixtures/formats",
|
||||
"open cargo_sample.toml | get package.edition | echo $it"
|
||||
"open cargo_sample.toml | get package.edition"
|
||||
);
|
||||
|
||||
assert_eq!(actual.out, "2018");
|
||||
@ -167,7 +164,6 @@ fn parses_tsv() {
|
||||
open caco3_plastics.tsv
|
||||
| first 1
|
||||
| get origin
|
||||
| echo $it
|
||||
"#
|
||||
));
|
||||
|
||||
@ -181,7 +177,6 @@ fn parses_json() {
|
||||
r#"
|
||||
open sgml_description.json
|
||||
| get glossary.GlossDiv.GlossList.GlossEntry.GlossSee
|
||||
| echo $it
|
||||
"#
|
||||
));
|
||||
|
||||
@ -192,7 +187,7 @@ fn parses_json() {
|
||||
fn parses_xml() {
|
||||
let actual = nu!(
|
||||
cwd: "tests/fixtures/formats",
|
||||
"open jonathan.xml | get rss.children.channel.children | get item.children | get link.children | echo $it"
|
||||
"open jonathan.xml | get rss.children.channel.children | get item.children | get link.children.0"
|
||||
);
|
||||
|
||||
assert_eq!(
|
||||
@ -205,7 +200,7 @@ fn parses_xml() {
|
||||
fn parses_ini() {
|
||||
let actual = nu!(
|
||||
cwd: "tests/fixtures/formats",
|
||||
"open sample.ini | get SectionOne.integer | echo $it"
|
||||
"open sample.ini | get SectionOne.integer"
|
||||
);
|
||||
|
||||
assert_eq!(actual.out, "1234")
|
||||
@ -215,7 +210,7 @@ fn parses_ini() {
|
||||
fn parses_utf16_ini() {
|
||||
let actual = nu!(
|
||||
cwd: "tests/fixtures/formats",
|
||||
"open utf16.ini | get '.ShellClassInfo' | get IconIndex | echo $it"
|
||||
"open utf16.ini | get '.ShellClassInfo' | get IconIndex"
|
||||
);
|
||||
|
||||
assert_eq!(actual.out, "-236")
|
||||
|
@ -24,7 +24,7 @@ mod simple {
|
||||
| lines
|
||||
| each { echo $it | parse "{Name}={Value}" }
|
||||
| nth 1
|
||||
| echo $it.Value
|
||||
| get Value
|
||||
"#
|
||||
));
|
||||
|
||||
@ -40,7 +40,7 @@ mod simple {
|
||||
r#"
|
||||
echo "{abc}123"
|
||||
| parse "{{abc}{name}"
|
||||
| echo $it.name
|
||||
| get name
|
||||
"#
|
||||
));
|
||||
|
||||
@ -56,7 +56,7 @@ mod simple {
|
||||
r#"
|
||||
echo "(abc)123"
|
||||
| parse "(abc){name}"
|
||||
| echo $it.name
|
||||
| get name
|
||||
"#
|
||||
));
|
||||
|
||||
@ -89,7 +89,7 @@ mod simple {
|
||||
r#"
|
||||
echo "(abc)123"
|
||||
| parse "(abc){name"
|
||||
| echo $it.name
|
||||
| get name
|
||||
"#
|
||||
));
|
||||
|
||||
@ -123,7 +123,6 @@ mod regex {
|
||||
| parse --regex "(?P<Hash>\w+) (?P<Message>.+) \(#(?P<PR>\d+)\)"
|
||||
| nth 1
|
||||
| get PR
|
||||
| echo $it
|
||||
"#
|
||||
));
|
||||
|
||||
@ -143,7 +142,6 @@ mod regex {
|
||||
| parse --regex "(\w+) (.+) \(#(\d+)\)"
|
||||
| nth 1
|
||||
| get Capture1
|
||||
| echo $it
|
||||
"#
|
||||
));
|
||||
|
||||
@ -163,7 +161,6 @@ mod regex {
|
||||
| parse --regex "(?P<Hash>\w+) (.+) \(#(?P<PR>\d+)\)"
|
||||
| nth 1
|
||||
| get Capture2
|
||||
| echo $it
|
||||
"#
|
||||
));
|
||||
|
||||
@ -181,7 +178,6 @@ mod regex {
|
||||
r#"
|
||||
open nushell_git_log_oneline.txt
|
||||
| parse --regex "(?P<Hash>\w+ unfinished capture group"
|
||||
| echo $it
|
||||
"#
|
||||
));
|
||||
|
||||
|
@ -21,7 +21,6 @@ fn adds_a_row_to_the_beginning() {
|
||||
| lines
|
||||
| prepend "pollo loco"
|
||||
| nth 0
|
||||
| echo $it
|
||||
"#
|
||||
));
|
||||
|
||||
|
@ -14,7 +14,6 @@ fn selects_a_row() {
|
||||
| sort-by name
|
||||
| range 0..0
|
||||
| get name
|
||||
| echo $it
|
||||
"#
|
||||
));
|
||||
|
||||
@ -38,7 +37,6 @@ fn selects_some_rows() {
|
||||
| get name
|
||||
| range 1..2
|
||||
| count
|
||||
| echo $it
|
||||
"#
|
||||
));
|
||||
|
||||
|
@ -54,7 +54,6 @@ fn reduce_numbered_example() {
|
||||
echo one longest three bar
|
||||
| reduce -n { if $(echo $it.item | str length) > $(echo $acc.item | str length) {echo $it} {echo $acc}}
|
||||
| get index
|
||||
| echo $it
|
||||
"#
|
||||
)
|
||||
);
|
||||
@ -74,7 +73,6 @@ fn folding_with_tables() {
|
||||
}
|
||||
}
|
||||
| math sum
|
||||
| echo $it
|
||||
"#
|
||||
)
|
||||
);
|
||||
|
@ -24,7 +24,6 @@ fn changes_the_column_name() {
|
||||
| rename mosqueteros
|
||||
| get mosqueteros
|
||||
| count
|
||||
| echo $it
|
||||
"#
|
||||
));
|
||||
|
||||
@ -55,7 +54,6 @@ fn keeps_remaining_original_names_given_less_new_names_than_total_original_names
|
||||
| rename mosqueteros
|
||||
| get hit
|
||||
| count
|
||||
| echo $it
|
||||
"#
|
||||
));
|
||||
|
||||
|
@ -4,7 +4,7 @@ use nu_test_support::nu;
|
||||
fn can_get_reverse_first() {
|
||||
let actual = nu!(
|
||||
cwd: "tests/fixtures/formats",
|
||||
"ls | sort-by name | reverse | first 1 | get name | str trim | echo $it"
|
||||
"ls | sort-by name | reverse | first 1 | get name | str trim "
|
||||
);
|
||||
|
||||
assert_eq!(actual.out, "utf16.ini");
|
||||
|
@ -22,7 +22,6 @@ fn regular_columns() {
|
||||
| select rusty_at last_name
|
||||
| nth 0
|
||||
| get last_name
|
||||
| echo $it
|
||||
"#
|
||||
));
|
||||
|
||||
@ -65,7 +64,6 @@ fn complex_nested_columns() {
|
||||
| select nu."0xATYKARNU" nu.committers.name nu.releases.version
|
||||
| where "nu.releases.version" > "0.8"
|
||||
| get "nu.releases.version"
|
||||
| echo $it
|
||||
"#
|
||||
));
|
||||
|
||||
@ -92,7 +90,6 @@ fn allows_if_given_unknown_column_name_is_missing() {
|
||||
open los_tres_caballeros.csv
|
||||
| select rrusty_at first_name
|
||||
| count
|
||||
| echo $it
|
||||
"#
|
||||
));
|
||||
|
||||
|
@ -42,7 +42,6 @@ fn condition_is_met() {
|
||||
| str to-int "31/04/2020"
|
||||
| get "31/04/2020"
|
||||
| math sum
|
||||
| echo $it
|
||||
"#
|
||||
));
|
||||
|
||||
|
@ -42,7 +42,6 @@ fn condition_is_met() {
|
||||
| str to-int "31/04/2020"
|
||||
| get "31/04/2020"
|
||||
| math sum
|
||||
| echo $it
|
||||
"#
|
||||
));
|
||||
|
||||
|
@ -15,7 +15,6 @@ fn by_column() {
|
||||
| first 1
|
||||
| get Column1
|
||||
| str trim
|
||||
| echo $it
|
||||
"#
|
||||
));
|
||||
|
||||
@ -37,7 +36,6 @@ fn by_invalid_column() {
|
||||
| first 1
|
||||
| get Column1
|
||||
| str trim
|
||||
| echo $it
|
||||
"#
|
||||
));
|
||||
|
||||
@ -73,7 +71,6 @@ fn sort_primitive_values() {
|
||||
| first 6
|
||||
| sort-by
|
||||
| first 1
|
||||
| echo $it
|
||||
"#
|
||||
));
|
||||
|
||||
|
@ -23,7 +23,6 @@ fn splits() {
|
||||
| split-by type
|
||||
| get A."10/11/2013"
|
||||
| count
|
||||
| echo $it
|
||||
"#
|
||||
));
|
||||
|
||||
|
@ -20,7 +20,6 @@ fn to_column() {
|
||||
| str trim
|
||||
| split column ","
|
||||
| get Column2
|
||||
| echo $it
|
||||
"#
|
||||
));
|
||||
|
||||
|
@ -20,7 +20,6 @@ fn to_row() {
|
||||
| str trim
|
||||
| split row ","
|
||||
| count
|
||||
| echo $it
|
||||
"#
|
||||
));
|
||||
|
||||
|
@ -17,7 +17,7 @@ fn trims() {
|
||||
|
||||
let actual = nu!(
|
||||
cwd: dirs.test(),
|
||||
"open sample.toml | str trim dependency.name | get dependency.name | echo $it"
|
||||
"open sample.toml | str trim dependency.name | get dependency.name"
|
||||
);
|
||||
|
||||
assert_eq!(actual.out, "nu");
|
||||
@ -50,7 +50,7 @@ fn capitalizes() {
|
||||
|
||||
let actual = nu!(
|
||||
cwd: dirs.test(),
|
||||
"open sample.toml | str capitalize dependency.name | get dependency.name | echo $it"
|
||||
"open sample.toml | str capitalize dependency.name | get dependency.name"
|
||||
);
|
||||
|
||||
assert_eq!(actual.out, "Nu");
|
||||
@ -70,7 +70,7 @@ fn downcases() {
|
||||
|
||||
let actual = nu!(
|
||||
cwd: dirs.test(),
|
||||
"open sample.toml | str downcase dependency.name | get dependency.name | echo $it"
|
||||
"open sample.toml | str downcase dependency.name | get dependency.name"
|
||||
);
|
||||
|
||||
assert_eq!(actual.out, "light");
|
||||
@ -90,7 +90,7 @@ fn upcases() {
|
||||
|
||||
let actual = nu!(
|
||||
cwd: dirs.test(),
|
||||
"open sample.toml | str upcase package.name | get package.name | echo $it"
|
||||
"open sample.toml | str upcase package.name | get package.name"
|
||||
);
|
||||
|
||||
assert_eq!(actual.out, "NUSHELL");
|
||||
@ -110,7 +110,7 @@ fn camelcases() {
|
||||
|
||||
let actual = nu!(
|
||||
cwd: dirs.test(),
|
||||
"open sample.toml | str camel-case dependency.name | get dependency.name | echo $it"
|
||||
"open sample.toml | str camel-case dependency.name | get dependency.name"
|
||||
);
|
||||
|
||||
assert_eq!(actual.out, "thisIsATest");
|
||||
@ -128,7 +128,7 @@ fn converts_to_int() {
|
||||
| rename number
|
||||
| where number == 1
|
||||
| get number
|
||||
| echo $it
|
||||
|
||||
"#
|
||||
));
|
||||
|
||||
@ -167,7 +167,6 @@ fn sets() {
|
||||
open sample.toml
|
||||
| str set wykittenshell package.name
|
||||
| get package.name
|
||||
| echo $it
|
||||
"#
|
||||
));
|
||||
|
||||
@ -192,7 +191,6 @@ fn find_and_replaces() {
|
||||
open sample.toml
|
||||
| str find-replace KATZ "5289" fortune.teller.phone
|
||||
| get fortune.teller.phone
|
||||
| echo $it
|
||||
"#
|
||||
));
|
||||
|
||||
@ -217,7 +215,6 @@ fn find_and_replaces_without_passing_field() {
|
||||
open sample.toml
|
||||
| get fortune.teller.phone
|
||||
| str find-replace KATZ "5289"
|
||||
| echo $it
|
||||
"#
|
||||
));
|
||||
|
||||
@ -242,7 +239,6 @@ fn substrings_the_input() {
|
||||
open sample.toml
|
||||
| str substring 6,14 fortune.teller.phone
|
||||
| get fortune.teller.phone
|
||||
| echo $it
|
||||
"#
|
||||
));
|
||||
|
||||
@ -266,7 +262,6 @@ fn substring_errors_if_start_index_is_greater_than_end_index() {
|
||||
r#"
|
||||
open sample.toml
|
||||
| str substring 6,5 fortune.teller.phone
|
||||
| echo $it
|
||||
"#
|
||||
));
|
||||
|
||||
@ -293,7 +288,6 @@ fn substrings_the_input_and_returns_the_string_if_end_index_exceeds_length() {
|
||||
open sample.toml
|
||||
| str substring 0,999 package.name
|
||||
| get package.name
|
||||
| echo $it
|
||||
"#
|
||||
));
|
||||
|
||||
@ -318,7 +312,6 @@ fn substrings_the_input_and_returns_blank_if_start_index_exceeds_length() {
|
||||
open sample.toml
|
||||
| str substring 50,999 package.name
|
||||
| get package.name
|
||||
| echo $it
|
||||
"#
|
||||
));
|
||||
|
||||
@ -343,7 +336,6 @@ fn substrings_the_input_and_treats_start_index_as_zero_if_blank_start_index_give
|
||||
open sample.toml
|
||||
| str substring ,2 package.name
|
||||
| get package.name
|
||||
| echo $it
|
||||
"#
|
||||
));
|
||||
|
||||
@ -368,7 +360,6 @@ fn substrings_the_input_and_treats_end_index_as_length_if_blank_end_index_given(
|
||||
open sample.toml
|
||||
| str substring 3, package.name
|
||||
| get package.name
|
||||
| echo $it
|
||||
"#
|
||||
));
|
||||
|
||||
|
@ -23,7 +23,7 @@ fn removes_duplicate_rows() {
|
||||
open los_tres_caballeros.csv
|
||||
| uniq
|
||||
| count
|
||||
| echo $it
|
||||
|
||||
"#
|
||||
));
|
||||
|
||||
@ -53,7 +53,7 @@ fn uniq_values() {
|
||||
| select type
|
||||
| uniq
|
||||
| count
|
||||
| echo $it
|
||||
|
||||
"#
|
||||
));
|
||||
|
||||
@ -118,7 +118,7 @@ fn nested_json_structures() {
|
||||
open nested_json_structures.json
|
||||
| uniq
|
||||
| count
|
||||
| echo $it
|
||||
|
||||
"#
|
||||
));
|
||||
assert_eq!(actual.out, "3");
|
||||
@ -134,7 +134,7 @@ fn uniq_when_keys_out_of_order() {
|
||||
| from json
|
||||
| uniq
|
||||
| count
|
||||
| echo $it
|
||||
|
||||
"#
|
||||
));
|
||||
|
||||
|
@ -8,7 +8,6 @@ fn sets_the_column() {
|
||||
open cargo_sample.toml
|
||||
| update dev-dependencies.pretty_assertions "0.7.0"
|
||||
| get dev-dependencies.pretty_assertions
|
||||
| echo $it
|
||||
"#
|
||||
));
|
||||
|
||||
@ -24,7 +23,6 @@ fn sets_the_column_from_a_block_run_output() {
|
||||
open cargo_sample.toml
|
||||
| update dev-dependencies.pretty_assertions { open cargo_sample.toml | get dev-dependencies.pretty_assertions | inc --minor }
|
||||
| get dev-dependencies.pretty_assertions
|
||||
| echo $it
|
||||
"#
|
||||
));
|
||||
|
||||
@ -40,7 +38,6 @@ fn sets_the_column_from_a_block_full_stream_output() {
|
||||
| update content { open --raw cargo_sample.toml | lines | first 5 }
|
||||
| get content.1
|
||||
| str contains "nu"
|
||||
| echo $it
|
||||
"#
|
||||
));
|
||||
|
||||
@ -56,7 +53,6 @@ fn sets_the_column_from_an_invocation() {
|
||||
| update content $(open --raw cargo_sample.toml | lines | first 5)
|
||||
| get content.1
|
||||
| str contains "nu"
|
||||
| echo $it
|
||||
"#
|
||||
));
|
||||
|
||||
|
@ -7,7 +7,7 @@ use nu_test_support::pipeline;
|
||||
fn filters_by_unit_size_comparison() {
|
||||
let actual = nu!(
|
||||
cwd: "tests/fixtures/formats",
|
||||
"ls | where size > 1kb | sort-by size | get name | first 1 | str trim | echo $it"
|
||||
"ls | where size > 1kb | sort-by size | get name | first 1 | str trim"
|
||||
);
|
||||
|
||||
assert_eq!(actual.out, "cargo_sample.toml");
|
||||
@ -17,7 +17,7 @@ fn filters_by_unit_size_comparison() {
|
||||
fn filters_with_nothing_comparison() {
|
||||
let actual = nu!(
|
||||
cwd: "tests/fixtures/formats",
|
||||
r#"echo '[{"foo": 3}, {"foo": null}, {"foo": 4}]' | from json | get foo | compact | where $it > 1 | math sum | echo $it"#
|
||||
r#"echo '[{"foo": 3}, {"foo": null}, {"foo": 4}]' | from json | get foo | compact | where $it > 1 | math sum"#
|
||||
);
|
||||
|
||||
assert_eq!(actual.out, "7");
|
||||
@ -27,7 +27,7 @@ fn filters_with_nothing_comparison() {
|
||||
fn where_in_table() {
|
||||
let actual = nu!(
|
||||
cwd: "tests/fixtures/formats",
|
||||
r#"echo '[{"name": "foo", "size": 3}, {"name": "foo", "size": 2}, {"name": "bar", "size": 4}]' | from json | where name in ["foo"] | get size | math sum | echo $it"#
|
||||
r#"echo '[{"name": "foo", "size": 3}, {"name": "foo", "size": 2}, {"name": "bar", "size": 4}]' | from json | where name in ["foo"] | get size | math sum"#
|
||||
);
|
||||
|
||||
assert_eq!(actual.out, "5");
|
||||
@ -37,7 +37,7 @@ fn where_in_table() {
|
||||
fn where_not_in_table() {
|
||||
let actual = nu!(
|
||||
cwd: "tests/fixtures/formats",
|
||||
r#"echo '[{"name": "foo", "size": 3}, {"name": "foo", "size": 2}, {"name": "bar", "size": 4}]' | from json | where name not-in ["foo"] | get size | math sum | echo $it"#
|
||||
r#"echo '[{"name": "foo", "size": 3}, {"name": "foo", "size": 2}, {"name": "bar", "size": 4}]' | from json | where name not-in ["foo"] | get size | math sum"#
|
||||
);
|
||||
|
||||
assert_eq!(actual.out, "4");
|
||||
@ -55,7 +55,6 @@ fn explicit_block_condition() {
|
||||
| first 4
|
||||
| where {= $it.z > 4200}
|
||||
| get z
|
||||
| echo $it
|
||||
"#
|
||||
));
|
||||
|
||||
@ -74,7 +73,6 @@ fn binary_operator_comparisons() {
|
||||
| first 4
|
||||
| where z > 4200
|
||||
| get z
|
||||
| echo $it
|
||||
"#
|
||||
));
|
||||
|
||||
@ -89,7 +87,6 @@ fn binary_operator_comparisons() {
|
||||
| first 4
|
||||
| where z >= 4253
|
||||
| get z
|
||||
| echo $it
|
||||
"#
|
||||
));
|
||||
|
||||
@ -104,7 +101,6 @@ fn binary_operator_comparisons() {
|
||||
| first 4
|
||||
| where z < 10
|
||||
| get z
|
||||
| echo $it
|
||||
"#
|
||||
));
|
||||
|
||||
@ -119,7 +115,6 @@ fn binary_operator_comparisons() {
|
||||
| first 4
|
||||
| where z <= 1
|
||||
| get z
|
||||
| echo $it
|
||||
"#
|
||||
));
|
||||
|
||||
@ -134,7 +129,6 @@ fn binary_operator_comparisons() {
|
||||
| where z != 1
|
||||
| first 1
|
||||
| get z
|
||||
| echo $it
|
||||
"#
|
||||
));
|
||||
|
||||
@ -152,7 +146,6 @@ fn contains_operator() {
|
||||
| get table_values
|
||||
| where x =~ ell
|
||||
| count
|
||||
| echo $it
|
||||
"#
|
||||
));
|
||||
|
||||
@ -166,7 +159,6 @@ fn contains_operator() {
|
||||
| get table_values
|
||||
| where x !~ ell
|
||||
| count
|
||||
| echo $it
|
||||
"#
|
||||
));
|
||||
|
||||
|
@ -24,7 +24,6 @@ fn wrap_rows_into_a_row() {
|
||||
| get caballeros
|
||||
| nth 0
|
||||
| get last_name
|
||||
| echo $it
|
||||
"#
|
||||
));
|
||||
|
||||
@ -54,7 +53,6 @@ fn wrap_rows_into_a_table() {
|
||||
| wrap caballero
|
||||
| nth 2
|
||||
| get caballero
|
||||
| echo $it
|
||||
"#
|
||||
));
|
||||
|
||||
|
@ -10,7 +10,6 @@ fn table_to_bson_and_back_into_table() {
|
||||
| from bson
|
||||
| get root
|
||||
| get 1.b
|
||||
| echo $it
|
||||
"#
|
||||
));
|
||||
|
||||
|
@ -6,7 +6,7 @@ use nu_test_support::{nu, pipeline};
|
||||
fn table_to_csv_text_and_from_csv_text_back_into_table() {
|
||||
let actual = nu!(
|
||||
cwd: "tests/fixtures/formats",
|
||||
"open caco3_plastics.csv | to csv | from csv | first 1 | get origin | echo $it"
|
||||
"open caco3_plastics.csv | to csv | from csv | first 1 | get origin "
|
||||
);
|
||||
|
||||
assert_eq!(actual.out, "SPAIN");
|
||||
@ -35,7 +35,6 @@ fn table_to_csv_text() {
|
||||
| to csv
|
||||
| lines
|
||||
| nth 1
|
||||
| echo $it
|
||||
"#
|
||||
));
|
||||
|
||||
@ -66,7 +65,6 @@ fn table_to_csv_text_skipping_headers_after_conversion() {
|
||||
| split column "," a b c d origin
|
||||
| last 1
|
||||
| to csv --headerless
|
||||
| echo $it
|
||||
"#
|
||||
));
|
||||
|
||||
@ -96,7 +94,6 @@ fn infers_types() {
|
||||
open los_cuatro_mosqueteros.csv
|
||||
| where rusty_luck > 0
|
||||
| count
|
||||
| echo $it
|
||||
"#
|
||||
));
|
||||
|
||||
@ -124,7 +121,6 @@ fn from_csv_text_to_table() {
|
||||
| from csv
|
||||
| get rusty_luck
|
||||
| count
|
||||
| echo $it
|
||||
"#
|
||||
));
|
||||
|
||||
@ -152,7 +148,6 @@ fn from_csv_text_with_separator_to_table() {
|
||||
| from csv --separator ';'
|
||||
| get rusty_luck
|
||||
| count
|
||||
| echo $it
|
||||
"#
|
||||
));
|
||||
|
||||
@ -180,7 +175,6 @@ fn from_csv_text_with_tab_separator_to_table() {
|
||||
| from csv --separator '\t'
|
||||
| get rusty_luck
|
||||
| count
|
||||
| echo $it
|
||||
"#
|
||||
));
|
||||
|
||||
@ -207,7 +201,6 @@ fn from_csv_text_skipping_headers_to_table() {
|
||||
| from csv --headerless
|
||||
| get Column3
|
||||
| count
|
||||
| echo $it
|
||||
"#
|
||||
));
|
||||
|
||||
|
@ -12,7 +12,6 @@ fn from_eml_get_to_field() {
|
||||
open sample.eml
|
||||
| get To
|
||||
| get Address
|
||||
| echo $it
|
||||
"#
|
||||
)
|
||||
);
|
||||
@ -26,7 +25,6 @@ fn from_eml_get_to_field() {
|
||||
open sample.eml
|
||||
| get To
|
||||
| get Name
|
||||
| echo $it
|
||||
"#
|
||||
)
|
||||
);
|
||||
@ -44,7 +42,6 @@ fn from_eml_get_replyto_field() {
|
||||
open sample.eml
|
||||
| get Reply-To
|
||||
| get Address
|
||||
| echo $it
|
||||
"#
|
||||
)
|
||||
);
|
||||
@ -58,7 +55,6 @@ fn from_eml_get_replyto_field() {
|
||||
open sample.eml
|
||||
| get Reply-To
|
||||
| get Name
|
||||
| echo $it
|
||||
"#
|
||||
)
|
||||
);
|
||||
@ -74,7 +70,6 @@ fn from_eml_get_subject_field() {
|
||||
r#"
|
||||
open sample.eml
|
||||
| get Subject
|
||||
| echo $it
|
||||
"#
|
||||
)
|
||||
);
|
||||
@ -90,7 +85,6 @@ fn from_eml_get_another_header_field() {
|
||||
r#"
|
||||
open sample.eml
|
||||
| get MIME-Version
|
||||
| echo $it
|
||||
"#
|
||||
)
|
||||
);
|
||||
|
@ -49,7 +49,6 @@ fn infers_types() {
|
||||
open calendar.ics
|
||||
| get events
|
||||
| count
|
||||
| echo $it
|
||||
"#
|
||||
));
|
||||
|
||||
@ -92,7 +91,6 @@ fn from_ics_text_to_table() {
|
||||
| where name == "SUMMARY"
|
||||
| first
|
||||
| get value
|
||||
| echo $it
|
||||
"#
|
||||
));
|
||||
|
||||
|
@ -11,7 +11,6 @@ fn table_to_json_text_and_from_json_text_back_into_table() {
|
||||
| to json
|
||||
| from json
|
||||
| get glossary.GlossDiv.GlossList.GlossEntry.GlossSee
|
||||
| echo $it
|
||||
"#
|
||||
));
|
||||
|
||||
@ -37,7 +36,7 @@ fn from_json_text_to_table() {
|
||||
|
||||
let actual = nu!(
|
||||
cwd: dirs.test(),
|
||||
"open katz.txt | from json | get katz | get rusty_luck | count | echo $it"
|
||||
"open katz.txt | from json | get katz | get rusty_luck | count "
|
||||
);
|
||||
|
||||
assert_eq!(actual.out, "4");
|
||||
@ -64,7 +63,6 @@ fn from_json_text_recognizing_objects_independently_to_table() {
| from json -o
| where name == "GorbyPuff"
| get rusty_luck
| echo $it
"#
));

@ -94,7 +92,6 @@ fn table_to_json_text() {
| from json
| nth 0
| get name
| echo $it
"#
));

@ -9,7 +9,6 @@ fn from_ods_file_to_table() {
| get SalesOrders
| nth 4
| get Column2
| echo $it
"#
));

@ -13,7 +13,6 @@ fn table_to_sqlite_and_back_into_table() {
| get table_values
| nth 2
| get x
| echo $it
"#
));

@ -22,7 +22,6 @@ fn from_ssv_text_to_table() {
| from ssv
| nth 0
| get IP
| echo $it
"#
));

@ -50,7 +49,6 @@ fn from_ssv_text_to_table_with_separator_specified() {
| from ssv --minimum-spaces 3
| nth 0
| get IP
| echo $it
"#
));

@ -77,7 +75,6 @@ fn from_ssv_text_treating_first_line_as_data_with_flag() {
| from ssv --headerless -a
| first
| get Column1
| echo $it
"#
));

@ -88,7 +85,7 @@ fn from_ssv_text_treating_first_line_as_data_with_flag() {
| from ssv --headerless
| first
| get Column1
| echo $it

"#
));

@ -9,7 +9,6 @@ fn table_to_toml_text_and_from_toml_text_back_into_table() {
| to toml
| from toml
| get package.name
| echo $it
"#
));

@ -6,7 +6,7 @@ use nu_test_support::{nu, pipeline};
fn table_to_tsv_text_and_from_tsv_text_back_into_table() {
let actual = nu!(
cwd: "tests/fixtures/formats",
"open caco3_plastics.tsv | to tsv | from tsv | first 1 | get origin | echo $it"
"open caco3_plastics.tsv | to tsv | from tsv | first 1 | get origin"
);

assert_eq!(actual.out, "SPAIN");

@ -16,7 +16,7 @@ fn table_to_tsv_text_and_from_tsv_text_back_into_table() {
fn table_to_tsv_text_and_from_tsv_text_back_into_table_using_csv_separator() {
let actual = nu!(
cwd: "tests/fixtures/formats",
r"open caco3_plastics.tsv | to tsv | from csv --separator '\t' | first 1 | get origin | echo $it"
r"open caco3_plastics.tsv | to tsv | from csv --separator '\t' | first 1 | get origin"
);

assert_eq!(actual.out, "SPAIN");

@ -44,7 +44,6 @@ fn table_to_tsv_text() {
| to tsv
| lines
| nth 1
| echo $it
"#
));

@ -72,7 +71,6 @@ fn table_to_tsv_text_skipping_headers_after_conversion() {
| split column "\t" a b c d origin
| last 1
| to tsv --headerless
| echo $it
"#
));

@ -100,7 +98,6 @@ fn from_tsv_text_to_table() {
| from tsv
| get rusty_luck
| count
| echo $it
"#
));

@ -127,7 +124,6 @@ fn from_tsv_text_skipping_headers_to_table() {
| from tsv --headerless
| get Column3
| count
| echo $it
"#
));

@ -9,7 +9,6 @@ fn can_encode_and_decode_urlencoding() {
| to url
| from url
| get cheese
| echo $it
"#
));

@ -36,7 +36,6 @@ fn infers_types() {
r#"
open contacts.vcf
| count
| echo $it
"#
));

@ -75,7 +74,6 @@ fn from_vcf_text_to_table() {
| where name == "EMAIL"
| first
| get value
| echo $it
"#
));

@ -9,7 +9,6 @@ fn from_excel_file_to_table() {
| get SalesOrders
| nth 4
| get Column2
| echo $it
"#
));

@ -9,7 +9,6 @@ fn table_to_xml_text_and_from_xml_text_back_into_table() {
| to xml
| from xml
| get rss.children.channel.children.0.item.children.0.guid.attributes.isPermaLink
| echo $it
"#
));

@ -9,7 +9,6 @@ fn table_to_yaml_text_and_from_yaml_text_back_into_table() {
| to yaml
| from yaml
| get environment.global.PROJECT_NAME
| echo $it
"#
));
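Taken together, the format-conversion test hunks above follow one mechanical pattern: once the parser no longer rewrites `$it`, a trailing `| echo $it` after `get`, `count`, or `nth` is redundant, because the last pipeline value is already what the test harness captures. A rough sketch of the migrated shape, assuming a hypothetical fixture and expected value (`nu!` and `pipeline` are the nu_test_support helpers already used above):

```rust
// Sketch of the migrated test shape. The fixture name and expected value are
// invented for illustration; `nu!` and `pipeline` come from nu_test_support.
use nu_test_support::{nu, pipeline};

#[test]
fn reads_a_single_value_without_trailing_echo_it() {
    let actual = nu!(
        cwd: "tests/fixtures/formats", pipeline(
        r#"
            open sample.toml
            | get package.name
        "#
    ));

    // Previously this pipeline ended in `| echo $it`; the value now flows out
    // of `get` directly and is what the harness captures in `actual.out`.
    assert_eq!(actual.out, "my-project");
}
```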
@ -410,7 +410,7 @@ fn parse_variable(
registry: &dyn SignatureRegistry,
) -> (SpannedExpression, Option<ParseError>) {
if lite_arg.item == "$it" {
trace!("parsin $it");
trace!("parsing $it");
parse_full_column_path(lite_arg, registry)
} else {
(

@ -603,10 +603,7 @@ fn parse_interpolated_string(
}];

let call = SpannedExpression {
expr: Expression::Invocation(Block {
block,
span: lite_arg.span,
}),
expr: Expression::Invocation(Block::new(None, block, lite_arg.span)),
span: lite_arg.span,
};

@ -1375,7 +1372,7 @@ fn parse_positional_argument(
let span = arg.span;
let mut commands = hir::Commands::new(span);
commands.push(ClassifiedCommand::Expr(Box::new(arg)));
let mut block = hir::Block::new(span);
let mut block = hir::Block::new(None, vec![], span);
block.push(commands);

let arg = SpannedExpression::new(Expression::Block(block), span);

@ -1771,7 +1768,7 @@ fn expand_shorthand_forms(
}

pub fn classify_block(lite_block: &LiteBlock, registry: &dyn SignatureRegistry) -> ClassifiedBlock {
let mut block = Block::new(lite_block.span());
let mut block = Block::new(None, vec![], lite_block.span());

let mut error = None;
for lite_pipeline in &lite_block.block {

@ -1784,10 +1781,7 @@ pub fn classify_block(lite_block: &LiteBlock, registry: &dyn SignatureRegistry)

let pipeline = if let Some(vars) = vars {
let span = pipeline.commands.span;
let block = hir::Block {
block: vec![pipeline.commands.clone()],
span,
};
let block = hir::Block::new(None, vec![pipeline.commands.clone()], span);
let mut call = hir::Call::new(
Box::new(SpannedExpression {
expr: Expression::string("with-env".to_string()),

@ -55,7 +55,7 @@ pub fn expression_to_flat_shape(e: &SpannedExpression) -> Vec<Spanned<FlatShape>
Expression::ExternalWord => vec![FlatShape::ExternalWord.spanned(e.span)],
Expression::ExternalCommand(_) => vec![FlatShape::ExternalCommand.spanned(e.span)],
Expression::Synthetic(_) => vec![FlatShape::BareMember.spanned(e.span)],
Expression::Variable(_) => vec![FlatShape::Variable.spanned(e.span)],
Expression::Variable(_, _) => vec![FlatShape::Variable.spanned(e.span)],
Expression::Binary(binary) => {
let mut output = vec![];
output.append(&mut expression_to_flat_shape(&binary.left));

@ -40,14 +40,6 @@ impl InternalCommand {
),
}
}

pub fn expand_it_usage(&mut self) {
if let Some(positionals) = &mut self.args.positional {
for arg in positionals {
arg.expand_it_usage();
}
}
}
}

#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash, Serialize, Deserialize)]

@ -84,42 +76,6 @@ pub enum ClassifiedCommand {
Error(ParseError),
}

impl ClassifiedCommand {
pub fn has_it_iteration(&self) -> bool {
match self {
ClassifiedCommand::Internal(command) => {
let mut result = command.args.head.has_shallow_it_usage();

if let Some(positionals) = &command.args.positional {
for arg in positionals {
result = result || arg.has_shallow_it_usage();
}
}

if let Some(named) = &command.args.named {
for arg in named.iter() {
if let NamedValue::Value(_, value) = arg.1 {
result = result || value.has_shallow_it_usage();
}
}
}

result
}
ClassifiedCommand::Expr(expr) => expr.has_shallow_it_usage(),
_ => false,
}
}

pub fn expand_it_usage(&mut self) {
match self {
ClassifiedCommand::Internal(command) => command.expand_it_usage(),
ClassifiedCommand::Expr(expr) => expr.expand_it_usage(),
_ => {}
}
}
}

#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash, Serialize, Deserialize)]
pub struct Commands {
pub list: Vec<ClassifiedCommand>,

@ -134,55 +90,28 @@ impl Commands {
pub fn push(&mut self, command: ClassifiedCommand) {
self.list.push(command);
}

/// Convert all shallow uses of $it to `each { use of $it }`, converting each to a per-row command
pub fn expand_it_usage(&mut self) {
for idx in 0..self.list.len() {
self.list[idx].expand_it_usage();
}
for idx in 1..self.list.len() {
if self.list[idx].has_it_iteration() {
self.list[idx] = ClassifiedCommand::Internal(InternalCommand {
name: "each".to_string(),
name_span: self.span,
args: hir::Call {
head: Box::new(SpannedExpression {
expr: Expression::Synthetic(Synthetic::String(
"expanded-each".to_string(),
)),
span: self.span,
}),
named: None,
span: self.span,
positional: Some(vec![SpannedExpression {
expr: Expression::Block(Block {
block: vec![Commands {
list: vec![self.list[idx].clone()],
span: self.span,
}],
span: self.span,
}),
span: self.span,
}]),
external_redirection: ExternalRedirection::Stdout, // FIXME
},
})
}
}
}
}

#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash, Serialize, Deserialize)]
pub struct Block {
pub params: Vec<String>,
pub block: Vec<Commands>,
pub span: Span,
}

impl Block {
pub fn new(span: Span) -> Block {
Block {
block: vec![],
span,
pub fn new(params: Option<Vec<String>>, block: Vec<Commands>, span: Span) -> Block {
match params {
Some(params) => Block {
params,
block,
span,
},
None => Block {
params: vec!["$it".into()],
block,
span,
},
}
}
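The replacement `Block::new` above carries the whole design: a block constructed without an explicit parameter list implicitly takes `$it` as its single parameter, which is how `each { ... }` receives the current row now that the parser no longer injects an `expanded-each` wrapper. A minimal standalone sketch of that defaulting rule (placeholder types, not the real `nu_protocol::hir` ones):

```rust
// Simplified mirror of the params-defaulting rule; `Block` here is a
// placeholder type, not nu_protocol::hir::Block.
#[derive(Debug)]
struct Block {
    params: Vec<String>,
    body: Vec<String>, // stands in for Vec<Commands>
}

impl Block {
    fn new(params: Option<Vec<String>>, body: Vec<String>) -> Block {
        Block {
            // No explicit parameters means the block implicitly takes `$it`.
            params: params.unwrap_or_else(|| vec!["$it".into()]),
            body,
        }
    }
}

fn main() {
    let implicit = Block::new(None, vec!["echo $it".into()]);
    let explicit = Block::new(Some(vec!["$x".into()]), vec!["echo $x".into()]);
    assert_eq!(implicit.params, vec!["$it".to_string()]);
    assert_eq!(explicit.params, vec!["$x".to_string()]);
    println!("{:?} {:?}", implicit, explicit);
}
```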
@ -190,13 +119,6 @@ impl Block {
self.block.push(commands);
}

/// Convert all shallow uses of $it to `each { use of $it }`, converting each to a per-row command
pub fn expand_it_usage(&mut self) {
for commands in &mut self.block {
commands.expand_it_usage();
}
}

pub fn set_redirect(&mut self, external_redirection: ExternalRedirection) {
if let Some(pipeline) = self.block.last_mut() {
if let Some(command) = pipeline.list.last_mut() {

@ -250,7 +172,7 @@ impl ExternalCommand {
..
} => {
let Path { head, .. } = &**path;
matches!(head, SpannedExpression{expr: Expression::Variable(Variable::It(_)), ..})
matches!(head, SpannedExpression{expr: Expression::Variable(x, ..), ..} if x == "$it")
}
_ => false,
})

@ -594,118 +516,6 @@ impl SpannedExpression {
_ => 0,
}
}

pub fn has_shallow_it_usage(&self) -> bool {
match &self.expr {
Expression::Binary(binary) => {
binary.left.has_shallow_it_usage() || binary.right.has_shallow_it_usage()
}
Expression::Range(range) => {
let left = if let Some(left) = &range.left {
left.has_shallow_it_usage()
} else {
false
};

let right = if let Some(right) = &range.right {
right.has_shallow_it_usage()
} else {
false
};

left || right
}
Expression::Variable(Variable::It(_)) => true,
Expression::Path(path) => path.head.has_shallow_it_usage(),
Expression::List(list) => {
for l in list {
if l.has_shallow_it_usage() {
return true;
}
}
false
}
Expression::Table(headers, cells) => {
for l in headers {
if l.has_shallow_it_usage() {
return true;
}
}

for row in cells {
for cell in row {
if cell.has_shallow_it_usage() {
return true;
}
}
}
false
}
Expression::Invocation(block) => {
for commands in block.block.iter() {
for command in commands.list.iter() {
if command.has_it_iteration() {
return true;
}
}
}
false
}
_ => false,
}
}

pub fn expand_it_usage(&mut self) {
match self {
SpannedExpression {
expr: Expression::Block(block),
..
} => {
block.expand_it_usage();
}
SpannedExpression {
expr: Expression::Invocation(block),
..
} => {
block.expand_it_usage();
}
SpannedExpression {
expr: Expression::List(list),
..
} => {
for item in list.iter_mut() {
item.expand_it_usage();
}
}
SpannedExpression {
expr: Expression::Table(headers, cells),
..
} => {
for header in headers.iter_mut() {
header.expand_it_usage();
}

for row in cells.iter_mut() {
for cell in row {
cell.expand_it_usage()
}
}
}
SpannedExpression {
expr: Expression::Path(path),
..
} => {
if let SpannedExpression {
expr: Expression::Invocation(block),
..
} = &mut path.head
{
block.expand_it_usage();
}
}
_ => {}
}
}
}

impl std::ops::Deref for SpannedExpression {

@ -745,8 +555,7 @@ impl PrettyDebugWithSource for SpannedExpression {
b::delimit("s\"", b::primitive(self.span.slice(source)), "\"").group()
}
},
Expression::Variable(Variable::Other(_, _)) => b::keyword(self.span.slice(source)),
Expression::Variable(Variable::It(_)) => b::keyword("$it"),
Expression::Variable(_, _) => b::keyword(self.span.slice(source)),
Expression::Binary(binary) => binary.pretty_debug(source),
Expression::Range(range) => range.pretty_debug(source),
Expression::Block(_) => b::opaque("block"),

@ -800,8 +609,7 @@ impl PrettyDebugWithSource for SpannedExpression {
Expression::Synthetic(s) => match s {
Synthetic::String(s) => b::typed("synthetic", b::primitive(format!("{:?}", s))),
},
Expression::Variable(Variable::Other(_, _)) => b::keyword(self.span.slice(source)),
Expression::Variable(Variable::It(_)) => b::keyword("$it"),
Expression::Variable(_, _) => b::keyword(self.span.slice(source)),
Expression::Binary(binary) => binary.pretty_debug(source),
Expression::Range(range) => range.pretty_debug(source),
Expression::Block(_) => b::opaque("block"),

@ -841,12 +649,6 @@ impl PrettyDebugWithSource for SpannedExpression {
}
}

#[derive(Debug, Ord, PartialOrd, Eq, PartialEq, Clone, Hash, Deserialize, Serialize)]
pub enum Variable {
It(Span),
Other(String, Span),
}

#[derive(Debug, Clone, Copy, PartialOrd, Ord, Eq, Hash, PartialEq, Deserialize, Serialize)]
pub enum Operator {
Equal,

@ -1039,7 +841,7 @@ pub enum Expression {
Literal(Literal),
ExternalWord,
Synthetic(Synthetic),
Variable(Variable),
Variable(String, Span),
Binary(Box<Binary>),
Range(Box<Range>),
Block(hir::Block),

@ -1148,11 +950,7 @@ impl Expression {
}

pub fn variable(v: String, span: Span) -> Expression {
if v == "$it" {
Expression::Variable(Variable::It(span))
} else {
Expression::Variable(Variable::Other(v, span))
}
Expression::Variable(v, span)
}

pub fn boolean(b: bool) -> Expression {

@ -5,8 +5,6 @@ use std::fmt::Debug;
use std::sync::Arc;

/// An evaluation scope. Scopes map variable names to Values and aid in evaluating blocks and expressions.
/// Additionally, holds the value for the special $it variable, a variable used to refer to the value passing
/// through the pipeline at that moment
#[derive(Deserialize, Serialize, Debug, Clone)]
pub struct Scope {
vars: IndexMap<String, Value>,

@ -63,10 +61,6 @@ impl Scope {
}
}

pub fn it(&self) -> Option<Value> {
self.var("$it")
}

pub fn from_env(env: IndexMap<String, String>) -> Arc<Scope> {
Arc::new(Scope {
vars: IndexMap::new(),

@ -75,19 +69,9 @@ impl Scope {
})
}

pub fn append_it(this: Arc<Self>, it: Value) -> Arc<Scope> {
pub fn append_var(this: Arc<Self>, name: impl Into<String>, value: Value) -> Arc<Scope> {
let mut vars = IndexMap::new();
vars.insert("$it".into(), it);
Arc::new(Scope {
vars,
env: IndexMap::new(),
parent: Some(this),
})
}

pub fn append_var(this: Arc<Self>, name: String, value: Value) -> Arc<Scope> {
let mut vars = IndexMap::new();
vars.insert(name, value);
vars.insert(name.into(), value);
Arc::new(Scope {
vars,
env: IndexMap::new(),
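With `append_it` and the `it()` accessor gone, `$it` is an ordinary binding that `Scope::append_var` layers onto a child scope, and lookups that miss fall back through the parent chain. A rough standalone sketch of that layering, using a plain `HashMap` and placeholder types rather than the real `Scope`:

```rust
use std::collections::HashMap;
use std::sync::Arc;

// Placeholder scope type: variable lookups fall back to the parent chain.
struct MiniScope {
    vars: HashMap<String, String>,
    parent: Option<Arc<MiniScope>>,
}

impl MiniScope {
    fn var(&self, name: &str) -> Option<String> {
        match self.vars.get(name) {
            Some(v) => Some(v.clone()),
            None => self.parent.as_ref().and_then(|p| p.var(name)),
        }
    }

    // Mirrors the shape of append_var: a new child scope holding one binding.
    fn append_var(this: Arc<Self>, name: impl Into<String>, value: String) -> Arc<MiniScope> {
        let mut vars = HashMap::new();
        vars.insert(name.into(), value);
        Arc::new(MiniScope { vars, parent: Some(this) })
    }
}

fn main() {
    let root = Arc::new(MiniScope { vars: HashMap::new(), parent: None });
    // `each` can bind the current row the same way any other variable is bound.
    let row_scope = MiniScope::append_var(root, "$it", "current row".to_string());
    assert_eq!(row_scope.var("$it").as_deref(), Some("current row"));
}
```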
@ -14,4 +14,4 @@ RUN rustc -Vv && cargo build --release && \
debuild -b -us -uc -i && \
dpkg -i ../nu_0.2.0-1_amd64.deb && \
chsh -s /usr/bin/nu && \
echo 'ls | get name | echo $it' | /usr/bin/nu
echo 'ls | get name ' | /usr/bin/nu

@ -27,7 +27,7 @@ fn by_one_with_field_passed() {

let actual = nu_with_plugins!(
cwd: dirs.test(),
"open sample.toml | inc package.edition | get package.edition | echo $it"
"open sample.toml | inc package.edition | get package.edition"
);

assert_eq!(actual.out, "2019");

@ -47,7 +47,7 @@ fn by_one_with_no_field_passed() {

let actual = nu_with_plugins!(
cwd: dirs.test(),
"open sample.toml | get package.contributors | inc | echo $it"
"open sample.toml | get package.contributors | inc"
);

assert_eq!(actual.out, "3");

@ -67,7 +67,7 @@ fn semversion_major_inc() {

let actual = nu_with_plugins!(
cwd: dirs.test(),
"open sample.toml | inc package.version -M | get package.version | echo $it"
"open sample.toml | inc package.version -M | get package.version"
);

assert_eq!(actual.out, "1.0.0");

@ -87,7 +87,7 @@ fn semversion_minor_inc() {

let actual = nu_with_plugins!(
cwd: dirs.test(),
"open sample.toml | inc package.version --minor | get package.version | echo $it"
"open sample.toml | inc package.version --minor | get package.version"
);

assert_eq!(actual.out, "0.2.0");

@ -107,7 +107,7 @@ fn semversion_patch_inc() {

let actual = nu_with_plugins!(
cwd: dirs.test(),
"open sample.toml | inc package.version --patch | get package.version | echo $it"
"open sample.toml | inc package.version --patch | get package.version"
);

assert_eq!(actual.out, "0.1.4");

@ -127,7 +127,7 @@ fn semversion_without_passing_field() {

let actual = nu_with_plugins!(
cwd: dirs.test(),
"open sample.toml | get package.version | inc --patch | echo $it"
"open sample.toml | get package.version | inc --patch"
);

assert_eq!(actual.out, "0.1.4");

@ -34,7 +34,7 @@ fn automatically_change_directory() {
cwd: dirs.test(),
r#"
autodir
pwd | echo $it
echo $(pwd)
"#
);

@ -53,7 +53,7 @@ fn automatically_change_directory_with_trailing_slash_and_same_name_as_command()
cwd: dirs.test(),
r#"
cd/
pwd | echo $it
pwd
"#
);

@ -87,10 +87,8 @@ mod it_evaluation {
ls
| sort-by name
| get name
| nu --testbin cococo $it
| lines
| each { nu --testbin cococo $it | lines }
| nth 1
| echo $it
"#
));

@ -114,10 +112,8 @@ mod it_evaluation {
r#"
open nu_candies.txt
| lines
| nu --testbin chop $it
| lines
| each { nu --testbin chop $it | lines}
| nth 1
| echo $it
"#
));

@ -139,8 +135,7 @@ mod it_evaluation {
cwd: dirs.test(), pipeline(
r#"
open sample.toml
| nu --testbin cococo $it.nu_party_venue
| echo $it
| each { nu --testbin cococo $it.nu_party_venue }
"#
));
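The `it_evaluation` hunks above show the second half of the migration: per-row `$it` usage that the parser used to hoist automatically is now spelled out as an explicit `each { ... }` block. A sketch of that shape, with an invented input list and expected value (`nu!`, `pipeline`, and the `--testbin chop` helper all appear in the hunks above):

```rust
// Sketch only: the expected output assumes `--testbin chop` drops the last
// character of its input, as the surrounding tests suggest.
use nu_test_support::{nu, pipeline};

#[test]
fn runs_the_external_once_per_row() {
    let actual = nu!(
        cwd: ".", pipeline(
        r#"
            echo [nushell rocks]
            | each { nu --testbin chop $it | lines }
            | nth 1
        "#
    ));

    // `$it` is only visible inside the each block, where it is the block's
    // implicit parameter for the current row.
    assert_eq!(actual.out, "rock");
}
```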
@ -1,4 +1,3 @@
use nu_test_support::fs::Stub::EmptyFile;
#[cfg(feature = "which")]
use nu_test_support::fs::Stub::FileWithContent;
use nu_test_support::fs::Stub::FileWithContentToBeTrimmed;

@ -25,11 +24,8 @@ fn takes_rows_of_nu_value_strings_and_pipes_it_to_stdin_of_external() {
r#"
open nu_times.csv
| get origin
| ^echo $it
| nu --testbin chop
| lines
| each { ^echo $it | nu --testbin chop | lines }
| nth 2
| echo $it
"#
));

@ -224,54 +220,6 @@ fn autoenv() {
})
}

#[test]
fn proper_it_expansion() {
Playground::setup("ls_test_1", |dirs, sandbox| {
sandbox.with_files(vec![
EmptyFile("andres.txt"),
EmptyFile("gedge.txt"),
EmptyFile("jonathan.txt"),
EmptyFile("yehuda.txt"),
]);

let actual = nu!(
cwd: dirs.test(), pipeline(
r#"
ls | sort-by name | group-by type | each { get File.name | echo $it } | to json
"#
));

assert_eq!(
actual.out,
r#"["andres.txt","gedge.txt","jonathan.txt","yehuda.txt"]"#
);
})
}

#[test]
fn it_expansion_of_list() {
let actual = nu!(
cwd: ".",
r#"
echo "foo" | echo [bar $it] | to json
"#
);

assert_eq!(actual.out, "[\"bar\",\"foo\"]");
}

#[test]
fn it_expansion_of_invocation() {
let actual = nu!(
cwd: ".",
r#"
echo $(echo "4" | echo $it | str to-int )
"#
);

assert_eq!(actual.out, "4");
}

#[test]
fn invocation_properly_redirects() {
let actual = nu!(

@ -289,7 +237,7 @@ fn argument_invocation() {
let actual = nu!(
cwd: ".",
r#"
echo "foo" | echo $(echo $it)
echo "foo" | each { echo $(echo $it) }
"#
);

@ -315,9 +263,8 @@ fn invocation_handles_dot() {
r#"
echo $(open nu_times.csv)
| get name
| nu --testbin chop $it
| each { nu --testbin chop $it | lines }
| nth 3
| echo $it
"#
));

@ -330,7 +277,7 @@ fn string_interpolation_with_it() {
let actual = nu!(
cwd: ".",
r#"
echo "foo" | echo `{{$it}}`
echo "foo" | each { echo `{{$it}}` }
"#
);

@ -342,7 +289,7 @@ fn string_interpolation_with_column() {
let actual = nu!(
cwd: ".",
r#"
echo '{"name": "bob"}' | from json | echo `{{name}} is cool`
echo [[name]; [bob]] | each { echo `{{name}} is cool` }
"#
);

@ -354,7 +301,7 @@ fn string_interpolation_with_column2() {
let actual = nu!(
cwd: ".",
r#"
echo '{"name": "fred"}' | from json | echo `also {{name}} is cool`
echo [[name]; [fred]] | each { echo `also {{name}} is cool` }
"#
);

@ -366,7 +313,7 @@ fn string_interpolation_with_column3() {
let actual = nu!(
cwd: ".",
r#"
echo '{"name": "sally"}' | from json | echo `also {{name}}`
echo [[name]; [sally]] | each { echo `also {{name}}` }
"#
);

@ -378,7 +325,7 @@ fn string_interpolation_with_it_column_path() {
let actual = nu!(
cwd: ".",
r#"
echo '{"name": "sammie"}' | from json | echo `{{$it.name}}`
echo [[name]; [sammie]] | each { echo `{{$it.name}}` }
"#
);

@ -471,7 +418,7 @@ fn range_with_left_var() {
let actual = nu!(
cwd: ".",
r#"
echo [[size]; [3]] | echo $it.size..10 | math sum
echo [[size]; [3]] | each { echo $it.size..10 } | math sum
"#
);

@ -483,7 +430,7 @@ fn range_with_right_var() {
let actual = nu!(
cwd: ".",
r#"
echo [[size]; [30]] | echo 4..$it.size | math sum
echo [[size]; [30]] | each { echo 4..$it.size } | math sum
"#
);

@ -562,18 +509,6 @@ fn exclusive_range_with_mixed_types() {
assert_eq!(actual.out, "55");
}

#[test]
fn it_expansion_of_tables() {
let actual = nu!(
cwd: ".",
r#"
echo foo | echo [[`foo {{$it}} bar`]; [`{{$it}} foo`]] | get "foo foo bar"
"#
);

assert_eq!(actual.out, "foo foo");
}

#[test]
fn table_with_commas() {
let actual = nu!(

@ -591,7 +526,7 @@ fn duration_overflow() {
let actual = nu!(
cwd: ".", pipeline(
r#"
ls | get modified | = $it + 1000000000000000000yr
ls | get modified | each { = $it + 1000000000000000000yr }
"#)
);

@ -603,7 +538,7 @@ fn date_and_duration_overflow() {
let actual = nu!(
cwd: ".", pipeline(
r#"
ls | get modified | = $it + 1000000yr
ls | get modified | each { = $it + 1000000yr }
"#)
);