Mirror of https://github.com/nushell/nushell.git
Merge pull request #486 from androbtech/fromto-checks
[from|to]json additions/refactoring.
Commit: f59b78a764
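
This change adds an --objects switch to from-json. The default behaviour is unchanged: the whole input is still parsed as a single JSON document. With --objects, the input is treated as line-delimited JSON and every non-empty line is parsed and emitted as its own value. A minimal usage sketch based on the tests added below (events.txt is a hypothetical file holding one JSON object per line; the name and rusty_luck fields mirror the katz.txt fixture added in the tests):

    open events.txt | from-json --objects | where name == "GorbyPuff" | get rusty_luck | echo $it
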
@@ -5,21 +5,26 @@ use crate::prelude::*;
 
 pub struct FromJSON;
 
-impl WholeStreamCommand for FromJSON {
-    fn run(
-        &self,
-        args: CommandArgs,
-        registry: &CommandRegistry,
-    ) -> Result<OutputStream, ShellError> {
-        from_json(args, registry)
-    }
+#[derive(Deserialize)]
+pub struct FromJSONArgs {
+    objects: bool,
+}
 
+impl WholeStreamCommand for FromJSON {
     fn name(&self) -> &str {
         "from-json"
     }
 
     fn signature(&self) -> Signature {
-        Signature::build("from-json")
+        Signature::build("from-json").switch("objects")
+    }
+
+    fn run(
+        &self,
+        args: CommandArgs,
+        registry: &CommandRegistry,
+    ) -> Result<OutputStream, ShellError> {
+        args.process(registry, from_json)?.run()
     }
 }
 
@@ -64,10 +69,11 @@ pub fn from_json_string_to_value(
     Ok(convert_json_value_to_nu_value(&v, tag))
 }
 
-fn from_json(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStream, ShellError> {
-    let args = args.evaluate_once(registry)?;
-    let span = args.name_span();
-    let input = args.input;
+fn from_json(
+    FromJSONArgs { objects }: FromJSONArgs,
+    RunnableContext { input, name, .. }: RunnableContext,
+) -> Result<OutputStream, ShellError> {
+    let name_span = name;
 
     let stream = async_stream_block! {
         let values: Vec<Tagged<Value>> = input.values.collect().await;
@@ -86,7 +92,7 @@ fn from_json(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStre
                 _ => yield Err(ShellError::labeled_error_with_secondary(
                     "Expected a string from pipeline",
                     "requires string input",
-                    span,
+                    name_span,
                     "value originates from here",
                     value_tag.span,
                 )),
@@ -94,7 +100,30 @@ fn from_json(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStre
             }
         }
 
-        match from_json_string_to_value(concat_string, span) {
+        if objects {
+            for json_str in concat_string.lines() {
+                if json_str.is_empty() {
+                    continue;
+                }
+
+                match from_json_string_to_value(json_str.to_string(), name_span) {
+                    Ok(x) =>
+                        yield ReturnSuccess::value(x),
+                    Err(_) => {
+                        if let Some(last_tag) = latest_tag {
+                            yield Err(ShellError::labeled_error_with_secondary(
+                                "Could nnot parse as JSON",
+                                "input cannot be parsed as JSON",
+                                name_span,
+                                "value originates from here",
+                                last_tag.span))
+                        }
+                    }
+                }
+            }
+        } else {
+            match from_json_string_to_value(concat_string, name_span) {
             Ok(x) =>
                 match x {
                     Tagged { item: Value::List(list), .. } => {
@@ -104,15 +133,17 @@ fn from_json(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStre
                     }
                     x => yield ReturnSuccess::value(x),
                 }
-            Err(_) => if let Some(last_tag) = latest_tag {
+            Err(_) => {
+                if let Some(last_tag) = latest_tag {
                 yield Err(ShellError::labeled_error_with_secondary(
                     "Could not parse as JSON",
                     "input cannot be parsed as JSON",
-                    span,
+                    name_span,
                     "value originates from here",
-                    last_tag.span,
-                ))
-            } ,
+                    last_tag.span))
+                }
+            }
+        }
         }
     };
 
@@ -18,7 +18,7 @@ fn ls_lists_regular_files() {
     nu!(
         output,
         cwd(&full_path),
-        r#"ls | get name | lines | split-column "." | get Column2 | str Column2 --to-int | sum | echo $it"#
+        r#"ls | get name | lines | split-column "." | get Column2 | str --to-int | sum | echo $it"#
     );
 
     assert_eq!(output, "30");
@@ -40,7 +40,7 @@ fn ls_lists_regular_files_using_asterisk_wildcard() {
     nu!(
         output,
         cwd(&full_path),
-        "ls *.txt | get name | lines| split-column \".\" | get Column2 | str Column2 --to-int | sum | echo $it"
+        r#"ls *.txt | get name | lines| split-column "." | get Column2 | str --to-int | sum | echo $it"#
     );
 
     assert_eq!(output, "3");
@@ -62,7 +62,7 @@ fn ls_lists_regular_files_using_question_mark_wildcard() {
     nu!(
         output,
         cwd(&full_path),
-        "ls *.??.txt | get name | lines| split-column \".\" | get Column2 | str Column2 --to-int | sum | echo $it"
+        r#"ls *.??.txt | get name | lines| split-column "." | get Column2 | str --to-int | sum | echo $it"#
     );
 
     assert_eq!(output, "30");

@@ -17,7 +17,7 @@ fn recognizes_csv() {
     nu!(
         output,
         cwd("tests/fixtures/nuplayground/open_recognizes_csv_test"),
-        "open nu.zion.csv | where author == \"Andres N. Robalino\" | get source | echo $it"
+        r#"open nu.zion.csv | where author == "Andres N. Robalino" | get source | echo $it"#
     );
 
     assert_eq!(output, "Ecuador");

@@ -51,7 +51,7 @@ fn rm_removes_files_with_wildcard() {
     nu!(
         _output,
         cwd("tests/fixtures/nuplayground/rm_wildcard_test_1"),
-        "rm \"src/*/*/*.rs\""
+        r#"rm "src/*/*/*.rs""#
     );
 
     assert!(!h::files_exist_at(

@@ -7,7 +7,7 @@ use helpers as h;
 fn lines() {
     nu!(output,
         cwd("tests/fixtures/formats"),
-        "open cargo_sample.toml --raw | lines | skip-while $it != \"[dependencies]\" | skip 1 | first 1 | split-column \"=\" | get Column1 | trim | echo $it"
+        r#"open cargo_sample.toml --raw | lines | skip-while $it != "[dependencies]" | skip 1 | first 1 | split-column "=" | get Column1 | trim | echo $it"#
     );
 
     assert_eq!(output, "rustyline");

@@ -4,8 +4,6 @@ use helpers::in_directory as cwd;
 
 #[test]
 fn external_command() {
-    // Echo should exist on all currently supported platforms. A better approach might
-    // be to generate a dummy executable as part of the tests with known semantics.
     nu!(output, cwd("tests/fixtures"), "echo 1");
 
     assert!(output.contains("1"));

@@ -119,7 +119,7 @@ fn find_and_replaces() {
     nu!(
         output,
         cwd("tests/fixtures/nuplayground/plugin_str_find_and_replaces_test"),
-        "open sample.toml | str fortune.teller.phone --find-replace KATZ \"5289\" | get fortune.teller.phone | echo $it"
+        r#"open sample.toml | str fortune.teller.phone --find-replace KATZ "5289" | get fortune.teller.phone | echo $it"#
     );
 
     assert_eq!(output, "1-800-5289");
@@ -140,7 +140,7 @@ fn find_and_replaces_without_passing_field() {
     nu!(
         output,
         cwd("tests/fixtures/nuplayground/plugin_str_find_and_replaces_without_passing_field_test"),
-        "open sample.toml | get fortune.teller.phone | str --find-replace KATZ \"5289\" | echo $it"
+        r#"open sample.toml | get fortune.teller.phone | str --find-replace KATZ "5289" | echo $it"#
     );
 
     assert_eq!(output, "1-800-5289");

|
@ -16,7 +16,7 @@ fn can_convert_table_to_csv_text_and_from_csv_text_back_into_table() {
|
|||||||
#[test]
|
#[test]
|
||||||
fn converts_structured_table_to_csv_text() {
|
fn converts_structured_table_to_csv_text() {
|
||||||
Playground::setup_for("filter_to_csv_test_1").with_files(vec![FileWithContentToBeTrimmed(
|
Playground::setup_for("filter_to_csv_test_1").with_files(vec![FileWithContentToBeTrimmed(
|
||||||
"sample.csv",
|
"sample.txt",
|
||||||
r#"
|
r#"
|
||||||
importer,shipper,tariff_item,name,origin
|
importer,shipper,tariff_item,name,origin
|
||||||
Plasticos Rival,Reverte,2509000000,Calcium carbonate,Spain
|
Plasticos Rival,Reverte,2509000000,Calcium carbonate,Spain
|
||||||
@ -27,7 +27,7 @@ fn converts_structured_table_to_csv_text() {
|
|||||||
nu!(
|
nu!(
|
||||||
output,
|
output,
|
||||||
cwd("tests/fixtures/nuplayground/filter_to_csv_test_1"),
|
cwd("tests/fixtures/nuplayground/filter_to_csv_test_1"),
|
||||||
"open sample.csv --raw | lines | split-column \",\" a b c d origin | last 1 | to-csv | lines | nth 1 | echo \"$it\""
|
r#"open sample.txt | lines | split-column "," a b c d origin | last 1 | to-csv | lines | nth 1 | echo "$it""#
|
||||||
);
|
);
|
||||||
|
|
||||||
assert!(output.contains("Tigre Ecuador,OMYA Andina,3824909999,Calcium carbonate,Colombia"));
|
assert!(output.contains("Tigre Ecuador,OMYA Andina,3824909999,Calcium carbonate,Colombia"));
|
||||||
@ -36,7 +36,7 @@ fn converts_structured_table_to_csv_text() {
|
|||||||
#[test]
|
#[test]
|
||||||
fn converts_structured_table_to_csv_text_skipping_headers_after_conversion() {
|
fn converts_structured_table_to_csv_text_skipping_headers_after_conversion() {
|
||||||
Playground::setup_for("filter_to_csv_test_2").with_files(vec![FileWithContentToBeTrimmed(
|
Playground::setup_for("filter_to_csv_test_2").with_files(vec![FileWithContentToBeTrimmed(
|
||||||
"sample.csv",
|
"sample.txt",
|
||||||
r#"
|
r#"
|
||||||
importer,shipper,tariff_item,name,origin
|
importer,shipper,tariff_item,name,origin
|
||||||
Plasticos Rival,Reverte,2509000000,Calcium carbonate,Spain
|
Plasticos Rival,Reverte,2509000000,Calcium carbonate,Spain
|
||||||
@ -47,7 +47,7 @@ fn converts_structured_table_to_csv_text_skipping_headers_after_conversion() {
|
|||||||
nu!(
|
nu!(
|
||||||
output,
|
output,
|
||||||
cwd("tests/fixtures/nuplayground/filter_to_csv_test_2"),
|
cwd("tests/fixtures/nuplayground/filter_to_csv_test_2"),
|
||||||
"open sample.csv --raw | lines | split-column \",\" a b c d origin | last 1 | to-csv --headerless | echo \"$it\""
|
r#"open sample.txt | lines | split-column "," a b c d origin | last 1 | to-csv --headerless | echo "$it""#
|
||||||
);
|
);
|
||||||
|
|
||||||
assert!(output.contains("Tigre Ecuador,OMYA Andina,3824909999,Calcium carbonate,Colombia"));
|
assert!(output.contains("Tigre Ecuador,OMYA Andina,3824909999,Calcium carbonate,Colombia"));
|
||||||
@ -106,6 +106,71 @@ fn can_convert_table_to_json_text_and_from_json_text_back_into_table() {
|
|||||||
assert_eq!(output, "markup");
|
assert_eq!(output, "markup");
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn converts_from_json_text_to_structured_table() {
|
||||||
|
Playground::setup_for("filter_from_json_test_1").with_files(vec![FileWithContentToBeTrimmed(
|
||||||
|
"katz.txt",
|
||||||
|
r#"
|
||||||
|
{
|
||||||
|
"katz": [
|
||||||
|
{"name": "Yehuda", "rusty_luck": 1},
|
||||||
|
{"name": "Jonathan", "rusty_luck": 1},
|
||||||
|
{"name": "Andres", "rusty_luck": 1},
|
||||||
|
{"name":"GorbyPuff", "rusty_luck": 1}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
"#,
|
||||||
|
)]);
|
||||||
|
|
||||||
|
nu!(
|
||||||
|
output,
|
||||||
|
cwd("tests/fixtures/nuplayground/filter_from_json_test_1"),
|
||||||
|
"open katz.txt | from-json | get katz | get rusty_luck | sum | echo $it"
|
||||||
|
);
|
||||||
|
|
||||||
|
assert_eq!(output, "4");
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn converts_from_json_text_recognizing_objects_independendtly_to_structured_table() {
|
||||||
|
Playground::setup_for("filter_from_json_test_2").with_files(vec![FileWithContentToBeTrimmed(
|
||||||
|
"katz.txt",
|
||||||
|
r#"
|
||||||
|
{"name": "Yehuda", "rusty_luck": 1}
|
||||||
|
{"name": "Jonathan", "rusty_luck": 1}
|
||||||
|
{"name": "Andres", "rusty_luck": 1}
|
||||||
|
{"name":"GorbyPuff", "rusty_luck": 3}
|
||||||
|
"#,
|
||||||
|
)]);
|
||||||
|
|
||||||
|
nu!(
|
||||||
|
output,
|
||||||
|
cwd("tests/fixtures/nuplayground/filter_from_json_test_2"),
|
||||||
|
r#"open katz.txt | from-json --objects | where name == "GorbyPuff" | get rusty_luck | echo $it"#
|
||||||
|
);
|
||||||
|
|
||||||
|
assert_eq!(output, "3");
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn converts_structured_table_to_json_text() {
|
||||||
|
Playground::setup_for("filter_to_json_test_1").with_files(vec![FileWithContentToBeTrimmed(
|
||||||
|
"sample.txt",
|
||||||
|
r#"
|
||||||
|
JonAndrehudaTZ,3
|
||||||
|
GorbyPuff,100
|
||||||
|
"#,
|
||||||
|
)]);
|
||||||
|
|
||||||
|
nu!(
|
||||||
|
output,
|
||||||
|
cwd("tests/fixtures/nuplayground/filter_to_json_test_1"),
|
||||||
|
r#"open sample.txt | lines | split-column "," name luck | pick name | to-json | nth 0 | from-json | get name | echo $it"#
|
||||||
|
);
|
||||||
|
|
||||||
|
assert_eq!(output, "JonAndrehudaTZ");
|
||||||
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn can_convert_json_text_to_bson_and_back_into_table() {
|
fn can_convert_json_text_to_bson_and_back_into_table() {
|
||||||
nu!(
|
nu!(
|
||||||
@ -144,7 +209,7 @@ fn can_sort_by_column() {
|
|||||||
nu!(
|
nu!(
|
||||||
output,
|
output,
|
||||||
cwd("tests/fixtures/formats"),
|
cwd("tests/fixtures/formats"),
|
||||||
"open cargo_sample.toml --raw | lines | skip 1 | first 4 | split-column \"=\" | sort-by Column1 | skip 1 | first 1 | get Column1 | trim | echo $it"
|
r#"open cargo_sample.toml --raw | lines | skip 1 | first 4 | split-column "=" | sort-by Column1 | skip 1 | first 1 | get Column1 | trim | echo $it"#
|
||||||
);
|
);
|
||||||
|
|
||||||
assert_eq!(output, "description");
|
assert_eq!(output, "description");
|
||||||
@ -155,7 +220,7 @@ fn can_sort_by_column_reverse() {
|
|||||||
nu!(
|
nu!(
|
||||||
output,
|
output,
|
||||||
cwd("tests/fixtures/formats"),
|
cwd("tests/fixtures/formats"),
|
||||||
"open cargo_sample.toml --raw | lines | skip 1 | first 4 | split-column \"=\" | sort-by Column1 --reverse | skip 1 | first 1 | get Column1 | trim | echo $it"
|
r#"open cargo_sample.toml --raw | lines | skip 1 | first 4 | split-column "=" | sort-by Column1 --reverse | skip 1 | first 1 | get Column1 | trim | echo $it"#
|
||||||
);
|
);
|
||||||
|
|
||||||
assert_eq!(output, "name");
|
assert_eq!(output, "name");
|
||||||
@ -166,7 +231,7 @@ fn can_split_by_column() {
|
|||||||
nu!(
|
nu!(
|
||||||
output,
|
output,
|
||||||
cwd("tests/fixtures/formats"),
|
cwd("tests/fixtures/formats"),
|
||||||
"open cargo_sample.toml --raw | lines | skip 1 | first 1 | split-column \"=\" | get Column1 | trim | echo $it"
|
r#"open cargo_sample.toml --raw | lines | skip 1 | first 1 | split-column "=" | get Column1 | trim | echo $it"#
|
||||||
);
|
);
|
||||||
|
|
||||||
assert_eq!(output, "name");
|
assert_eq!(output, "name");
|
||||||
|
@@ -27,7 +27,7 @@ fn external_has_correct_quotes() {
 fn add_plugin() {
     nu!(output,
         cwd("tests/fixtures/formats"),
-        "open cargo_sample.toml | add dev-dependencies.newdep \"1\" | get dev-dependencies.newdep | echo $it"
+        r#"open cargo_sample.toml | add dev-dependencies.newdep "1" | get dev-dependencies.newdep | echo $it"#
     );
 
     assert_eq!(output, "1");
@@ -37,7 +37,7 @@ fn add_plugin() {
 fn edit_plugin() {
     nu!(output,
         cwd("tests/fixtures/formats"),
-        "open cargo_sample.toml | edit dev-dependencies.pretty_assertions \"7\" | get dev-dependencies.pretty_assertions | echo $it"
+        r#"open cargo_sample.toml | edit dev-dependencies.pretty_assertions "7" | get dev-dependencies.pretty_assertions | echo $it"#
     );
 
     assert_eq!(output, "7");