From b5029545588d5277809354ce81feee78f2367b4b Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Andr=C3=A9s=20N=2E=20Robalino?=
Date: Tue, 27 Aug 2019 06:05:51 -0500
Subject: [PATCH] [from|to]json additions/refactoring.

---
 src/commands/from_json.rs | 91 ++++++++++++++++++++++++++-------------
 tests/filters_test.rs     | 79 ++++++++++++++++++++++++++++++---
 2 files changed, 133 insertions(+), 37 deletions(-)

diff --git a/src/commands/from_json.rs b/src/commands/from_json.rs
index dba9e0209f..ab4241f219 100644
--- a/src/commands/from_json.rs
+++ b/src/commands/from_json.rs
@@ -5,21 +5,26 @@
 use crate::prelude::*;
 
 pub struct FromJSON;
 
-impl WholeStreamCommand for FromJSON {
-    fn run(
-        &self,
-        args: CommandArgs,
-        registry: &CommandRegistry,
-    ) -> Result<OutputStream, ShellError> {
-        from_json(args, registry)
-    }
+#[derive(Deserialize)]
+pub struct FromJSONArgs {
+    objects: bool,
+}
 
+impl WholeStreamCommand for FromJSON {
     fn name(&self) -> &str {
         "from-json"
     }
 
     fn signature(&self) -> Signature {
-        Signature::build("from-json")
+        Signature::build("from-json").switch("objects")
+    }
+
+    fn run(
+        &self,
+        args: CommandArgs,
+        registry: &CommandRegistry,
+    ) -> Result<OutputStream, ShellError> {
+        args.process(registry, from_json)?.run()
     }
 }
@@ -64,10 +69,11 @@ pub fn from_json_string_to_value(
     Ok(convert_json_value_to_nu_value(&v, tag))
 }
 
-fn from_json(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStream, ShellError> {
-    let args = args.evaluate_once(registry)?;
-    let span = args.name_span();
-    let input = args.input;
+fn from_json(
+    FromJSONArgs { objects }: FromJSONArgs,
+    RunnableContext { input, name, .. }: RunnableContext,
+) -> Result<OutputStream, ShellError> {
+    let name_span = name;
 
     let stream = async_stream_block! {
         let values: Vec<Tagged<Value>> = input.values.collect().await;
@@ -86,7 +92,7 @@ fn from_json(args: CommandArgs, registry: &CommandRegistry) -> Result
                     yield Err(ShellError::labeled_error_with_secondary(
                         "Expected a string from pipeline",
                         "requires string input",
-                        span,
+                        name_span,
                         "value originates from here",
                         value_tag.span,
                     )),
@@ -94,25 +100,50 @@ fn from_json(args: CommandArgs, registry: &CommandRegistry) -> Result
-                match x {
-                    Tagged { item: Value::List(list), .. } => {
-                        for l in list {
-                            yield ReturnSuccess::value(l);
+
+        if objects {
+            for json_str in concat_string.lines() {
+                if json_str.is_empty() {
+                    continue;
+                }
+
+                match from_json_string_to_value(json_str.to_string(), name_span) {
+                    Ok(x) =>
+                        yield ReturnSuccess::value(x),
+                    Err(_) => {
+                        if let Some(last_tag) = latest_tag {
+                            yield Err(ShellError::labeled_error_with_secondary(
+                                "Could not parse as JSON",
+                                "input cannot be parsed as JSON",
+                                name_span,
+                                "value originates from here",
+                                last_tag.span))
                         }
                     }
-                    x => yield ReturnSuccess::value(x),
                 }
-            Err(_) => if let Some(last_tag) = latest_tag {
-                yield Err(ShellError::labeled_error_with_secondary(
-                    "Could not parse as JSON",
-                    "input cannot be parsed as JSON",
-                    span,
-                    "value originates from here",
-                    last_tag.span,
-                ))
-            } ,
+            }
+        } else {
+            match from_json_string_to_value(concat_string, name_span) {
+                Ok(x) =>
+                    match x {
+                        Tagged { item: Value::List(list), .. } => {
+                            for l in list {
+                                yield ReturnSuccess::value(l);
+                            }
+                        }
+                        x => yield ReturnSuccess::value(x),
+                    }
+                Err(_) => {
+                    if let Some(last_tag) = latest_tag {
+                        yield Err(ShellError::labeled_error_with_secondary(
+                            "Could not parse as JSON",
+                            "input cannot be parsed as JSON",
+                            name_span,
+                            "value originates from here",
+                            last_tag.span))
+                    }
+                }
+            }
         }
     };
diff --git a/tests/filters_test.rs b/tests/filters_test.rs
index 710f0b89a8..e8ce3db4ad 100644
--- a/tests/filters_test.rs
+++ b/tests/filters_test.rs
@@ -16,7 +16,7 @@ fn can_convert_table_to_csv_text_and_from_csv_text_back_into_table() {
 #[test]
 fn converts_structured_table_to_csv_text() {
     Playground::setup_for("filter_to_csv_test_1").with_files(vec![FileWithContentToBeTrimmed(
-        "sample.csv",
+        "sample.txt",
         r#"
             importer,shipper,tariff_item,name,origin
             Plasticos Rival,Reverte,2509000000,Calcium carbonate,Spain
@@ -27,7 +27,7 @@ fn converts_structured_table_to_csv_text() {
     nu!(
         output,
         cwd("tests/fixtures/nuplayground/filter_to_csv_test_1"),
-        "open sample.csv --raw | lines | split-column \",\" a b c d origin | last 1 | to-csv | lines | nth 1 | echo \"$it\""
+        r#"open sample.txt | lines | split-column "," a b c d origin | last 1 | to-csv | lines | nth 1 | echo "$it""#
     );
 
     assert!(output.contains("Tigre Ecuador,OMYA Andina,3824909999,Calcium carbonate,Colombia"));
@@ -36,7 +36,7 @@ fn converts_structured_table_to_csv_text_skipping_headers_after_conversion() {
 #[test]
 fn converts_structured_table_to_csv_text_skipping_headers_after_conversion() {
     Playground::setup_for("filter_to_csv_test_2").with_files(vec![FileWithContentToBeTrimmed(
-        "sample.csv",
+        "sample.txt",
         r#"
             importer,shipper,tariff_item,name,origin
             Plasticos Rival,Reverte,2509000000,Calcium carbonate,Spain
@@ -47,7 +47,7 @@ fn converts_structured_table_to_csv_text_skipping_headers_after_conversion() {
     nu!(
        output,
        cwd("tests/fixtures/nuplayground/filter_to_csv_test_2"),
-        "open sample.csv --raw | lines | split-column \",\" a b c d origin | last 1 | to-csv --headerless | echo \"$it\""
+        r#"open sample.txt | lines | split-column "," a b c d origin | last 1 | to-csv --headerless | echo "$it""#
     );
 
     assert!(output.contains("Tigre Ecuador,OMYA Andina,3824909999,Calcium carbonate,Colombia"));
@@ -106,6 +106,71 @@ fn can_convert_table_to_json_text_and_from_json_text_back_into_table() {
     assert_eq!(output, "markup");
 }
 
+#[test]
+fn converts_from_json_text_to_structured_table() {
+    Playground::setup_for("filter_from_json_test_1").with_files(vec![FileWithContentToBeTrimmed(
+        "katz.txt",
+        r#"
+            {
+                "katz": [
+                    {"name": "Yehuda", "rusty_luck": 1},
+                    {"name": "Jonathan", "rusty_luck": 1},
+                    {"name": "Andres", "rusty_luck": 1},
+                    {"name":"GorbyPuff", "rusty_luck": 1}
+                ]
+            }
+        "#,
+    )]);
+
+    nu!(
+        output,
+        cwd("tests/fixtures/nuplayground/filter_from_json_test_1"),
+        "open katz.txt | from-json | get katz | get rusty_luck | sum | echo $it"
+    );
+
+    assert_eq!(output, "4");
+}
+
+#[test]
+fn converts_from_json_text_recognizing_objects_independently_to_structured_table() {
+    Playground::setup_for("filter_from_json_test_2").with_files(vec![FileWithContentToBeTrimmed(
+        "katz.txt",
+        r#"
+            {"name": "Yehuda", "rusty_luck": 1}
+            {"name": "Jonathan", "rusty_luck": 1}
+            {"name": "Andres", "rusty_luck": 1}
+            {"name":"GorbyPuff", "rusty_luck": 3}
+        "#,
+    )]);
+
+    nu!(
+        output,
+        cwd("tests/fixtures/nuplayground/filter_from_json_test_2"),
+        r#"open katz.txt | from-json --objects | where name == "GorbyPuff" | get rusty_luck | echo $it"#
+    );
+
+    assert_eq!(output, "3");
+}
+
+#[test]
+fn converts_structured_table_to_json_text() {
+    Playground::setup_for("filter_to_json_test_1").with_files(vec![FileWithContentToBeTrimmed(
+        "sample.txt",
+        r#"
+            JonAndrehudaTZ,3
+            GorbyPuff,100
+        "#,
+    )]);
+
+    nu!(
+        output,
+        cwd("tests/fixtures/nuplayground/filter_to_json_test_1"),
+        r#"open sample.txt | lines | split-column "," name luck | pick name | to-json | nth 0 | from-json | get name | echo $it"#
+    );
+
+    assert_eq!(output, "JonAndrehudaTZ");
+}
+
 #[test]
 fn can_convert_json_text_to_bson_and_back_into_table() {
     nu!(
@@ -144,7 +209,7 @@ fn can_sort_by_column() {
     nu!(
         output,
         cwd("tests/fixtures/formats"),
-        "open cargo_sample.toml --raw | lines | skip 1 | first 4 | split-column \"=\" | sort-by Column1 | skip 1 | first 1 | get Column1 | trim | echo $it"
+        r#"open cargo_sample.toml --raw | lines | skip 1 | first 4 | split-column "=" | sort-by Column1 | skip 1 | first 1 | get Column1 | trim | echo $it"#
     );
 
     assert_eq!(output, "description");
@@ -155,7 +220,7 @@ fn can_sort_by_column_reverse() {
     nu!(
         output,
         cwd("tests/fixtures/formats"),
-        "open cargo_sample.toml --raw | lines | skip 1 | first 4 | split-column \"=\" | sort-by Column1 --reverse | skip 1 | first 1 | get Column1 | trim | echo $it"
+        r#"open cargo_sample.toml --raw | lines | skip 1 | first 4 | split-column "=" | sort-by Column1 --reverse | skip 1 | first 1 | get Column1 | trim | echo $it"#
     );
 
     assert_eq!(output, "name");
@@ -166,7 +231,7 @@ fn can_split_by_column() {
     nu!(
         output,
         cwd("tests/fixtures/formats"),
-        "open cargo_sample.toml --raw | lines | skip 1 | first 1 | split-column \"=\" | get Column1 | trim | echo $it"
+        r#"open cargo_sample.toml --raw | lines | skip 1 | first 1 | split-column "=" | get Column1 | trim | echo $it"#
     );
 
     assert_eq!(output, "name");