Add subcommands. Switch from-* and to-* to them (#1708)

Author: Jonathan Turner
Date: 2020-05-04 20:44:33 +12:00
Committed by: GitHub
Parent: 453087248a
Commit: a9968046ed
64 changed files with 278 additions and 152 deletions
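
The change is mechanical across the test suite: every hyphenated from-*/to-* invocation becomes a space-separated subcommand (from json, to csv, and so on), and flags such as --separator or --headerless stay exactly where they were. As a rough illustration of what adding subcommands implies for command lookup, the following is a hypothetical Rust sketch, not Nushell's actual registry code: it simply prefers the longest registered name before falling back to the single-word command.

use std::collections::HashSet;

// Hypothetical sketch (not Nushell's actual implementation): resolve a
// space-separated subcommand such as "from json" by preferring the longest
// registered name, then falling back to the single-word command.
fn resolve_command<'a>(
    registry: &HashSet<&str>,
    words: &'a [&'a str],
) -> Option<(String, &'a [&'a str])> {
    // Try the two-word form first ("from json"), then the one-word form ("from").
    if words.len() >= 2 {
        let candidate = format!("{} {}", words[0], words[1]);
        if registry.contains(candidate.as_str()) {
            return Some((candidate, &words[2..]));
        }
    }
    let head = *words.first()?;
    if registry.contains(head) {
        return Some((head.to_string(), &words[1..]));
    }
    None
}

fn main() {
    let registry: HashSet<&str> =
        ["from", "from json", "to", "to csv"].iter().copied().collect();

    // "from json -o" resolves to the subcommand, with "-o" left over as an argument.
    let (name, rest) = resolve_command(&registry, &["from", "json", "-o"]).unwrap();
    assert_eq!(name, "from json");
    assert_eq!(rest, &["-o"]);

    // A bare "from" still resolves to the parent command.
    assert_eq!(resolve_command(&registry, &["from"]).unwrap().0, "from");
}

Under that reading, each hunk below only swaps a hyphen for a space; the surrounding pipeline and its flags are untouched.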

@@ -8,7 +8,7 @@ fn alias_args_work() {
cwd: dirs.root(),
r#"
alias double_echo [a b] {echo $a $b}
- double_echo 1 2 | to-json
+ double_echo 1 2 | to json
"#
);

@@ -40,7 +40,7 @@ fn discards_empty_rows_by_default() {
cwd: dirs.test(), pipeline(
r#"
echo "[1,2,3,14,null]"
- | from-json
+ | from json
| compact
| count
| echo $it

@@ -4,7 +4,7 @@ use nu_test_support::nu;
fn drop_rows() {
let actual = nu!(
cwd: "tests/fixtures/formats",
r#"echo '[{"foo": 3}, {"foo": 8}, {"foo": 4}]' | from-json | drop 2 | get foo | sum | echo $it"#
r#"echo '[{"foo": 3}, {"foo": 8}, {"foo": 4}]' | from json | drop 2 | get foo | sum | echo $it"#
);
assert_eq!(actual, "3");

@@ -5,7 +5,7 @@ fn each_works_separately() {
let actual = nu!(
cwd: "tests/fixtures/formats", pipeline(
r#"
- echo [1 2 3] | each { echo $it 10 | sum } | to-json | echo $it
+ echo [1 2 3] | each { echo $it 10 | sum } | to json | echo $it
"#
));

@@ -240,7 +240,7 @@ fn errors_fetching_by_index_out_of_bounds() {
fn quoted_column_access() {
let actual = nu!(
cwd: "tests/fixtures/formats",
r#"echo '[{"foo bar": {"baz": 4}}]' | from-json | get "foo bar".baz | echo $it"#
r#"echo '[{"foo bar": {"baz": 4}}]' | from json | get "foo bar".baz | echo $it"#
);
assert_eq!(actual, "4");

@@ -9,7 +9,7 @@ fn headers_uses_first_row_as_header() {
| get Sheet1
| headers
| get header0
| from-json"#
| from json"#
));
assert_eq!(actual, "r1c0r2c0")
@@ -24,7 +24,7 @@ fn headers_adds_missing_column_name() {
| get Sheet1
| headers
| get Column1
| from-json"#
| from json"#
));
assert_eq!(actual, "r1c1r2c1")

@@ -125,7 +125,7 @@ fn compound_where() {
let actual = nu!(
cwd: "tests/fixtures/formats", pipeline(
r#"
echo '[{"a": 1, "b": 1}, {"a": 2, "b": 1}, {"a": 2, "b": 2}]' | from-json | where a == 2 && b == 1 | to-json
echo '[{"a": 1, "b": 1}, {"a": 2, "b": 1}, {"a": 2, "b": 2}]' | from json | where a == 2 && b == 1 | to json
"#
));
@@ -137,7 +137,7 @@ fn compound_where_paren() {
let actual = nu!(
cwd: "tests/fixtures/formats", pipeline(
r#"
echo '[{"a": 1, "b": 1}, {"a": 2, "b": 1}, {"a": 2, "b": 2}]' | from-json | where (a == 2 && b == 1) || b == 2 | to-json
echo '[{"a": 1, "b": 1}, {"a": 2, "b": 1}, {"a": 2, "b": 2}]' | from json | where (a == 2 && b == 1) || b == 2 | to json
"#
));

@@ -131,7 +131,7 @@ fn uniq_when_keys_out_of_order() {
cwd: "tests/fixtures/formats", pipeline(
r#"
echo '[{"a": "a", "b": [1,2,3]},{"b": [1,2,3], "a": "a"}]'
- | from-json
+ | from json
| uniq
| count
| echo $it

@@ -14,7 +14,7 @@ fn filters_by_unit_size_comparison() {
fn filters_with_nothing_comparison() {
let actual = nu!(
cwd: "tests/fixtures/formats",
r#"echo '[{"foo": 3}, {"foo": null}, {"foo": 4}]' | from-json | where foo > 1 | get foo | sum | echo $it"#
r#"echo '[{"foo": 3}, {"foo": null}, {"foo": 4}]' | from json | where foo > 1 | get foo | sum | echo $it"#
);
assert_eq!(actual, "7");
@@ -24,7 +24,7 @@ fn filters_with_nothing_comparison() {
fn where_in_table() {
let actual = nu!(
cwd: "tests/fixtures/formats",
r#"echo '[{"name": "foo", "size": 3}, {"name": "foo", "size": 2}, {"name": "bar", "size": 4}]' | from-json | where name in: ["foo"] | get size | sum | echo $it"#
r#"echo '[{"name": "foo", "size": 3}, {"name": "foo", "size": 2}, {"name": "bar", "size": 4}]' | from json | where name in: ["foo"] | get size | sum | echo $it"#
);
assert_eq!(actual, "5");
@@ -34,7 +34,7 @@ fn where_in_table() {
fn where_not_in_table() {
let actual = nu!(
cwd: "tests/fixtures/formats",
r#"echo '[{"name": "foo", "size": 3}, {"name": "foo", "size": 2}, {"name": "bar", "size": 4}]' | from-json | where name not-in: ["foo"] | get size | sum | echo $it"#
r#"echo '[{"name": "foo", "size": 3}, {"name": "foo", "size": 2}, {"name": "bar", "size": 4}]' | from json | where name not-in: ["foo"] | get size | sum | echo $it"#
);
assert_eq!(actual, "4");

@@ -19,7 +19,7 @@ fn wrap_rows_into_a_row() {
cwd: dirs.test(), pipeline(
r#"
open los_tres_caballeros.txt
- | from-csv
+ | from csv
| wrap caballeros
| get caballeros
| nth 0
@@ -49,7 +49,7 @@ fn wrap_rows_into_a_table() {
cwd: dirs.test(), pipeline(
r#"
open los_tres_caballeros.txt
- | from-csv
+ | from csv
| get last_name
| wrap caballero
| nth 2

@@ -6,8 +6,8 @@ fn table_to_bson_and_back_into_table() {
cwd: "tests/fixtures/formats", pipeline(
r#"
open sample.bson
- | to-bson
- | from-bson
+ | to bson
+ | from bson
| get root
| get 1.b
| echo $it

@@ -6,7 +6,7 @@ use nu_test_support::{nu, pipeline};
fn table_to_csv_text_and_from_csv_text_back_into_table() {
let actual = nu!(
cwd: "tests/fixtures/formats",
"open caco3_plastics.csv | to-csv | from-csv | first 1 | get origin | echo $it"
"open caco3_plastics.csv | to csv | from csv | first 1 | get origin | echo $it"
);
assert_eq!(actual, "SPAIN");
@@ -32,7 +32,7 @@ fn table_to_csv_text() {
| trim
| split-column "," a b c d origin
| last 1
- | to-csv
+ | to csv
| lines
| nth 1
| echo $it
@@ -63,7 +63,7 @@ fn table_to_csv_text_skipping_headers_after_conversion() {
| trim
| split-column "," a b c d origin
| last 1
- | to-csv --headerless
+ | to csv --headerless
| echo $it
"#
));
@@ -117,7 +117,7 @@ fn from_csv_text_to_table() {
cwd: dirs.test(), pipeline(
r#"
open los_tres_caballeros.txt
- | from-csv
+ | from csv
| get rusty_luck
| count
| echo $it
@@ -145,7 +145,7 @@ fn from_csv_text_with_separator_to_table() {
cwd: dirs.test(), pipeline(
r#"
open los_tres_caballeros.txt
- | from-csv --separator ';'
+ | from csv --separator ';'
| get rusty_luck
| count
| echo $it
@@ -173,7 +173,7 @@ fn from_csv_text_with_tab_separator_to_table() {
cwd: dirs.test(), pipeline(
r#"
open los_tres_caballeros.txt
- | from-csv --separator '\t'
+ | from csv --separator '\t'
| get rusty_luck
| count
| echo $it
@@ -200,7 +200,7 @@ fn from_csv_text_skipping_headers_to_table() {
cwd: dirs.test(), pipeline(
r#"
open los_tres_amigos.txt
- | from-csv --headerless
+ | from csv --headerless
| get Column3
| count
| echo $it

@@ -5,7 +5,7 @@ fn out_html_simple() {
let actual = nu!(
cwd: ".", pipeline(
r#"
- echo 3 | to-html
+ echo 3 | to html
"#
));
@@ -17,7 +17,7 @@ fn out_html_table() {
let actual = nu!(
cwd: ".", pipeline(
r#"
echo '{"name": "jason"}' | from-json | to-html
echo '{"name": "jason"}' | from json | to html
"#
));

@@ -86,7 +86,7 @@ fn from_ics_text_to_table() {
cwd: dirs.test(), pipeline(
r#"
open calendar.txt
- | from-ics
+ | from ics
| get events
| get properties
| where name == "SUMMARY"

@@ -8,8 +8,8 @@ fn table_to_json_text_and_from_json_text_back_into_table() {
cwd: "tests/fixtures/formats", pipeline(
r#"
open sgml_description.json
- | to-json
- | from-json
+ | to json
+ | from json
| get glossary.GlossDiv.GlossList.GlossEntry.GlossSee
| echo $it
"#
@@ -37,7 +37,7 @@ fn from_json_text_to_table() {
let actual = nu!(
cwd: dirs.test(),
"open katz.txt | from-json | get katz | get rusty_luck | count | echo $it"
"open katz.txt | from json | get katz | get rusty_luck | count | echo $it"
);
assert_eq!(actual, "4");
@@ -61,7 +61,7 @@ fn from_json_text_recognizing_objects_independently_to_table() {
cwd: dirs.test(), pipeline(
r#"
open katz.txt
- | from-json -o
+ | from json -o
| where name == "GorbyPuff"
| get rusty_luck
| echo $it
@@ -90,8 +90,8 @@ fn table_to_json_text() {
| lines
| split-column "," name luck
| pick name
- | to-json
- | from-json
+ | to json
+ | from json
| nth 0
| get name
| echo $it

@@ -5,7 +5,7 @@ fn out_md_simple() {
let actual = nu!(
cwd: ".", pipeline(
r#"
- echo 3 | to-md
+ echo 3 | to md
"#
));
@@ -17,7 +17,7 @@ fn out_md_table() {
let actual = nu!(
cwd: ".", pipeline(
r#"
echo '{"name": "jason"}' | from-json | to-md
echo '{"name": "jason"}' | from json | to md
"#
));

@@ -6,8 +6,8 @@ fn table_to_sqlite_and_back_into_table() {
cwd: "tests/fixtures/formats", pipeline(
r#"
open sample.db
- | to-sqlite
- | from-sqlite
+ | to sqlite
+ | from sqlite
| get table_values
| nth 2
| get x

@@ -19,7 +19,7 @@ fn from_ssv_text_to_table() {
cwd: dirs.test(), pipeline(
r#"
open oc_get_svc.txt
- | from-ssv
+ | from ssv
| nth 0
| get IP
| echo $it
@@ -47,7 +47,7 @@ fn from_ssv_text_to_table_with_separator_specified() {
cwd: dirs.test(), pipeline(
r#"
open oc_get_svc.txt
- | from-ssv --minimum-spaces 3
+ | from ssv --minimum-spaces 3
| nth 0
| get IP
| echo $it
@@ -74,7 +74,7 @@ fn from_ssv_text_treating_first_line_as_data_with_flag() {
cwd: dirs.test(), pipeline(
r#"
open oc_get_svc.txt
- | from-ssv --headerless -a
+ | from ssv --headerless -a
| first
| get Column1
| echo $it
@@ -85,7 +85,7 @@ fn from_ssv_text_treating_first_line_as_data_with_flag() {
cwd: dirs.test(), pipeline(
r#"
open oc_get_svc.txt
- | from-ssv --headerless
+ | from ssv --headerless
| first
| get Column1
| echo $it

@@ -6,8 +6,8 @@ fn table_to_toml_text_and_from_toml_text_back_into_table() {
cwd: "tests/fixtures/formats", pipeline(
r#"
open cargo_sample.toml
- | to-toml
- | from-toml
+ | to toml
+ | from toml
| get package.name
| echo $it
"#

@@ -6,7 +6,7 @@ use nu_test_support::{nu, pipeline};
fn table_to_tsv_text_and_from_tsv_text_back_into_table() {
let actual = nu!(
cwd: "tests/fixtures/formats",
"open caco3_plastics.tsv | to-tsv | from-tsv | first 1 | get origin | echo $it"
"open caco3_plastics.tsv | to tsv | from tsv | first 1 | get origin | echo $it"
);
assert_eq!(actual, "SPAIN");
@@ -16,7 +16,7 @@ fn table_to_tsv_text_and_from_tsv_text_back_into_table() {
fn table_to_tsv_text_and_from_tsv_text_back_into_table_using_csv_separator() {
let actual = nu!(
cwd: "tests/fixtures/formats",
r"open caco3_plastics.tsv | to-tsv | from-csv --separator '\t' | first 1 | get origin | echo $it"
r"open caco3_plastics.tsv | to tsv | from csv --separator '\t' | first 1 | get origin | echo $it"
);
assert_eq!(actual, "SPAIN");
@@ -41,7 +41,7 @@ fn table_to_tsv_text() {
| lines
| split-column "\t" a b c d origin
| last 1
- | to-tsv
+ | to tsv
| lines
| nth 1
| echo $it
@@ -71,7 +71,7 @@ fn table_to_tsv_text_skipping_headers_after_conversion() {
| lines
| split-column "\t" a b c d origin
| last 1
- | to-tsv --headerless
+ | to tsv --headerless
| echo $it
"#
));
@@ -97,7 +97,7 @@ fn from_tsv_text_to_table() {
cwd: dirs.test(), pipeline(
r#"
open los_tres_amigos.txt
- | from-tsv
+ | from tsv
| get rusty_luck
| count
| echo $it
@@ -124,7 +124,7 @@ fn from_tsv_text_skipping_headers_to_table() {
cwd: dirs.test(), pipeline(
r#"
open los_tres_amigos.txt
- | from-tsv --headerless
+ | from tsv --headerless
| get Column3
| count
| echo $it

@@ -6,8 +6,8 @@ fn can_encode_and_decode_urlencoding() {
cwd: "tests/fixtures/formats", pipeline(
r#"
open sample.url
- | to-url
- | from-url
+ | to url
+ | from url
| get cheese
| echo $it
"#

@@ -70,7 +70,7 @@ fn from_vcf_text_to_table() {
cwd: dirs.test(), pipeline(
r#"
open contacts.txt
- | from-vcf
+ | from vcf
| get properties
| where name == "EMAIL"
| first

@@ -6,8 +6,8 @@ fn table_to_yaml_text_and_from_yaml_text_back_into_table() {
cwd: "tests/fixtures/formats", pipeline(
r#"
open appveyor.yml
- | to-yaml
- | from-yaml
+ | to yaml
+ | from yaml
| get environment.global.PROJECT_NAME
| echo $it
"#