2019-12-31 05:05:02 +01:00
|
|
|
use nu_test_support::fs::Stub::FileWithContentToBeTrimmed;
|
|
|
|
use nu_test_support::playground::Playground;
|
|
|
|
use nu_test_support::{nu, pipeline};
|
|
|
|
|
|
|
|
#[test]
fn removes_duplicate_rows() {
    Playground::setup("uniq_test_1", |dirs, sandbox| {
        // Fixture: five CSV rows; the last two are exact repeats of rows 2 and 3.
        sandbox.with_files(vec![FileWithContentToBeTrimmed(
            "los_tres_caballeros.csv",
            r#"
first_name,last_name,rusty_at,type
Andrés,Robalino,10/11/2013,A
Jonathan,Turner,10/12/2013,B
Yehuda,Katz,10/11/2013,A
Jonathan,Turner,10/12/2013,B
Yehuda,Katz,10/11/2013,A
"#,
        )]);

        // Open the file, drop duplicate rows, then count what survives.
        let output = nu!(
            cwd: dirs.test(), pipeline(
            r#"
open los_tres_caballeros.csv
| uniq
| count
| echo $it
"#
        ));

        // Three distinct rows remain after `uniq`.
        assert_eq!(output, "3");
    })
}
|
|
|
|
|
|
|
|
#[test]
fn uniq_values() {
    Playground::setup("uniq_test_2", |dirs, sandbox| {
        // Fixture: same five-row CSV; only the `type` column is of interest here.
        sandbox.with_files(vec![FileWithContentToBeTrimmed(
            "los_tres_caballeros.csv",
            r#"
first_name,last_name,rusty_at,type
Andrés,Robalino,10/11/2013,A
Jonathan,Turner,10/12/2013,B
Yehuda,Katz,10/11/2013,A
Jonathan,Turner,10/12/2013,B
Yehuda,Katz,10/11/2013,A
"#,
        )]);

        // Project a single column first, so `uniq` deduplicates values
        // rather than whole rows.
        let output = nu!(
            cwd: dirs.test(), pipeline(
            r#"
open los_tres_caballeros.csv
| pick type
| uniq
| count
| echo $it
"#
        ));

        // The `type` column only contains the values A and B.
        assert_eq!(output, "2");
    })
}
|
|
|
|
|
|
|
|
#[test]
fn nested_json_structures() {
    Playground::setup("uniq_test_3", |dirs, sandbox| {
        // Fixture: four JSON objects. The first two are structurally equal —
        // same keys and values, just listed in a different order — so `uniq`
        // should treat them as one. The remaining two are genuinely distinct.
        sandbox.with_files(vec![FileWithContentToBeTrimmed(
            "nested_json_structures.json",
            r#"
[
{
"name": "this is duplicated",
"nesting": [ { "a": "a", "b": "b" },
{ "c": "c", "d": "d" }
],
"can_be_ordered_differently": {
"array": [1, 2, 3, 4, 5],
"something": { "else": "works" }
}
},
{
"can_be_ordered_differently": {
"something": { "else": "works" },
"array": [1, 2, 3, 4, 5]
},
"nesting": [ { "b": "b", "a": "a" },
{ "d": "d", "c": "c" }
],
"name": "this is duplicated"
},
{
"name": "this is unique",
"nesting": [ { "a": "b", "b": "a" },
{ "c": "d", "d": "c" }
],
"can_be_ordered_differently": {
"array": [],
"something": { "else": "does not work" }
}
},
{
"name": "this is unique",
"nesting": [ { "a": "a", "b": "b", "c": "c" },
{ "d": "d", "e": "e", "f": "f" }
],
"can_be_ordered_differently": {
"array": [],
"something": { "else": "works" }
}
}
]
"#,
        )]);

        let output = nu!(
            cwd: dirs.test(), pipeline(
            r#"
open nested_json_structures.json
| uniq
| count
| echo $it
"#
        ));

        // Key order must not matter: 4 objects collapse to 3 distinct ones.
        assert_eq!(output, "3");
    })
}
|
|
|
|
|
|
|
|
#[test]
fn uniq_when_keys_out_of_order() {
    // Two records with identical contents but reversed key order, built
    // inline from a JSON literal — no playground fixture file needed.
    let output = nu!(
        cwd: "tests/fixtures/formats", pipeline(
        r#"
echo '[{"a": "a", "b": [1,2,3]},{"b": [1,2,3], "a": "a"}]'
| from-json
| uniq
| count
| echo $it
"#
    ));

    // `uniq` must recognize the two records as equal regardless of key order.
    assert_eq!(output, "1");
}
|