2021-03-13 22:47:31 +01:00
|
|
|
use nu_test_support::fs::Stub::EmptyFile;
|
2023-09-29 17:20:59 +02:00
|
|
|
use nu_test_support::fs::Stub::FileWithContent;
|
2019-12-17 19:54:39 +01:00
|
|
|
use nu_test_support::fs::Stub::FileWithContentToBeTrimmed;
|
|
|
|
use nu_test_support::playground::Playground;
|
2020-05-07 13:03:43 +02:00
|
|
|
use nu_test_support::{nu, pipeline};
|
2019-08-25 14:59:46 +02:00
|
|
|
|
2023-09-29 17:20:59 +02:00
|
|
|
#[test]
fn parses_file_with_uppercase_extension() {
    // `open` should match the format by extension case-insensitively:
    // a `.JSON` file must still be parsed as JSON.
    Playground::setup("open_test_uppercase_extension", |dirs, sandbox| {
        sandbox.with_files(vec![FileWithContent(
            "nu.zion.JSON",
            r#"{
    "glossary": {
        "GlossDiv": {
            "GlossList": {
                "GlossEntry": {
                    "ID": "SGML"
                }
            }
        }
    }
}"#,
        )]);

        let id_value = nu!(
            cwd: dirs.test(),
            pipeline(
                r#"
                open nu.zion.JSON
                | get glossary.GlossDiv.GlossList.GlossEntry.ID
                "#
            )
        );

        // Reaching the nested field proves the file was parsed as structured JSON.
        assert_eq!(id_value.out, "SGML");
    })
}
|
|
|
|
|
2023-10-13 20:45:36 +02:00
|
|
|
#[test]
fn parses_file_with_multiple_extensions() {
    // For a name like `file.tar.gz`, `open` should prefer the longest matching
    // converter ("from tar.gz") over the shorter one ("from gz"), and fall back
    // to the shorter one ("from xz") when no longer match is defined.
    Playground::setup("open_test_multiple_extensions", |dirs, sandbox| {
        sandbox.with_files(vec![
            FileWithContent("file.tar.gz", "this is a tar.gz file"),
            FileWithContent("file.tar.xz", "this is a tar.xz file"),
        ]);

        // Hide any built-in converters first, then install our own markers.
        let gz_result = nu!(
            cwd: dirs.test(),
            pipeline(
                r#"
                hide "from tar.gz" ;
                hide "from gz" ;

                def "from tar.gz" [] { 'opened tar.gz' } ;
                def "from gz" [] { 'opened gz' } ;
                open file.tar.gz
                "#
            )
        );

        assert_eq!(gz_result.out, "opened tar.gz");

        // Here only "from tar" and "from xz" exist, so "from xz" should win
        // as the last-extension match.
        let xz_result = nu!(
            cwd: dirs.test(),
            pipeline(
                r#"
                hide "from tar.xz" ;
                hide "from xz" ;
                hide "from tar" ;

                def "from tar" [] { 'opened tar' } ;
                def "from xz" [] { 'opened xz' } ;
                open file.tar.xz
                "#
            )
        );

        assert_eq!(xz_result.out, "opened xz");
    })
}
|
|
|
|
|
|
|
|
#[test]
fn parses_dotfile() {
    // A dotfile such as `.gitignore` should be routed to a converter named
    // after its "extension" (here: "from gitignore") if one is defined.
    Playground::setup("open_test_dotfile", |dirs, sandbox| {
        sandbox.with_files(vec![FileWithContent(
            ".gitignore",
            r#"
            /target/
            "#,
        )]);

        let result = nu!(
            cwd: dirs.test(),
            pipeline(
                r#"
                hide "from gitignore" ;

                def "from gitignore" [] { 'opened gitignore' } ;
                open .gitignore
                "#
            )
        );

        // Our custom converter ran, so the dotfile name was dispatched correctly.
        assert_eq!(result.out, "opened gitignore");
    })
}
|
|
|
|
|
2019-08-25 14:59:46 +02:00
|
|
|
#[test]
fn parses_csv() {
    // `open` on a .csv file should yield a table we can filter and index.
    Playground::setup("open_test_1", |dirs, sandbox| {
        sandbox.with_files(vec![FileWithContentToBeTrimmed(
            "nu.zion.csv",
            r#"
                author,lang,source
                JT Turner,Rust,New Zealand
                Andres N. Robalino,Rust,Ecuador
                Yehuda Katz,Rust,Estados Unidos
            "#,
        )]);

        let result = nu!(
            cwd: dirs.test(),
            pipeline(
                r#"
                open nu.zion.csv
                | where author == "Andres N. Robalino"
                | get source.0
                "#
            )
        );

        assert_eq!(result.out, "Ecuador");
    })
}
|
|
|
|
|
2019-09-04 19:36:12 +02:00
|
|
|
// sample.db has the following format:
//
// ╭─────────┬────────────────╮
// │ strings │ [table 6 rows] │
// │ ints    │ [table 5 rows] │
// │ floats  │ [table 4 rows] │
// ╰─────────┴────────────────╯
//
// In this case, this represents a sqlite database
// with three tables named `strings`, `ints`, and `floats`.
//
// Each table has different columns. `strings` has `x` and `y`, while
// `ints` has just `z`, and `floats` has only the column `f`. In general, when working
// with sqlite, one will want to select a single table, e.g.:
//
// open sample.db | get ints
// ╭───┬──────╮
// │ # │  z   │
// ├───┼──────┤
// │ 0 │    1 │
// │ 1 │   42 │
// │ 2 │  425 │
// │ 3 │ 4253 │
// │ 4 │      │
// ╰───┴──────╯
|
|
|
|
|
2022-11-23 01:58:11 +01:00
|
|
|
#[cfg(feature = "sqlite")]
#[test]
fn parses_sqlite() {
    // Opening the fixture database should expose its three tables
    // (`strings`, `ints`, `floats`) as top-level columns.
    let result = nu!(
        cwd: "tests/fixtures/formats",
        pipeline(
            "
            open sample.db
            | columns
            | length
            "
        )
    );

    assert_eq!(result.out, "3");
}
|
|
|
|
|
2022-11-23 01:58:11 +01:00
|
|
|
#[cfg(feature = "sqlite")]
#[test]
fn parses_sqlite_get_column_name() {
    // Drill into the `strings` table and read the first value of column `x`.
    let result = nu!(
        cwd: "tests/fixtures/formats",
        pipeline(
            "
            open sample.db
            | get strings
            | get x.0
            "
        )
    );

    assert_eq!(result.out, "hello");
}
|
|
|
|
|
2019-08-25 14:59:46 +02:00
|
|
|
#[test]
fn parses_toml() {
    // A .toml file should open as structured data addressable by dotted path.
    let result = nu!(
        cwd: "tests/fixtures/formats",
        "open cargo_sample.toml | get package.edition"
    );

    assert_eq!(result.out, "2018");
}
|
|
|
|
|
2019-08-29 11:02:16 +02:00
|
|
|
#[test]
fn parses_tsv() {
    // Tab-separated values should parse into rows with named columns.
    let result = nu!(
        cwd: "tests/fixtures/formats",
        pipeline(
            "
            open caco3_plastics.tsv
            | first
            | get origin
            "
        )
    );

    assert_eq!(result.out, "SPAIN")
}
|
|
|
|
|
2019-08-25 14:59:46 +02:00
|
|
|
#[test]
fn parses_json() {
    // A .json fixture should open as a nested record we can traverse.
    let result = nu!(
        cwd: "tests/fixtures/formats",
        pipeline(
            "
            open sgml_description.json
            | get glossary.GlossDiv.GlossList.GlossEntry.GlossSee
            "
        )
    );

    assert_eq!(result.out, "markup")
}
|
|
|
|
|
2022-02-16 19:24:45 +01:00
|
|
|
#[test]
fn parses_xml() {
    // XML opens as a tag/content tree; walk channel -> item -> guid
    // down to the text node holding the permalink.
    let result = nu!(
        cwd: "tests/fixtures/formats",
        pipeline("
            open jt.xml
            | get content
            | where tag == channel
            | get content
            | flatten
            | where tag == item
            | get content
            | flatten
            | where tag == guid
            | get content.0.content.0
        ")
    );

    assert_eq!(result.out, "https://www.jntrnr.com/off-to-new-adventures/")
}
|
|
|
|
|
2022-10-29 05:25:19 +02:00
|
|
|
#[cfg(feature = "dataframe")]
#[test]
fn parses_arrow_ipc() {
    // Arrow IPC files go through the dataframe plugin: open as a dataframe,
    // convert back to nu values, then inspect the first row.
    let result = nu!(
        cwd: "tests/fixtures/formats",
        pipeline(
            "
            dfr open caco3_plastics.arrow
            | dfr into-nu
            | first
            | get origin
            "
        )
    );

    assert_eq!(result.out, "SPAIN")
}
|
|
|
|
|
2019-08-25 14:59:46 +02:00
|
|
|
#[test]
fn errors_if_file_not_found() {
    // Opening a nonexistent path must surface an error on stderr.
    let actual = nu!(
        cwd: "tests/fixtures/formats",
        "open i_dont_exist.txt"
    );
    // Common error code between unixes and Windows for "No such file or directory"
    //
    // This seems to be not directly affected by localization compared to the OS
    // provided error message
    let expected = "File not found";

    // Fix: the failure message was missing a space before the second
    // placeholder ("does not contain{}"), producing garbled diagnostics.
    assert!(
        actual.err.contains(expected),
        "Error:\n{}\ndoes not contain {}",
        actual.err,
        expected
    );
}
|
2021-03-13 22:47:31 +01:00
|
|
|
|
|
|
|
#[test]
fn open_wildcard() {
    // A glob argument should open every matching file; count the lines
    // mentioning `echo` across all .nu fixtures.
    let result = nu!(
        cwd: "tests/fixtures/formats",
        pipeline(
            "
            open *.nu | where $it =~ echo | length
            "
        )
    );

    assert_eq!(result.out, "3")
}
|
2021-03-13 22:47:31 +01:00
|
|
|
|
2023-03-17 20:51:39 +01:00
|
|
|
#[test]
fn open_multiple_files() {
    // Passing several paths to `open` concatenates their rows, so summing a
    // numeric column covers both the .csv and the .tsv fixture.
    let result = nu!(
        cwd: "tests/fixtures/formats",
        pipeline(
            "
            open caco3_plastics.csv caco3_plastics.tsv | get tariff_item | math sum
            "
        )
    );

    assert_eq!(result.out, "58309279992")
}
|
2022-04-01 20:52:32 +02:00
|
|
|
|
|
|
|
#[test]
fn test_open_block_command() {
    // A user-defined "from <ext>" command should be used by `open` to parse
    // files with that extension; here it splits lines on the ",|," separator.
    let result = nu!(
        cwd: "tests/fixtures/formats",
        r#"
        def "from blockcommandparser" [] { lines | split column ",|," }
        let values = (open sample.blockcommandparser)
        print ($values | get column1 | get 0)
        print ($values | get column2 | get 0)
        print ($values | get column1 | get 1)
        print ($values | get column2 | get 1)
        "#
    );

    // The four printed cells concatenate into "abcd" on stdout.
    assert_eq!(result.out, "abcd")
}
|
2022-08-04 13:59:20 +02:00
|
|
|
|
|
|
|
#[test]
fn open_ignore_ansi() {
    // A filename taken from `ls` output (which may carry ANSI styling)
    // must still be usable as an `open` argument without erroring.
    Playground::setup("open_test_ansi", |dirs, sandbox| {
        sandbox.with_files(vec![EmptyFile("nu.zion.txt")]);

        let result = nu!(
            cwd: dirs.test(),
            pipeline(
                "
                ls | find nu.zion | get 0 | get name | open $in
                "
            )
        );

        assert!(result.err.is_empty());
    })
}
|
2022-11-10 00:25:32 +01:00
|
|
|
|
|
|
|
#[test]
fn open_no_parameter() {
    // `open` with no argument should fail with a helpful message.
    let result = nu!("open");

    assert!(result.err.contains("needs filename"));
}
|