use nu_test_support::fs::Stub::EmptyFile;
use nu_test_support::fs::Stub::FileWithContentToBeTrimmed;
use nu_test_support::playground::Playground;
use nu_test_support::{nu, pipeline};
|
/// `open` on a CSV file should parse it into a table that supports
/// row filtering (`where`) and cell access (`get`).
#[test]
fn parses_csv() {
    Playground::setup("open_test_1", |dirs, sandbox| {
        sandbox.with_files(vec![FileWithContentToBeTrimmed(
            "nu.zion.csv",
            r#"
                author,lang,source
                Jonathan Turner,Rust,New Zealand
                Andres N. Robalino,Rust,Ecuador
                Yehuda Katz,Rust,Estados Unidos
            "#,
        )]);

        let actual = nu!(
            cwd: dirs.test(), pipeline(
            r#"
                open nu.zion.csv
                | where author == "Andres N. Robalino"
                | get source.0
            "#
        ));

        assert_eq!(actual.out, "Ecuador");
    })
}
|
|
|
|
|
2019-09-04 19:36:12 +02:00
|
|
|
// sample.bson has the following format:
// ━━━━━━━━━━┯━━━━━━━━━━━
//  _id      │ root
// ──────────┼───────────
//  [object] │ [9 items]
// ━━━━━━━━━━┷━━━━━━━━━━━
//
// the root value is:
// ━━━┯━━━━━━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━━━━━━━━━━┯━━━━━━━━━━┯━━━━━━━━━━
//  # │ _id               │ a                       │ b        │ c
// ───┼───────────────────┼─────────────────────────┼──────────┼──────────
//  0 │ [object]          │ 1.000000000000000       │ hello    │ [2 items]
//  1 │ [object]          │ 42.00000000000000       │ whel     │ hello
//  2 │ [object]          │ [object]                │          │
//  3 │ [object]          │                         │ [object] │
//  4 │ [object]          │                         │          │ [object]
//  5 │ [object]          │                         │          │ [object]
//  6 │ [object]          │ [object]                │ [object] │
//  7 │ [object]          │ &lt;date value&gt;            │ [object] │
//  8 │ 1.000000          │ &lt;decimal value&gt;         │ [object] │
//
// The decimal value is supposed to be π, but is currently wrong due to
// what appears to be an issue in the bson library that is under investigation.
//
|
2020-07-18 03:59:23 +02:00
|
|
|
/// `open` on a BSON fixture should expose nested document fields
/// through `get`/`select`.
#[cfg(feature = "bson")]
#[test]
fn parses_bson() {
    let actual = nu!(
        cwd: "tests/fixtures/formats",
        "open sample.bson | get root | select 0 | get b"
    );

    assert_eq!(actual.out, "hello");
}
|
|
|
|
|
2020-07-18 03:59:23 +02:00
|
|
|
/// BSON binary subtypes (here, row 6's `b` field) should be reachable
/// through the `$binary_subtype` key after opening the fixture.
#[cfg(feature = "bson")]
#[test]
fn parses_more_bson_complexity() {
    let actual = nu!(
        cwd: "tests/fixtures/formats", pipeline(
        r#"
            open sample.bson
            | get root
            | select 6
            | get b
            | get '$binary_subtype'
        "#
    ));

    assert_eq!(actual.out, "function");
}
|
|
|
|
|
2019-09-04 19:36:12 +02:00
|
|
|
// sample.db has the following format:
//
// ╭─────────┬────────────────╮
// │ strings │ [table 6 rows] │
// │ ints    │ [table 5 rows] │
// │ floats  │ [table 4 rows] │
// ╰─────────┴────────────────╯
//
// In this case, this represents a sqlite database
// with three tables named `strings`, `ints`, and `floats`.
//
// Each table has different columns. `strings` has `x` and `y`, while
// `ints` has just `z`, and `floats` has only the column `f`. In general, when working
// with sqlite, one will want to select a single table, e.g.:
//
// open sample.db | get ints
// ╭───┬──────╮
// │ # │  z   │
// ├───┼──────┤
// │ 0 │    1 │
// │ 1 │   42 │
// │ 2 │  425 │
// │ 3 │ 4253 │
// │ 4 │      │
// ╰───┴──────╯
2022-04-24 11:29:21 +02:00
|
|
|
/// Opening a sqlite database should yield a record with one column per
/// table; sample.db has three tables (strings, ints, floats).
#[cfg(feature = "database")]
#[test]
fn parses_sqlite() {
    let actual = nu!(
        cwd: "tests/fixtures/formats", pipeline(
        r#"
            open sample.db
            | columns
            | length
        "#
    ));

    assert_eq!(actual.out, "3");
}
|
|
|
|
|
2022-04-24 11:29:21 +02:00
|
|
|
/// A single sqlite table's column should be addressable by name
/// (`strings` table, column `x`, first row).
#[cfg(feature = "database")]
#[test]
fn parses_sqlite_get_column_name() {
    let actual = nu!(
        cwd: "tests/fixtures/formats", pipeline(
        r#"
            open sample.db
            | get strings
            | get x.0
        "#
    ));

    assert_eq!(actual.out, "hello");
}
|
|
|
|
|
2019-08-25 14:59:46 +02:00
|
|
|
/// `open` on a TOML file should expose nested keys via dotted `get` paths.
#[test]
fn parses_toml() {
    let actual = nu!(
        cwd: "tests/fixtures/formats",
        "open cargo_sample.toml | get package.edition"
    );

    assert_eq!(actual.out, "2018");
}
|
|
|
|
|
2019-08-29 11:02:16 +02:00
|
|
|
/// `open` on a tab-separated file should parse it into a table whose
/// first row's `origin` column is readable.
#[test]
fn parses_tsv() {
    let actual = nu!(
        cwd: "tests/fixtures/formats", pipeline(
        r#"
            open caco3_plastics.tsv
            | first
            | get origin
        "#
    ));

    assert_eq!(actual.out, "SPAIN")
}
|
|
|
|
|
2019-08-25 14:59:46 +02:00
|
|
|
/// `open` on a JSON file should support deep dotted-path access into
/// nested objects.
#[test]
fn parses_json() {
    let actual = nu!(
        cwd: "tests/fixtures/formats", pipeline(
        r#"
            open sgml_description.json
            | get glossary.GlossDiv.GlossList.GlossEntry.GlossSee
        "#
    ));

    assert_eq!(actual.out, "markup")
}
|
|
|
|
|
2022-02-16 19:24:45 +01:00
|
|
|
/// `open` on an XML file should parse it into the element/children
/// structure that nu's XML representation uses.
#[test]
fn parses_xml() {
    let actual = nu!(
        cwd: "tests/fixtures/formats",
        "open jonathan.xml | get rss.children.channel.children | get item.children | get link.children.0.3.3.0"
    );

    assert_eq!(
        actual.out,
        "http://www.jonathanturner.org/2015/10/off-to-new-adventures.html"
    )
}
|
|
|
|
|
2019-08-25 14:59:46 +02:00
|
|
|
/// `open` on an INI file should expose `Section.key` values via `get`.
#[test]
fn parses_ini() {
    let actual = nu!(
        cwd: "tests/fixtures/formats",
        "open sample.ini | get SectionOne.integer"
    );

    assert_eq!(actual.out, "1234")
}
|
|
|
|
|
|
|
|
/// A UTF-16 encoded INI file isn't auto-detected; open it raw, decode
/// explicitly, then parse with `from ini`.
#[test]
fn parses_utf16_ini() {
    let actual = nu!(
        cwd: "tests/fixtures/formats",
        "open ./utf16.ini --raw | decode utf-16 | from ini | rename info | get info | get IconIndex"
    );

    assert_eq!(actual.out, "-236")
}
|
|
|
|
|
2022-10-29 05:25:19 +02:00
|
|
|
/// `open-df` should read an Arrow IPC file into a dataframe that can be
/// converted back to nu values.
#[cfg(feature = "dataframe")]
#[test]
fn parses_arrow_ipc() {
    let actual = nu!(
        cwd: "tests/fixtures/formats", pipeline(
        r#"
            open-df caco3_plastics.arrow
            | into nu
            | first
            | get origin
        "#
    ));

    assert_eq!(actual.out, "SPAIN")
}
|
|
|
|
|
2019-08-25 14:59:46 +02:00
|
|
|
/// `open` on a missing path should surface the OS "file not found" error.
#[test]
fn errors_if_file_not_found() {
    let actual = nu!(
        cwd: "tests/fixtures/formats",
        "open i_dont_exist.txt"
    );
    // Common error code between unixes and Windows for "No such file or directory"
    //
    // This seems to be not directly affected by localization compared to the OS
    // provided error message
    let expected = "(os error 2)";

    assert!(
        actual.err.contains(expected),
        // Fixed missing space before the second placeholder in the failure message.
        "Error:\n{}\ndoes not contain {}",
        actual.err,
        expected
    );
}
|
2021-03-13 22:47:31 +01:00
|
|
|
|
2022-02-04 03:01:45 +01:00
|
|
|
// FIXME: jt: I think `open` on a directory is confusing. We should discuss this one a bit more
#[ignore]
#[test]
fn open_dir_is_ls() {
    Playground::setup("open_dir", |dirs, sandbox| {
        sandbox.with_files(vec![
            EmptyFile("yehuda.txt"),
            EmptyFile("jonathan.txt"),
            EmptyFile("andres.txt"),
        ]);

        let actual = nu!(
            cwd: dirs.test(), pipeline(
            r#"
                open .
                | length
            "#
        ));

        // Three files were created above, so `open .` behaving like `ls`
        // should produce three rows.
        assert_eq!(actual.out, "3");
    })
}
|
2022-04-01 20:52:32 +02:00
|
|
|
|
|
|
|
/// A user-defined `from <extension>` command should be picked up by
/// `open` to parse files with that extension.
#[test]
fn test_open_block_command() {
    let actual = nu!(
        cwd: "tests/fixtures/formats",
        r#"
            def "from blockcommandparser" [] { lines | split column ",|," }
            let values = (open sample.blockcommandparser)
            echo ($values | get column1 | get 0)
            echo ($values | get column2 | get 0)
            echo ($values | get column1 | get 1)
            echo ($values | get column2 | get 1)
        "#
    );

    // The four echoed cells concatenate to "abcd" in actual.out.
    assert_eq!(actual.out, "abcd")
}
|
2022-08-04 13:59:20 +02:00
|
|
|
|
|
|
|
/// A filename taken from `ls` output (which may carry ANSI styling)
/// should still be usable as the argument to `open`.
#[test]
fn open_ignore_ansi() {
    Playground::setup("open_test_ansi", |dirs, sandbox| {
        sandbox.with_files(vec![EmptyFile("nu.zion.txt")]);

        let actual = nu!(
            cwd: dirs.test(), pipeline(
            r#"
                ls | find nu.zion | get 0 | get name | open $in
            "#
        ));

        assert!(actual.err.is_empty());
    })
}
|
2022-11-10 00:25:32 +01:00
|
|
|
|
|
|
|
/// `open` with no argument should report a "needs filename" error
/// rather than panicking or succeeding silently.
#[test]
fn open_no_parameter() {
    let actual = nu!(
        cwd: "tests/fixtures/formats",
        r#"
            open
        "#
    );

    assert!(actual.err.contains("needs filename"));
}
|