mod helpers;

use helpers as h;
use helpers::{Playground, Stub::*};
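// NOTE: a rough sketch of the shared test harness, inferred from how it is used below:
// `Playground::setup` gives each test its own scratch directory, `sandbox.with_files`
// drops fixture files into it, `h::pipeline` collapses a multi-line pipeline literal into
// a single command line, and the `nu!` / `nu_error!` macros run a command in the given
// `cwd`, capturing its output (or its error output).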

#[test]
fn recognizes_csv() {
    Playground::setup("open_test_1", |dirs, sandbox| {
        sandbox.with_files(vec![FileWithContentToBeTrimmed(
            "nu.zion.csv",
            r#"
                author,lang,source
                Jonathan Turner,Rust,New Zealand
                Andres N. Robalino,Rust,Ecuador
                Yehuda Katz,Rust,Estados Unidos
            "#,
        )]);

        let actual = nu!(
            cwd: dirs.test(), h::pipeline(
            r#"
                open nu.zion.csv
                | where author == "Andres N. Robalino"
                | get source
                | echo $it
            "#
        ));

        assert_eq!(actual, "Ecuador");
    })
}

// sample.bson has the following format:
// ━━━━━━━━━━┯━━━━━━━━━━━
//  _id      │ root
// ──────────┼───────────
//  [object] │ [9 items]
// ━━━━━━━━━━┷━━━━━━━━━━━
//
// the root value is:
// ━━━┯━━━━━━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━━━━━━━━━━┯━━━━━━━━━━┯━━━━━━━━━━
//  # │ _id               │ a                       │ b        │ c
// ───┼───────────────────┼─────────────────────────┼──────────┼──────────
//  0 │ [object]          │ 1.000000000000000       │ hello    │ [2 items]
//  1 │ [object]          │ 42.00000000000000       │ whel     │ hello
//  2 │ [object]          │ [object]                │          │
//  3 │ [object]          │                         │ [object] │
//  4 │ [object]          │                         │          │ [object]
//  5 │ [object]          │                         │          │ [object]
//  6 │ [object]          │ [object]                │ [object] │
//  7 │ [object]          │ <date value>            │ [object] │
//  8 │ 1.000000          │ <decimal value>         │ [object] │
//
// The decimal value is supposed to be π, but is currently wrong due to
// what appears to be an issue in the bson library that is under investigation.
//
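// The two tests below reach into that structure by row and column (a rough sketch of the
// mapping, inferred from the table above): `get root | nth 0 | get b` lands on the `hello`
// in row 0, while `get root | nth 6 | get b` lands on the binary object in row 6, whose
// `$binary_subtype` field names the kind of binary stored there.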

#[test]
fn open_can_parse_bson_1() {
    let actual = nu!(
        cwd: "tests/fixtures/formats",
        "open sample.bson | get root | nth 0 | get b | echo $it"
    );

    assert_eq!(actual, "hello");
}

#[test]
fn open_can_parse_bson_2() {
    let actual = nu!(
        cwd: "tests/fixtures/formats", h::pipeline(
        r#"
            open sample.bson
            | get root
            | nth 6
            | get b
            | get '$binary_subtype'
            | echo $it
        "#
    ));

    assert_eq!(actual, "function");
}

// sample.db has the following format:
//
// ━━━┯━━━━━━━━━━━━┯━━━━━━━━━━━━━━
//  # │ table_name │ table_values
// ───┼────────────┼──────────────
//  0 │ strings    │ [6 items]
//  1 │ ints       │ [5 items]
//  2 │ floats     │ [4 items]
// ━━━┷━━━━━━━━━━━━┷━━━━━━━━━━━━━━
//
// This represents a sqlite database with three tables named
// `strings`, `ints`, and `floats`.
// The table_values column holds the values for each table:
//
// ━━━━┯━━━━━━━┯━━━━━━━━━━┯━━━━━━┯━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
//  #  │ x     │ y        │ z    │ f
// ────┼───────┼──────────┼──────┼──────────────────────────────────────────────────────────────────────
//  0  │ hello │ <binary> │      │
//  1  │ hello │ <binary> │      │
//  2  │ hello │ <binary> │      │
//  3  │ hello │ <binary> │      │
//  4  │ world │ <binary> │      │
//  5  │ world │ <binary> │      │
//  6  │       │          │ 1    │
//  7  │       │          │ 42   │
//  8  │       │          │ 425  │
//  9  │       │          │ 4253 │
//  10 │       │          │      │
//  11 │       │          │      │ 3.400000000000000
//  12 │       │          │      │ 3.141592650000000
//  13 │       │          │      │ 23.00000000000000
//  14 │       │          │      │ this string that doesn't really belong here but sqlite is what it is
// ━━━━┷━━━━━━━┷━━━━━━━━━━┷━━━━━━┷━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
//
// We can see here that each table has different columns. `strings` has `x` and `y`, while
// `ints` has just `z`, and `floats` has only the column `f`. This means, in general, when working
// with sqlite, one will want to select a single table, e.g.:
//
// open sample.db | nth 1 | get table_values
// ━━━┯━━━━━━
//  # │ z
// ───┼──────
//  0 │ 1
//  1 │ 42
//  2 │ 425
//  3 │ 4253
//  4 │
// ━━━┷━━━━━━
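//
// A single cell can then be picked out by indexing into the selected table, e.g. (a rough
// sketch based on the `ints` table shown above):
//
// open sample.db | nth 1 | get table_values | nth 3 | get z
//
// would give 4253.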

#[test]
fn open_can_parse_sqlite() {
    let actual = nu!(
        cwd: "tests/fixtures/formats", h::pipeline(
        r#"
            open sample.db
            | get table_values
            | nth 2
            | get x
            | echo $it"#
    ));

    assert_eq!(actual, "hello");
}

#[test]
fn open_can_parse_toml() {
    let actual = nu!(
        cwd: "tests/fixtures/formats",
        "open cargo_sample.toml | get package.edition | echo $it"
    );

    assert_eq!(actual, "2018");
}

#[test]
fn open_can_parse_tsv() {
    let actual = nu!(
        cwd: "tests/fixtures/formats", h::pipeline(
        r#"
            open caco3_plastics.tsv
            | first 1
            | get origin
            | echo $it
        "#
    ));

    assert_eq!(actual, "SPAIN")
}

#[test]
fn open_can_parse_json() {
    let actual = nu!(
        cwd: "tests/fixtures/formats", h::pipeline(
        r#"
            open sgml_description.json
            | get glossary.GlossDiv.GlossList.GlossEntry.GlossSee
            | echo $it
        "#
    ));

    assert_eq!(actual, "markup")
}

#[test]
fn open_can_parse_xml() {
    let actual = nu!(
        cwd: "tests/fixtures/formats",
        "open jonathan.xml | get rss.channel | get item | get link | echo $it"
    );

    assert_eq!(
        actual,
        "http://www.jonathanturner.org/2015/10/off-to-new-adventures.html"
    )
}

#[test]
fn open_can_parse_ini() {
    let actual = nu!(
        cwd: "tests/fixtures/formats",
        "open sample.ini | get SectionOne.integer | echo $it"
    );

    assert_eq!(actual, "1234")
}

#[test]
fn open_can_parse_utf16_ini() {
    let actual = nu!(
        cwd: "tests/fixtures/formats",
"open utf16.ini | get '.ShellClassInfo' | get IconIndex | echo $it"
|
2019-08-25 14:59:46 +02:00
|
|
|
);
|
|
|
|
|
2019-08-29 02:32:42 +02:00
|
|
|
assert_eq!(actual, "-236")
|
2019-08-25 14:59:46 +02:00
|
|
|
}

#[test]
fn errors_if_file_not_found() {
    let actual = nu_error!(
        cwd: "tests/fixtures/formats",
        "open i_dont_exist.txt"
    );

    assert!(actual.contains("File could not be opened"));
    assert!(actual.contains("file not found"));
}