mirror of
https://github.com/nushell/nushell.git
synced 2025-07-01 23:22:10 +02:00
Compare commits
157 Commits
Author | SHA1 | Date | |
---|---|---|---|
a8425daf14 | |||
b629136528 | |||
91ebb7f718 | |||
96484161c0 | |||
d21ddeeae6 | |||
4322d373e6 | |||
08571392e6 | |||
f52235b1c1 | |||
a66147da47 | |||
df778afd1f | |||
d7ddaa376b | |||
2ce892c6f0 | |||
28179ef450 | |||
2c6336c806 | |||
761fc9ae73 | |||
314c3c4a97 | |||
f7f1fba94f | |||
14817ef229 | |||
98233dcec1 | |||
6540509911 | |||
594eae1cbc | |||
5e961815fc | |||
fa9329c8e3 | |||
6c577e18ca | |||
4034129dba | |||
52cf65c19e | |||
cbbb246a6d | |||
87cc6d6f01 | |||
4b9ef5a9d0 | |||
31c703891a | |||
550bda477b | |||
219b7e64cd | |||
98c59f77b2 | |||
e8800fdd0c | |||
09f903c37a | |||
57af9b5040 | |||
16272b1b20 | |||
1dcbd89a89 | |||
eb6ef02ad1 | |||
17586bdfbd | |||
0e98cf3f1e | |||
e2a95c3e1d | |||
5cb7df57fc | |||
88f899d341 | |||
7d70b5feda | |||
fd6ee03391 | |||
9f702fe01a | |||
c9d9eec7f8 | |||
38cbfdb8a9 | |||
f9b7376949 | |||
e98ed1b43d | |||
251c3e103d | |||
d26e938436 | |||
dbadf9499e | |||
28df1559ea | |||
91784218c0 | |||
eeec5e10c3 | |||
0515ed976c | |||
f653992b4a | |||
b5f8c1cc50 | |||
f9a46ce1e7 | |||
b6ba7f97fd | |||
7a47905f11 | |||
683f4c35d9 | |||
dfa5173cf4 | |||
04b214bef6 | |||
37cb7fec77 | |||
8833969e4a | |||
bda238267c | |||
d07dc57537 | |||
d0a2888e88 | |||
cec2eff933 | |||
38b7a3e32b | |||
9dfb6c023f | |||
cde92a9fb9 | |||
5622bbdd48 | |||
3d79a9c37a | |||
a2a5b30568 | |||
768adb84a4 | |||
26b0250e22 | |||
6893850fce | |||
8834e6905e | |||
1d5f13ddca | |||
d12c16a331 | |||
ecf47bb3ab | |||
a4bb5d4ff5 | |||
e9ee7bda46 | |||
1d196394f6 | |||
cfda67ff82 | |||
59510a85d1 | |||
35edf22ac3 | |||
871fc72892 | |||
1fcf671ca4 | |||
ecebe1314a | |||
bda5db59c8 | |||
4526d757b6 | |||
e5405d7f5c | |||
201506a5ad | |||
49f9253ca2 | |||
efc879b955 | |||
3fa03eb7a4 | |||
24bad78607 | |||
8de4c9dbb7 | |||
f858e854bf | |||
87dbd3d5ac | |||
fe66b4c8ea | |||
8390cc97e1 | |||
c0a7d4e2a7 | |||
ce23a672d9 | |||
9851317aeb | |||
3fb4a5d6e6 | |||
340e701124 | |||
36938a4407 | |||
6a6589a357 | |||
b94a32e523 | |||
7db3c69984 | |||
5406450c42 | |||
d6a6e16d21 | |||
ea1b65916d | |||
cd9d9ad50b | |||
552272b37e | |||
388ce738e3 | |||
ef7fbcbe9f | |||
80941ace37 | |||
f317500873 | |||
911414a190 | |||
cca6360bcc | |||
f68503fa21 | |||
911b69dff0 | |||
4115634bfc | |||
8a0bdde17a | |||
a1e21828d6 | |||
0f193c2337 | |||
526d94d862 | |||
2fdafa52b1 | |||
f52c0655c7 | |||
97331c7b25 | |||
1fb5a419a7 | |||
4e9afd6698 | |||
8f9dd6516e | |||
e4226def16 | |||
c199a84dbb | |||
5a4ca11362 | |||
f2968c8385 | |||
8d01b019f4 | |||
bf87330d6e | |||
2bb85bdbd4 | |||
8f34c6eeda | |||
ac5543bad9 | |||
e4c56a25c6 | |||
11ff8190b1 | |||
9bd25d7427 | |||
6bfb4207c4 | |||
c63ad610f5 | |||
e38a4323b4 | |||
d40aea5d0a | |||
fbb65cde44 |
@ -42,10 +42,10 @@ steps:
|
|||||||
echo "##vso[task.prependpath]$HOME/.cargo/bin"
|
echo "##vso[task.prependpath]$HOME/.cargo/bin"
|
||||||
rustup component add rustfmt --toolchain "stable"
|
rustup component add rustfmt --toolchain "stable"
|
||||||
displayName: Install Rust
|
displayName: Install Rust
|
||||||
- bash: RUSTFLAGS="-D warnings" cargo test --all-features
|
- bash: RUSTFLAGS="-D warnings" cargo test --all --features=stable
|
||||||
condition: eq(variables['style'], 'unflagged')
|
condition: eq(variables['style'], 'unflagged')
|
||||||
displayName: Run tests
|
displayName: Run tests
|
||||||
- bash: NUSHELL_ENABLE_ALL_FLAGS=1 RUSTFLAGS="-D warnings" cargo test --all-features
|
- bash: NUSHELL_ENABLE_ALL_FLAGS=1 RUSTFLAGS="-D warnings" cargo test --all --features=stable
|
||||||
condition: eq(variables['style'], 'canary')
|
condition: eq(variables['style'], 'canary')
|
||||||
displayName: Run tests
|
displayName: Run tests
|
||||||
- bash: cargo fmt --all -- --check
|
- bash: cargo fmt --all -- --check
|
||||||
|
@ -1,7 +1,7 @@
|
|||||||
image:
|
image:
|
||||||
file: .gitpod.Dockerfile
|
file: .gitpod.Dockerfile
|
||||||
tasks:
|
tasks:
|
||||||
- init: cargo install nu
|
- init: cargo install nu --features=stable
|
||||||
command: nu
|
command: nu
|
||||||
github:
|
github:
|
||||||
prebuilds:
|
prebuilds:
|
||||||
|
650
Cargo.lock
generated
650
Cargo.lock
generated
File diff suppressed because it is too large
Load Diff
211
Cargo.toml
211
Cargo.toml
@ -1,6 +1,6 @@
|
|||||||
[package]
|
[package]
|
||||||
name = "nu"
|
name = "nu"
|
||||||
version = "0.6.1"
|
version = "0.7.0"
|
||||||
authors = ["Yehuda Katz <wycats@gmail.com>", "Jonathan Turner <jonathan.d.turner@gmail.com>", "Andrés N. Robalino <andres@androbtech.com>"]
|
authors = ["Yehuda Katz <wycats@gmail.com>", "Jonathan Turner <jonathan.d.turner@gmail.com>", "Andrés N. Robalino <andres@androbtech.com>"]
|
||||||
description = "A shell for the GitHub era"
|
description = "A shell for the GitHub era"
|
||||||
license = "MIT"
|
license = "MIT"
|
||||||
@ -9,22 +9,62 @@ readme = "README.md"
|
|||||||
default-run = "nu"
|
default-run = "nu"
|
||||||
repository = "https://github.com/nushell/nushell"
|
repository = "https://github.com/nushell/nushell"
|
||||||
homepage = "https://www.nushell.sh"
|
homepage = "https://www.nushell.sh"
|
||||||
documentation = "https://book.nushell.sh"
|
documentation = "https://www.nushell.sh/book/"
|
||||||
|
|
||||||
[workspace]
|
[workspace]
|
||||||
|
|
||||||
members = ["crates/nu-source"]
|
members = [
|
||||||
|
"crates/nu-macros",
|
||||||
|
"crates/nu-errors",
|
||||||
|
"crates/nu-source",
|
||||||
|
"crates/nu_plugin_average",
|
||||||
|
"crates/nu_plugin_binaryview",
|
||||||
|
"crates/nu_plugin_fetch",
|
||||||
|
"crates/nu_plugin_inc",
|
||||||
|
"crates/nu_plugin_match",
|
||||||
|
"crates/nu_plugin_post",
|
||||||
|
"crates/nu_plugin_ps",
|
||||||
|
"crates/nu_plugin_str",
|
||||||
|
"crates/nu_plugin_sum",
|
||||||
|
"crates/nu_plugin_sys",
|
||||||
|
"crates/nu_plugin_textview",
|
||||||
|
"crates/nu_plugin_tree",
|
||||||
|
"crates/nu-protocol",
|
||||||
|
"crates/nu-parser",
|
||||||
|
"crates/nu-value-ext",
|
||||||
|
"crates/nu-build"
|
||||||
|
]
|
||||||
|
|
||||||
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
|
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
|
||||||
|
|
||||||
[dependencies]
|
[dependencies]
|
||||||
nu-source = { version = "0.1.0", path = "./crates/nu-source" }
|
nu-source = { version = "0.7.0", path = "./crates/nu-source" }
|
||||||
|
nu-protocol = { version = "0.7.0", path = "./crates/nu-protocol" }
|
||||||
|
nu-errors = { version = "0.7.0", path = "./crates/nu-errors" }
|
||||||
|
nu-parser = { version = "0.7.0", path = "./crates/nu-parser" }
|
||||||
|
nu-value-ext = { version = "0.7.0", path = "./crates/nu-value-ext" }
|
||||||
|
nu_plugin_average = {version = "0.7.0", path = "./crates/nu_plugin_average", optional=true}
|
||||||
|
nu_plugin_binaryview = {version = "0.7.0", path = "./crates/nu_plugin_binaryview", optional=true}
|
||||||
|
nu_plugin_fetch = {version = "0.7.0", path = "./crates/nu_plugin_fetch", optional=true}
|
||||||
|
nu_plugin_inc = {version = "0.7.0", path = "./crates/nu_plugin_inc", optional=true}
|
||||||
|
nu_plugin_match = {version = "0.7.0", path = "./crates/nu_plugin_match", optional=true}
|
||||||
|
nu_plugin_post = {version = "0.7.0", path = "./crates/nu_plugin_post", optional=true}
|
||||||
|
nu_plugin_ps = {version = "0.7.0", path = "./crates/nu_plugin_ps", optional=true}
|
||||||
|
nu_plugin_str = {version = "0.7.0", path = "./crates/nu_plugin_str", optional=true}
|
||||||
|
nu_plugin_sum = {version = "0.7.0", path = "./crates/nu_plugin_sum", optional=true}
|
||||||
|
nu_plugin_sys = {version = "0.7.0", path = "./crates/nu_plugin_sys", optional=true}
|
||||||
|
nu_plugin_textview = {version = "0.7.0", path = "./crates/nu_plugin_textview", optional=true}
|
||||||
|
nu_plugin_tree = {version = "0.7.0", path = "./crates/nu_plugin_tree", optional=true}
|
||||||
|
nu-macros = { version = "0.7.0", path = "./crates/nu-macros" }
|
||||||
|
|
||||||
|
|
||||||
|
query_interface = "0.3.5"
|
||||||
|
typetag = "0.1.4"
|
||||||
rustyline = "5.0.4"
|
rustyline = "5.0.4"
|
||||||
chrono = { version = "0.4.9", features = ["serde"] }
|
chrono = { version = "0.4.10", features = ["serde"] }
|
||||||
derive-new = "0.5.8"
|
derive-new = "0.5.8"
|
||||||
prettytable-rs = "0.8.0"
|
prettytable-rs = "0.8.0"
|
||||||
itertools = "0.8.1"
|
itertools = "0.8.2"
|
||||||
ansi_term = "0.12.1"
|
ansi_term = "0.12.1"
|
||||||
nom = "5.0.1"
|
nom = "5.0.1"
|
||||||
dunce = "1.0.0"
|
dunce = "1.0.0"
|
||||||
@ -35,82 +75,82 @@ base64 = "0.11"
|
|||||||
futures-preview = { version = "=0.3.0-alpha.19", features = ["compat", "io-compat"] }
|
futures-preview = { version = "=0.3.0-alpha.19", features = ["compat", "io-compat"] }
|
||||||
async-stream = "0.1.2"
|
async-stream = "0.1.2"
|
||||||
futures_codec = "0.2.5"
|
futures_codec = "0.2.5"
|
||||||
num-traits = "0.2.8"
|
num-traits = "0.2.10"
|
||||||
term = "0.5.2"
|
term = "0.5.2"
|
||||||
bytes = "0.4.12"
|
bytes = "0.4.12"
|
||||||
log = "0.4.8"
|
log = "0.4.8"
|
||||||
pretty_env_logger = "0.3.1"
|
pretty_env_logger = "0.3.1"
|
||||||
serde = { version = "1.0.102", features = ["derive"] }
|
serde = { version = "1.0.103", features = ["derive"] }
|
||||||
bson = { version = "0.14.0", features = ["decimal128"] }
|
bson = { version = "0.14.0", features = ["decimal128"] }
|
||||||
serde_json = "1.0.41"
|
serde_json = "1.0.44"
|
||||||
serde-hjson = "0.9.1"
|
serde-hjson = "0.9.1"
|
||||||
serde_yaml = "0.8"
|
serde_yaml = "0.8"
|
||||||
serde_bytes = "0.11.2"
|
serde_bytes = "0.11.3"
|
||||||
getset = "0.0.9"
|
getset = "0.0.9"
|
||||||
language-reporting = "0.4.0"
|
language-reporting = "0.4.0"
|
||||||
app_dirs = "1.2.1"
|
app_dirs = "1.2.1"
|
||||||
csv = "1.1"
|
csv = "1.1"
|
||||||
toml = "0.5.5"
|
toml = "0.5.5"
|
||||||
clap = "2.33.0"
|
clap = "2.33.0"
|
||||||
git2 = { version = "0.10.1", default_features = false }
|
git2 = { version = "0.10.2", default_features = false }
|
||||||
dirs = "2.0.2"
|
dirs = "2.0.2"
|
||||||
glob = "0.3.0"
|
glob = "0.3.0"
|
||||||
ctrlc = "3.1.3"
|
ctrlc = "3.1.3"
|
||||||
surf = "1.0.3"
|
roxmltree = "0.7.3"
|
||||||
url = "2.1.0"
|
|
||||||
roxmltree = "0.7.2"
|
|
||||||
nom_locate = "1.0.0"
|
nom_locate = "1.0.0"
|
||||||
nom-tracable = "0.4.1"
|
nom-tracable = "0.4.1"
|
||||||
unicode-xid = "0.2.0"
|
unicode-xid = "0.2.0"
|
||||||
serde_ini = "0.2.0"
|
serde_ini = "0.2.0"
|
||||||
subprocess = "0.1.18"
|
subprocess = "0.1.18"
|
||||||
mime = "0.3.14"
|
|
||||||
pretty-hex = "0.1.1"
|
pretty-hex = "0.1.1"
|
||||||
hex = "0.4"
|
hex = "0.4"
|
||||||
tempfile = "3.1.0"
|
tempfile = "3.1.0"
|
||||||
semver = "0.9.0"
|
|
||||||
which = "3.1"
|
which = "3.1"
|
||||||
textwrap = {version = "0.11.0", features = ["term_size"]}
|
textwrap = {version = "0.11.0", features = ["term_size"]}
|
||||||
shellexpand = "1.0.0"
|
shellexpand = "1.0.0"
|
||||||
futures-timer = "2.0.0"
|
|
||||||
pin-utils = "0.1.0-alpha.4"
|
pin-utils = "0.1.0-alpha.4"
|
||||||
num-bigint = { version = "0.2.3", features = ["serde"] }
|
num-bigint = { version = "0.2.3", features = ["serde"] }
|
||||||
bigdecimal = { version = "0.1.0", features = ["serde"] }
|
bigdecimal = { version = "0.1.0", features = ["serde"] }
|
||||||
natural = "0.3.0"
|
|
||||||
serde_urlencoded = "0.6.1"
|
serde_urlencoded = "0.6.1"
|
||||||
sublime_fuzzy = "0.6"
|
|
||||||
trash = "1.0.0"
|
trash = "1.0.0"
|
||||||
regex = "1"
|
regex = "1"
|
||||||
cfg-if = "0.1"
|
cfg-if = "0.1"
|
||||||
strip-ansi-escapes = "0.1.0"
|
strip-ansi-escapes = "0.1.0"
|
||||||
calamine = "0.16"
|
calamine = "0.16"
|
||||||
umask = "0.1"
|
umask = "0.1"
|
||||||
futures-util = "0.3.0"
|
futures-util = "0.3.1"
|
||||||
pretty = "0.5.2"
|
|
||||||
termcolor = "1.0.5"
|
termcolor = "1.0.5"
|
||||||
console = "0.9.1"
|
natural = "0.3.0"
|
||||||
|
|
||||||
neso = { version = "0.5.0", optional = true }
|
|
||||||
crossterm = { version = "0.10.2", optional = true }
|
|
||||||
syntect = {version = "3.2.0", optional = true }
|
|
||||||
onig_sys = {version = "=69.1.0", optional = true }
|
|
||||||
heim = {version = "0.0.8", optional = true }
|
|
||||||
battery = {version = "0.7.4", optional = true }
|
|
||||||
rawkey = {version = "0.1.2", optional = true }
|
|
||||||
clipboard = {version = "0.5", optional = true }
|
clipboard = {version = "0.5", optional = true }
|
||||||
ptree = {version = "0.2" }
|
ptree = {version = "0.2" }
|
||||||
image = { version = "0.22.2", default_features = false, features = ["png_codec", "jpeg"], optional = true }
|
starship = { version = "0.28", optional = true}
|
||||||
starship = { version = "0.26.4", optional = true}
|
heim = {version = "0.0.9", optional = true}
|
||||||
|
battery = {version = "0.7.5", optional = true}
|
||||||
|
syntect = {version = "3.2.0", optional = true }
|
||||||
|
onig_sys = {version = "=69.1.0", optional = true }
|
||||||
|
crossterm = {version = "0.10.2", optional = true}
|
||||||
|
futures-timer = {version = "1.0.2", optional = true}
|
||||||
|
url = {version = "2.1.0", optional = true}
|
||||||
|
semver = {version = "0.9.0", optional = true}
|
||||||
|
|
||||||
[features]
|
[features]
|
||||||
default = ["textview", "sys", "ps"]
|
default = ["sys", "ps", "textview", "inc", "str"]
|
||||||
raw-key = ["rawkey", "neso"]
|
stable = ["sys", "ps", "starship-prompt", "textview", "binaryview", "match", "tree", "average", "sum"]
|
||||||
textview = ["syntect", "onig_sys", "crossterm"]
|
|
||||||
binaryview = ["image", "crossterm"]
|
|
||||||
sys = ["heim", "battery"]
|
sys = ["heim", "battery"]
|
||||||
ps = ["heim"]
|
ps = ["heim", "futures-timer"]
|
||||||
|
textview = ["crossterm", "syntect", "onig_sys", "url"]
|
||||||
|
str = []
|
||||||
|
|
||||||
|
inc = ["semver"]
|
||||||
starship-prompt = ["starship"]
|
starship-prompt = ["starship"]
|
||||||
# trace = ["nom-tracable/trace"]
|
binaryview = ["nu_plugin_binaryview"]
|
||||||
|
match = ["nu_plugin_match"]
|
||||||
|
tree = ["nu_plugin_tree"]
|
||||||
|
average = ["nu_plugin_average"]
|
||||||
|
sum = ["nu_plugin_sum"]
|
||||||
|
trace = ["nu-parser/trace"]
|
||||||
|
|
||||||
[dependencies.rusqlite]
|
[dependencies.rusqlite]
|
||||||
version = "0.20.0"
|
version = "0.20.0"
|
||||||
@ -118,89 +158,46 @@ features = ["bundled", "blob"]
|
|||||||
|
|
||||||
[dev-dependencies]
|
[dev-dependencies]
|
||||||
pretty_assertions = "0.6.1"
|
pretty_assertions = "0.6.1"
|
||||||
|
nu-test-support = { version = "0.7.0", path = "./crates/nu-test-support" }
|
||||||
|
|
||||||
[build-dependencies]
|
[build-dependencies]
|
||||||
toml = "0.5.5"
|
toml = "0.5.5"
|
||||||
serde = { version = "1.0.102", features = ["derive"] }
|
serde = { version = "1.0.103", features = ["derive"] }
|
||||||
|
nu-build = { version = "0.7.0", path = "./crates/nu-build" }
|
||||||
|
|
||||||
[lib]
|
[lib]
|
||||||
name = "nu"
|
name = "nu"
|
||||||
path = "src/lib.rs"
|
path = "src/lib.rs"
|
||||||
|
|
||||||
|
# Core plugins that ship with `cargo install nu` by default
|
||||||
|
# Currently, Cargo limits us to installing only one binary
|
||||||
|
# unless we use [[bin]], so we use this as a workaround
|
||||||
[[bin]]
|
[[bin]]
|
||||||
name = "nu_plugin_inc"
|
name = "nu_plugin_core_textview"
|
||||||
path = "src/plugins/inc.rs"
|
path = "src/plugins/nu_plugin_core_textview.rs"
|
||||||
|
|
||||||
[[bin]]
|
|
||||||
name = "nu_plugin_sum"
|
|
||||||
path = "src/plugins/sum.rs"
|
|
||||||
|
|
||||||
[[bin]]
|
|
||||||
name = "nu_plugin_average"
|
|
||||||
path = "src/plugins/average.rs"
|
|
||||||
|
|
||||||
[[bin]]
|
|
||||||
name = "nu_plugin_embed"
|
|
||||||
path = "src/plugins/embed.rs"
|
|
||||||
|
|
||||||
[[bin]]
|
|
||||||
name = "nu_plugin_insert"
|
|
||||||
path = "src/plugins/insert.rs"
|
|
||||||
|
|
||||||
[[bin]]
|
|
||||||
name = "nu_plugin_edit"
|
|
||||||
path = "src/plugins/edit.rs"
|
|
||||||
|
|
||||||
[[bin]]
|
|
||||||
name = "nu_plugin_format"
|
|
||||||
path = "src/plugins/format.rs"
|
|
||||||
|
|
||||||
[[bin]]
|
|
||||||
name = "nu_plugin_parse"
|
|
||||||
path = "src/plugins/parse.rs"
|
|
||||||
|
|
||||||
[[bin]]
|
|
||||||
name = "nu_plugin_str"
|
|
||||||
path = "src/plugins/str.rs"
|
|
||||||
|
|
||||||
[[bin]]
|
|
||||||
name = "nu_plugin_skip"
|
|
||||||
path = "src/plugins/skip.rs"
|
|
||||||
|
|
||||||
[[bin]]
|
|
||||||
name = "nu_plugin_match"
|
|
||||||
path = "src/plugins/match.rs"
|
|
||||||
|
|
||||||
[[bin]]
|
|
||||||
name = "nu_plugin_sys"
|
|
||||||
path = "src/plugins/sys.rs"
|
|
||||||
required-features = ["sys"]
|
|
||||||
|
|
||||||
[[bin]]
|
|
||||||
name = "nu_plugin_ps"
|
|
||||||
path = "src/plugins/ps.rs"
|
|
||||||
required-features = ["ps"]
|
|
||||||
|
|
||||||
[[bin]]
|
|
||||||
name = "nu_plugin_tree"
|
|
||||||
path = "src/plugins/tree.rs"
|
|
||||||
required-features = ["tree"]
|
|
||||||
|
|
||||||
[[bin]]
|
|
||||||
name = "nu_plugin_binaryview"
|
|
||||||
path = "src/plugins/binaryview.rs"
|
|
||||||
required-features = ["binaryview"]
|
|
||||||
|
|
||||||
[[bin]]
|
|
||||||
name = "nu_plugin_textview"
|
|
||||||
path = "src/plugins/textview.rs"
|
|
||||||
required-features = ["textview"]
|
required-features = ["textview"]
|
||||||
|
|
||||||
[[bin]]
|
[[bin]]
|
||||||
name = "nu_plugin_docker"
|
name = "nu_plugin_core_inc"
|
||||||
path = "src/plugins/docker.rs"
|
path = "src/plugins/nu_plugin_core_inc.rs"
|
||||||
required-features = ["docker"]
|
required-features = ["inc"]
|
||||||
|
|
||||||
|
[[bin]]
|
||||||
|
name = "nu_plugin_core_ps"
|
||||||
|
path = "src/plugins/nu_plugin_core_ps.rs"
|
||||||
|
required-features = ["ps"]
|
||||||
|
|
||||||
|
[[bin]]
|
||||||
|
name = "nu_plugin_core_str"
|
||||||
|
path = "src/plugins/nu_plugin_core_str.rs"
|
||||||
|
required-features = ["str"]
|
||||||
|
|
||||||
|
[[bin]]
|
||||||
|
name = "nu_plugin_core_sys"
|
||||||
|
path = "src/plugins/nu_plugin_core_sys.rs"
|
||||||
|
required-features = ["sys"]
|
||||||
|
|
||||||
|
# Main nu binary
|
||||||
[[bin]]
|
[[bin]]
|
||||||
name = "nu"
|
name = "nu"
|
||||||
path = "src/main.rs"
|
path = "src/main.rs"
|
||||||
|
10
README.md
10
README.md
@ -18,9 +18,9 @@ Nu comes with a set of built-in commands (listed below). If a command is unknown
|
|||||||
|
|
||||||
# Learning more
|
# Learning more
|
||||||
|
|
||||||
There are a few good resources to learn about Nu. There is a [book](https://book.nushell.sh) about Nu that is currently in progress. The book focuses on using Nu and its core concepts.
|
There are a few good resources to learn about Nu. There is a [book](https://www.nushell.sh/book/) about Nu that is currently in progress. The book focuses on using Nu and its core concepts.
|
||||||
|
|
||||||
If you're a developer who would like to contribute to Nu, we're also working on a [book for developers](https://github.com/nushell/contributor-book/tree/master/en) to help you get started. There are also [good first issues](https://github.com/nushell/nushell/issues?q=is%3Aopen+is%3Aissue+label%3A%22good+first+issue%22) to help you dive in.
|
If you're a developer who would like to contribute to Nu, we're also working on a [book for developers](https://www.nushell.sh/contributor-book/) to help you get started. There are also [good first issues](https://github.com/nushell/nushell/issues?q=is%3Aopen+is%3Aissue+label%3A%22good+first+issue%22) to help you dive in.
|
||||||
|
|
||||||
We also have an active [Discord](https://discord.gg/NtAbbGn) and [Twitter](https://twitter.com/nu_shell) if you'd like to come and chat with us.
|
We also have an active [Discord](https://discord.gg/NtAbbGn) and [Twitter](https://twitter.com/nu_shell) if you'd like to come and chat with us.
|
||||||
|
|
||||||
@ -32,7 +32,7 @@ Try it in Gitpod.
|
|||||||
|
|
||||||
## Local
|
## Local
|
||||||
|
|
||||||
Up-to-date installation instructions can be found in the [installation chapter of the book](https://book.nushell.sh/en/installation). **Windows users**: please note that Nu works on Windows 10 and does not currently have Windows 7/8.1 support.
|
Up-to-date installation instructions can be found in the [installation chapter of the book](https://www.nushell.sh/book/en/installation.html). **Windows users**: please note that Nu works on Windows 10 and does not currently have Windows 7/8.1 support.
|
||||||
|
|
||||||
To build Nu, you will need to use the **latest stable (1.39 or later)** version of the compiler.
|
To build Nu, you will need to use the **latest stable (1.39 or later)** version of the compiler.
|
||||||
|
|
||||||
@ -52,10 +52,10 @@ To install Nu via cargo (make sure you have installed [rustup](https://rustup.rs
|
|||||||
cargo install nu
|
cargo install nu
|
||||||
```
|
```
|
||||||
|
|
||||||
You can also install Nu with all the bells and whistles (be sure to have installed the [dependencies](https://book.nushell.sh/en/installation#dependencies) for your platform):
|
You can also install Nu with all the bells and whistles (be sure to have installed the [dependencies](https://www.nushell.sh/book/en/installation.html#dependencies) for your platform):
|
||||||
|
|
||||||
```
|
```
|
||||||
cargo install nu --all-features
|
cargo install nu --features=stable
|
||||||
```
|
```
|
||||||
|
|
||||||
## Docker
|
## Docker
|
||||||
|
4
TODO.md
4
TODO.md
@ -46,3 +46,7 @@ Unify dictionary building, probably around a macro
|
|||||||
sys plugin in own crate
|
sys plugin in own crate
|
||||||
|
|
||||||
textview in own crate
|
textview in own crate
|
||||||
|
|
||||||
|
Combine atomic and atomic_parse in parser
|
||||||
|
|
||||||
|
at_end_possible_ws needs to be comment and separator sensitive
|
||||||
|
38
build.rs
38
build.rs
@ -1,39 +1,3 @@
|
|||||||
use serde::Deserialize;
|
|
||||||
use std::collections::HashMap;
|
|
||||||
use std::collections::HashSet;
|
|
||||||
use std::env;
|
|
||||||
use std::path::Path;
|
|
||||||
|
|
||||||
#[derive(Deserialize)]
|
|
||||||
struct Feature {
|
|
||||||
#[allow(unused)]
|
|
||||||
description: String,
|
|
||||||
enabled: bool,
|
|
||||||
}
|
|
||||||
|
|
||||||
fn main() -> Result<(), Box<dyn std::error::Error>> {
|
fn main() -> Result<(), Box<dyn std::error::Error>> {
|
||||||
let input = env::var("CARGO_MANIFEST_DIR").unwrap();
|
nu_build::build()
|
||||||
let all_on = env::var("NUSHELL_ENABLE_ALL_FLAGS").is_ok();
|
|
||||||
let flags: HashSet<String> = env::var("NUSHELL_ENABLE_FLAGS")
|
|
||||||
.map(|s| s.split(",").map(|s| s.to_string()).collect())
|
|
||||||
.unwrap_or_else(|_| HashSet::new());
|
|
||||||
|
|
||||||
if all_on && !flags.is_empty() {
|
|
||||||
println!(
|
|
||||||
"cargo:warning={}",
|
|
||||||
"Both NUSHELL_ENABLE_ALL_FLAGS and NUSHELL_ENABLE_FLAGS were set. You don't need both."
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
let path = Path::new(&input).join("features.toml");
|
|
||||||
|
|
||||||
let toml: HashMap<String, Feature> = toml::from_str(&std::fs::read_to_string(path)?)?;
|
|
||||||
|
|
||||||
for (key, value) in toml.iter() {
|
|
||||||
if value.enabled == true || all_on || flags.contains(key) {
|
|
||||||
println!("cargo:rustc-cfg={}", key);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
Ok(())
|
|
||||||
}
|
}
|
||||||
|
15
crates/nu-build/Cargo.toml
Normal file
15
crates/nu-build/Cargo.toml
Normal file
@ -0,0 +1,15 @@
|
|||||||
|
[package]
|
||||||
|
name = "nu-build"
|
||||||
|
version = "0.7.0"
|
||||||
|
authors = ["Yehuda Katz <wycats@gmail.com>", "Jonathan Turner <jonathan.d.turner@gmail.com>", "Andrés N. Robalino <andres@androbtech.com>"]
|
||||||
|
edition = "2018"
|
||||||
|
description = "Core build system for nushell"
|
||||||
|
license = "MIT"
|
||||||
|
|
||||||
|
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
|
||||||
|
|
||||||
|
[dependencies]
|
||||||
|
serde = { version = "1.0.103", features = ["derive"] }
|
||||||
|
lazy_static = "1.4.0"
|
||||||
|
serde_json = "1.0.44"
|
||||||
|
toml = "0.5.5"
|
81
crates/nu-build/src/lib.rs
Normal file
81
crates/nu-build/src/lib.rs
Normal file
@ -0,0 +1,81 @@
|
|||||||
|
use lazy_static::lazy_static;
|
||||||
|
use serde::Deserialize;
|
||||||
|
use std::collections::BTreeMap;
|
||||||
|
use std::collections::HashMap;
|
||||||
|
use std::collections::HashSet;
|
||||||
|
use std::env;
|
||||||
|
use std::path::{Path, PathBuf};
|
||||||
|
use std::sync::Mutex;
|
||||||
|
|
||||||
|
lazy_static! {
|
||||||
|
static ref WORKSPACES: Mutex<BTreeMap<String, &'static Path>> = Mutex::new(BTreeMap::new());
|
||||||
|
}
|
||||||
|
|
||||||
|
// got from https://github.com/mitsuhiko/insta/blob/b113499249584cb650150d2d01ed96ee66db6b30/src/runtime.rs#L67-L88
|
||||||
|
|
||||||
|
fn get_cargo_workspace(manifest_dir: &str) -> Option<&Path> {
|
||||||
|
let mut workspaces = WORKSPACES.lock().unwrap();
|
||||||
|
if let Some(rv) = workspaces.get(manifest_dir) {
|
||||||
|
Some(rv)
|
||||||
|
} else {
|
||||||
|
#[derive(Deserialize)]
|
||||||
|
struct Manifest {
|
||||||
|
workspace_root: String,
|
||||||
|
}
|
||||||
|
let output = std::process::Command::new(env!("CARGO"))
|
||||||
|
.arg("metadata")
|
||||||
|
.arg("--format-version=1")
|
||||||
|
.current_dir(manifest_dir)
|
||||||
|
.output()
|
||||||
|
.unwrap();
|
||||||
|
let manifest: Manifest = serde_json::from_slice(&output.stdout).unwrap();
|
||||||
|
let path = Box::leak(Box::new(PathBuf::from(manifest.workspace_root)));
|
||||||
|
workspaces.insert(manifest_dir.to_string(), path.as_path());
|
||||||
|
workspaces.get(manifest_dir).map(|w| *w)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Deserialize)]
|
||||||
|
struct Feature {
|
||||||
|
#[allow(unused)]
|
||||||
|
description: String,
|
||||||
|
enabled: bool,
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn build() -> Result<(), Box<dyn std::error::Error>> {
|
||||||
|
let input = env::var("CARGO_MANIFEST_DIR").unwrap();
|
||||||
|
|
||||||
|
let all_on = env::var("NUSHELL_ENABLE_ALL_FLAGS").is_ok();
|
||||||
|
let flags: HashSet<String> = env::var("NUSHELL_ENABLE_FLAGS")
|
||||||
|
.map(|s| s.split(",").map(|s| s.to_string()).collect())
|
||||||
|
.unwrap_or_else(|_| HashSet::new());
|
||||||
|
|
||||||
|
if all_on && !flags.is_empty() {
|
||||||
|
println!(
|
||||||
|
"cargo:warning=Both NUSHELL_ENABLE_ALL_FLAGS and NUSHELL_ENABLE_FLAGS were set. You don't need both."
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
let workspace = match get_cargo_workspace(&input) {
|
||||||
|
// If the crate is being downloaded from crates.io, it won't have a workspace root, and that's ok
|
||||||
|
None => return Ok(()),
|
||||||
|
Some(workspace) => workspace,
|
||||||
|
};
|
||||||
|
|
||||||
|
let path = Path::new(&workspace).join("features.toml");
|
||||||
|
|
||||||
|
// If the crate is being downloaded from crates.io, it won't have a features.toml, and that's ok
|
||||||
|
if !path.exists() {
|
||||||
|
return Ok(());
|
||||||
|
}
|
||||||
|
|
||||||
|
let toml: HashMap<String, Feature> = toml::from_str(&std::fs::read_to_string(path)?)?;
|
||||||
|
|
||||||
|
for (key, value) in toml.iter() {
|
||||||
|
if value.enabled == true || all_on || flags.contains(key) {
|
||||||
|
println!("cargo:rustc-cfg={}", key);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
31
crates/nu-errors/Cargo.toml
Normal file
31
crates/nu-errors/Cargo.toml
Normal file
@ -0,0 +1,31 @@
|
|||||||
|
[package]
|
||||||
|
name = "nu-errors"
|
||||||
|
version = "0.7.0"
|
||||||
|
authors = ["Yehuda Katz <wycats@gmail.com>", "Jonathan Turner <jonathan.d.turner@gmail.com>", "Andrés N. Robalino <andres@androbtech.com>"]
|
||||||
|
edition = "2018"
|
||||||
|
description = "Core error subsystem for Nushell"
|
||||||
|
license = "MIT"
|
||||||
|
|
||||||
|
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
|
||||||
|
|
||||||
|
[dependencies]
|
||||||
|
nu-source = { path = "../nu-source", version = "0.7.0" }
|
||||||
|
|
||||||
|
ansi_term = "0.12.1"
|
||||||
|
bigdecimal = { version = "0.1.0", features = ["serde"] }
|
||||||
|
derive-new = "0.5.8"
|
||||||
|
language-reporting = "0.4.0"
|
||||||
|
num-bigint = { version = "0.2.3", features = ["serde"] }
|
||||||
|
num-traits = "0.2.10"
|
||||||
|
serde = { version = "1.0.103", features = ["derive"] }
|
||||||
|
nom = "5.0.1"
|
||||||
|
nom_locate = "1.0.0"
|
||||||
|
|
||||||
|
# implement conversions
|
||||||
|
subprocess = "0.1.18"
|
||||||
|
serde_yaml = "0.8"
|
||||||
|
toml = "0.5.5"
|
||||||
|
serde_json = "1.0.44"
|
||||||
|
|
||||||
|
[build-dependencies]
|
||||||
|
nu-build = { version = "0.7.0", path = "../nu-build" }
|
3
crates/nu-errors/build.rs
Normal file
3
crates/nu-errors/build.rs
Normal file
@ -0,0 +1,3 @@
|
|||||||
|
fn main() -> Result<(), Box<dyn std::error::Error>> {
|
||||||
|
nu_build::build()
|
||||||
|
}
|
@ -1,71 +1,63 @@
|
|||||||
use crate::prelude::*;
|
|
||||||
|
|
||||||
use ansi_term::Color;
|
use ansi_term::Color;
|
||||||
|
use bigdecimal::BigDecimal;
|
||||||
use derive_new::new;
|
use derive_new::new;
|
||||||
use language_reporting::{Diagnostic, Label, Severity};
|
use language_reporting::{Diagnostic, Label, Severity};
|
||||||
use nu_source::{Spanned, TracableContext};
|
use nu_source::{b, DebugDocBuilder, PrettyDebug, Span, Spanned, SpannedItem, TracableContext};
|
||||||
|
use num_bigint::BigInt;
|
||||||
|
use num_traits::ToPrimitive;
|
||||||
use serde::{Deserialize, Serialize};
|
use serde::{Deserialize, Serialize};
|
||||||
use std::fmt;
|
use std::fmt;
|
||||||
use std::ops::Range;
|
use std::ops::Range;
|
||||||
|
|
||||||
// TODO: Spanned<T> -> HasSpanAndItem<T> ?
|
/// A structured reason for a ParseError. Note that parsing in nu is more like macro expansion in
|
||||||
|
/// other languages, so the kinds of errors that can occur during parsing are more contextual than
|
||||||
#[derive(Debug, Eq, PartialEq, Clone, Ord, PartialOrd, Hash, Serialize, Deserialize)]
|
/// you might expect.
|
||||||
pub enum Description {
|
|
||||||
Source(Spanned<String>),
|
|
||||||
Synthetic(String),
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Description {
|
|
||||||
fn from_spanned(item: Spanned<impl Into<String>>) -> Description {
|
|
||||||
Description::Source(item.map(|s| s.into()))
|
|
||||||
}
|
|
||||||
|
|
||||||
fn into_label(self) -> Result<Label<Span>, String> {
|
|
||||||
match self {
|
|
||||||
Description::Source(s) => Ok(Label::new_primary(s.span).with_message(s.item)),
|
|
||||||
Description::Synthetic(s) => Err(s),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl PrettyDebug for Description {
|
|
||||||
fn pretty(&self) -> DebugDocBuilder {
|
|
||||||
match self {
|
|
||||||
Description::Source(s) => b::description(&s.item),
|
|
||||||
Description::Synthetic(s) => b::description(s),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug, Clone)]
|
#[derive(Debug, Clone)]
|
||||||
pub enum ParseErrorReason {
|
pub enum ParseErrorReason {
|
||||||
Eof {
|
/// The parser encountered an EOF rather than what it was expecting
|
||||||
expected: &'static str,
|
Eof { expected: &'static str, span: Span },
|
||||||
span: Span,
|
/// The parser expected to see the end of a token stream (possibly the token
|
||||||
},
|
/// stream from inside a delimited token node), but found something else.
|
||||||
|
ExtraTokens { actual: Spanned<String> },
|
||||||
|
/// The parser encountered something other than what it was expecting
|
||||||
Mismatch {
|
Mismatch {
|
||||||
expected: &'static str,
|
expected: &'static str,
|
||||||
actual: Spanned<String>,
|
actual: Spanned<String>,
|
||||||
},
|
},
|
||||||
|
/// The parser tried to parse an argument for a command, but it failed for
|
||||||
|
/// some reason
|
||||||
ArgumentError {
|
ArgumentError {
|
||||||
command: Spanned<String>,
|
command: Spanned<String>,
|
||||||
error: ArgumentError,
|
error: ArgumentError,
|
||||||
},
|
},
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// A newtype for `ParseErrorReason`
|
||||||
#[derive(Debug, Clone)]
|
#[derive(Debug, Clone)]
|
||||||
pub struct ParseError {
|
pub struct ParseError {
|
||||||
reason: ParseErrorReason,
|
reason: ParseErrorReason,
|
||||||
}
|
}
|
||||||
|
|
||||||
impl ParseError {
|
impl ParseError {
|
||||||
|
/// Construct a [ParseErrorReason::Eof](ParseErrorReason::Eof)
|
||||||
pub fn unexpected_eof(expected: &'static str, span: Span) -> ParseError {
|
pub fn unexpected_eof(expected: &'static str, span: Span) -> ParseError {
|
||||||
ParseError {
|
ParseError {
|
||||||
reason: ParseErrorReason::Eof { expected, span },
|
reason: ParseErrorReason::Eof { expected, span },
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Construct a [ParseErrorReason::ExtraTokens](ParseErrorReason::ExtraTokens)
|
||||||
|
pub fn extra_tokens(actual: Spanned<impl Into<String>>) -> ParseError {
|
||||||
|
let Spanned { span, item } = actual;
|
||||||
|
|
||||||
|
ParseError {
|
||||||
|
reason: ParseErrorReason::ExtraTokens {
|
||||||
|
actual: item.into().spanned(span),
|
||||||
|
},
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Construct a [ParseErrorReason::Mismatch](ParseErrorReason::Mismatch)
|
||||||
pub fn mismatch(expected: &'static str, actual: Spanned<impl Into<String>>) -> ParseError {
|
pub fn mismatch(expected: &'static str, actual: Spanned<impl Into<String>>) -> ParseError {
|
||||||
let Spanned { span, item } = actual;
|
let Spanned { span, item } = actual;
|
||||||
|
|
||||||
@ -77,6 +69,7 @@ impl ParseError {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Construct a [ParseErrorReason::ArgumentError](ParseErrorReason::ArgumentError)
|
||||||
pub fn argument_error(command: Spanned<impl Into<String>>, kind: ArgumentError) -> ParseError {
|
pub fn argument_error(command: Spanned<impl Into<String>>, kind: ArgumentError) -> ParseError {
|
||||||
ParseError {
|
ParseError {
|
||||||
reason: ParseErrorReason::ArgumentError {
|
reason: ParseErrorReason::ArgumentError {
|
||||||
@ -87,10 +80,14 @@ impl ParseError {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Convert a [ParseError](ParseError) into a [ShellError](ShellError)
|
||||||
impl From<ParseError> for ShellError {
|
impl From<ParseError> for ShellError {
|
||||||
fn from(error: ParseError) -> ShellError {
|
fn from(error: ParseError) -> ShellError {
|
||||||
match error.reason {
|
match error.reason {
|
||||||
ParseErrorReason::Eof { expected, span } => ShellError::unexpected_eof(expected, span),
|
ParseErrorReason::Eof { expected, span } => ShellError::unexpected_eof(expected, span),
|
||||||
|
ParseErrorReason::ExtraTokens { actual } => {
|
||||||
|
ShellError::type_error("nothing", actual.clone())
|
||||||
|
}
|
||||||
ParseErrorReason::Mismatch { actual, expected } => {
|
ParseErrorReason::Mismatch { actual, expected } => {
|
||||||
ShellError::type_error(expected, actual.clone())
|
ShellError::type_error(expected, actual.clone())
|
||||||
}
|
}
|
||||||
@ -101,11 +98,20 @@ impl From<ParseError> for ShellError {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// ArgumentError describes various ways that the parser could fail because of unexpected arguments.
|
||||||
|
/// Nu commands are like a combination of functions and macros, and these errors correspond to
|
||||||
|
/// problems that could be identified during expansion based on the syntactic signature of a
|
||||||
|
/// command.
|
||||||
#[derive(Debug, Eq, PartialEq, Clone, Ord, Hash, PartialOrd, Serialize, Deserialize)]
|
#[derive(Debug, Eq, PartialEq, Clone, Ord, Hash, PartialOrd, Serialize, Deserialize)]
|
||||||
pub enum ArgumentError {
|
pub enum ArgumentError {
|
||||||
|
/// The command specified a mandatory flag, but it was missing.
|
||||||
MissingMandatoryFlag(String),
|
MissingMandatoryFlag(String),
|
||||||
|
/// The command specified a mandatory positional argument, but it was missing.
|
||||||
MissingMandatoryPositional(String),
|
MissingMandatoryPositional(String),
|
||||||
|
/// A flag was found, and it should have been followed by a value, but no value was found
|
||||||
MissingValueForName(String),
|
MissingValueForName(String),
|
||||||
|
/// A sequence of characters was found that was not syntactically valid (but would have
|
||||||
|
/// been valid if the command was an external command)
|
||||||
InvalidExternalWord,
|
InvalidExternalWord,
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -132,12 +138,16 @@ impl PrettyDebug for ArgumentError {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// A `ShellError` is a proximate error and a possible cause, which could have its own cause,
|
||||||
|
/// creating a cause chain.
|
||||||
#[derive(Debug, Eq, PartialEq, Ord, PartialOrd, Clone, Serialize, Deserialize, Hash)]
|
#[derive(Debug, Eq, PartialEq, Ord, PartialOrd, Clone, Serialize, Deserialize, Hash)]
|
||||||
pub struct ShellError {
|
pub struct ShellError {
|
||||||
error: ProximateShellError,
|
error: ProximateShellError,
|
||||||
cause: Option<Box<ProximateShellError>>,
|
cause: Option<Box<ShellError>>,
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// `PrettyDebug` is for internal debugging. For user-facing debugging, [to_diagnostic](ShellError::to_diagnostic)
|
||||||
|
/// is used, which prints an error, highlighting spans.
|
||||||
impl PrettyDebug for ShellError {
|
impl PrettyDebug for ShellError {
|
||||||
fn pretty(&self) -> DebugDocBuilder {
|
fn pretty(&self) -> DebugDocBuilder {
|
||||||
match &self.error {
|
match &self.error {
|
||||||
@ -170,12 +180,12 @@ impl PrettyDebug for ShellError {
|
|||||||
"(",
|
"(",
|
||||||
b::description("expr:")
|
b::description("expr:")
|
||||||
+ b::space()
|
+ b::space()
|
||||||
+ expr.pretty()
|
+ b::description(&expr.item)
|
||||||
+ b::description(",")
|
+ b::description(",")
|
||||||
+ b::space()
|
+ b::space()
|
||||||
+ b::description("subpath:")
|
+ b::description("subpath:")
|
||||||
+ b::space()
|
+ b::space()
|
||||||
+ subpath.pretty(),
|
+ b::description(&subpath.item),
|
||||||
")",
|
")",
|
||||||
)
|
)
|
||||||
}
|
}
|
||||||
@ -184,7 +194,7 @@ impl PrettyDebug for ShellError {
|
|||||||
+ b::space()
|
+ b::space()
|
||||||
+ b::delimit(
|
+ b::delimit(
|
||||||
"(",
|
"(",
|
||||||
b::description("subpath:") + b::space() + subpath.pretty(),
|
b::description("subpath:") + b::space() + b::description(&subpath.item),
|
||||||
")",
|
")",
|
||||||
)
|
)
|
||||||
}
|
}
|
||||||
@ -294,8 +304,8 @@ impl ShellError {
|
|||||||
expr: Spanned<impl Into<String>>,
|
expr: Spanned<impl Into<String>>,
|
||||||
) -> ShellError {
|
) -> ShellError {
|
||||||
ProximateShellError::MissingProperty {
|
ProximateShellError::MissingProperty {
|
||||||
subpath: Description::from_spanned(subpath),
|
subpath: subpath.map(|s| s.into()),
|
||||||
expr: Description::from_spanned(expr),
|
expr: expr.map(|e| e.into()),
|
||||||
}
|
}
|
||||||
.start()
|
.start()
|
||||||
}
|
}
|
||||||
@ -305,7 +315,7 @@ impl ShellError {
|
|||||||
integer: impl Into<Span>,
|
integer: impl Into<Span>,
|
||||||
) -> ShellError {
|
) -> ShellError {
|
||||||
ProximateShellError::InvalidIntegerIndex {
|
ProximateShellError::InvalidIntegerIndex {
|
||||||
subpath: Description::from_spanned(subpath),
|
subpath: subpath.map(|s| s.into()),
|
||||||
integer: integer.into(),
|
integer: integer.into(),
|
||||||
}
|
}
|
||||||
.start()
|
.start()
|
||||||
@ -318,7 +328,7 @@ impl ShellError {
|
|||||||
.start()
|
.start()
|
||||||
}
|
}
|
||||||
|
|
||||||
pub(crate) fn unexpected_eof(expected: impl Into<String>, span: impl Into<Span>) -> ShellError {
|
pub fn unexpected_eof(expected: impl Into<String>, span: impl Into<Span>) -> ShellError {
|
||||||
ProximateShellError::UnexpectedEof {
|
ProximateShellError::UnexpectedEof {
|
||||||
expected: expected.into(),
|
expected: expected.into(),
|
||||||
span: span.into(),
|
span: span.into(),
|
||||||
@ -326,7 +336,7 @@ impl ShellError {
|
|||||||
.start()
|
.start()
|
||||||
}
|
}
|
||||||
|
|
||||||
pub(crate) fn range_error(
|
pub fn range_error(
|
||||||
expected: impl Into<ExpectedRange>,
|
expected: impl Into<ExpectedRange>,
|
||||||
actual: &Spanned<impl fmt::Debug>,
|
actual: &Spanned<impl fmt::Debug>,
|
||||||
operation: impl Into<String>,
|
operation: impl Into<String>,
|
||||||
@ -339,14 +349,14 @@ impl ShellError {
|
|||||||
.start()
|
.start()
|
||||||
}
|
}
|
||||||
|
|
||||||
pub(crate) fn syntax_error(problem: Spanned<impl Into<String>>) -> ShellError {
|
pub fn syntax_error(problem: Spanned<impl Into<String>>) -> ShellError {
|
||||||
ProximateShellError::SyntaxError {
|
ProximateShellError::SyntaxError {
|
||||||
problem: problem.map(|p| p.into()),
|
problem: problem.map(|p| p.into()),
|
||||||
}
|
}
|
||||||
.start()
|
.start()
|
||||||
}
|
}
|
||||||
|
|
||||||
pub(crate) fn coerce_error(
|
pub fn coerce_error(
|
||||||
left: Spanned<impl Into<String>>,
|
left: Spanned<impl Into<String>>,
|
||||||
right: Spanned<impl Into<String>>,
|
right: Spanned<impl Into<String>>,
|
||||||
) -> ShellError {
|
) -> ShellError {
|
||||||
@ -357,10 +367,7 @@ impl ShellError {
|
|||||||
.start()
|
.start()
|
||||||
}
|
}
|
||||||
|
|
||||||
pub(crate) fn argument_error(
|
pub fn argument_error(command: Spanned<impl Into<String>>, kind: ArgumentError) -> ShellError {
|
||||||
command: Spanned<impl Into<String>>,
|
|
||||||
kind: ArgumentError,
|
|
||||||
) -> ShellError {
|
|
||||||
ProximateShellError::ArgumentError {
|
ProximateShellError::ArgumentError {
|
||||||
command: command.map(|c| c.into()),
|
command: command.map(|c| c.into()),
|
||||||
error: kind,
|
error: kind,
|
||||||
@ -368,7 +375,7 @@ impl ShellError {
|
|||||||
.start()
|
.start()
|
||||||
}
|
}
|
||||||
|
|
||||||
pub(crate) fn parse_error(
|
pub fn parse_error(
|
||||||
error: nom::Err<(
|
error: nom::Err<(
|
||||||
nom_locate::LocatedSpanEx<&str, TracableContext>,
|
nom_locate::LocatedSpanEx<&str, TracableContext>,
|
||||||
nom::error::ErrorKind,
|
nom::error::ErrorKind,
|
||||||
@ -395,11 +402,11 @@ impl ShellError {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub(crate) fn diagnostic(diagnostic: Diagnostic<Span>) -> ShellError {
|
pub fn diagnostic(diagnostic: Diagnostic<Span>) -> ShellError {
|
||||||
ProximateShellError::Diagnostic(ShellDiagnostic { diagnostic }).start()
|
ProximateShellError::Diagnostic(ShellDiagnostic { diagnostic }).start()
|
||||||
}
|
}
|
||||||
|
|
||||||
pub(crate) fn to_diagnostic(self) -> Diagnostic<Span> {
|
pub fn to_diagnostic(self) -> Diagnostic<Span> {
|
||||||
match self.error {
|
match self.error {
|
||||||
ProximateShellError::MissingValue { span, reason } => {
|
ProximateShellError::MissingValue { span, reason } => {
|
||||||
let mut d = Diagnostic::new(
|
let mut d = Diagnostic::new(
|
||||||
@ -491,7 +498,7 @@ impl ShellError {
|
|||||||
Label::new_primary(span).with_message(format!(
|
Label::new_primary(span).with_message(format!(
|
||||||
"Expected to convert {} to {} while {}, but it was out of range",
|
"Expected to convert {} to {} while {}, but it was out of range",
|
||||||
item,
|
item,
|
||||||
kind.desc(),
|
kind.display(),
|
||||||
operation
|
operation
|
||||||
)),
|
)),
|
||||||
),
|
),
|
||||||
@ -506,31 +513,33 @@ impl ShellError {
|
|||||||
.with_label(Label::new_primary(span).with_message(item)),
|
.with_label(Label::new_primary(span).with_message(item)),
|
||||||
|
|
||||||
ProximateShellError::MissingProperty { subpath, expr, .. } => {
|
ProximateShellError::MissingProperty { subpath, expr, .. } => {
|
||||||
let subpath = subpath.into_label();
|
|
||||||
let expr = expr.into_label();
|
|
||||||
|
|
||||||
let mut diag = Diagnostic::new(Severity::Error, "Missing property");
|
let mut diag = Diagnostic::new(Severity::Error, "Missing property");
|
||||||
|
|
||||||
match subpath {
|
if subpath.span == Span::unknown() {
|
||||||
Ok(label) => diag = diag.with_label(label),
|
diag.message = format!("Missing property (for {})", subpath.item);
|
||||||
Err(ty) => diag.message = format!("Missing property (for {})", ty),
|
} else {
|
||||||
}
|
let subpath = Label::new_primary(subpath.span).with_message(subpath.item);
|
||||||
|
diag = diag.with_label(subpath);
|
||||||
|
|
||||||
|
if expr.span != Span::unknown() {
|
||||||
|
let expr = Label::new_primary(expr.span).with_message(expr.item);
|
||||||
|
diag = diag.with_label(expr)
|
||||||
|
}
|
||||||
|
|
||||||
if let Ok(label) = expr {
|
|
||||||
diag = diag.with_label(label);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
diag
|
diag
|
||||||
}
|
}
|
||||||
|
|
||||||
ProximateShellError::InvalidIntegerIndex { subpath,integer } => {
|
ProximateShellError::InvalidIntegerIndex { subpath,integer } => {
|
||||||
let subpath = subpath.into_label();
|
|
||||||
|
|
||||||
let mut diag = Diagnostic::new(Severity::Error, "Invalid integer property");
|
let mut diag = Diagnostic::new(Severity::Error, "Invalid integer property");
|
||||||
|
|
||||||
match subpath {
|
if subpath.span == Span::unknown() {
|
||||||
Ok(label) => diag = diag.with_label(label),
|
diag.message = format!("Invalid integer property (for {})", subpath.item)
|
||||||
Err(ty) => diag.message = format!("Invalid integer property (for {})", ty)
|
} else {
|
||||||
|
let label = Label::new_primary(subpath.span).with_message(subpath.item);
|
||||||
|
diag = diag.with_label(label)
|
||||||
}
|
}
|
||||||
|
|
||||||
diag = diag.with_label(Label::new_secondary(integer).with_message("integer"));
|
diag = diag.with_label(Label::new_secondary(integer).with_message("integer"));
|
||||||
@ -579,23 +588,19 @@ impl ShellError {
|
|||||||
)
|
)
|
||||||
}
|
}
|
||||||
|
|
||||||
// pub fn string(title: impl Into<String>) -> ShellError {
|
pub fn unimplemented(title: impl Into<String>) -> ShellError {
|
||||||
// ProximateShellError::String(StringError::new(title.into(), String::new())).start()
|
|
||||||
// }
|
|
||||||
//
|
|
||||||
// pub(crate) fn unreachable(title: impl Into<String>) -> ShellError {
|
|
||||||
// ShellError::untagged_runtime_error(&format!("BUG: Unreachable: {}", title.into()))
|
|
||||||
// }
|
|
||||||
|
|
||||||
pub(crate) fn unimplemented(title: impl Into<String>) -> ShellError {
|
|
||||||
ShellError::untagged_runtime_error(&format!("Unimplemented: {}", title.into()))
|
ShellError::untagged_runtime_error(&format!("Unimplemented: {}", title.into()))
|
||||||
}
|
}
|
||||||
|
|
||||||
pub(crate) fn unexpected(title: impl Into<String>) -> ShellError {
|
pub fn unexpected(title: impl Into<String>) -> ShellError {
|
||||||
ShellError::untagged_runtime_error(&format!("Unexpected: {}", title.into()))
|
ShellError::untagged_runtime_error(&format!("Unexpected: {}", title.into()))
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// `ExpectedRange` describes a range of values that was expected by a command. In addition
|
||||||
|
/// to typical ranges, this enum allows an error to specify that the range of allowed values
|
||||||
|
/// corresponds to a particular numeric type (which is a dominant use-case for the
|
||||||
|
/// [RangeError](ProximateShellError::RangeError) error type).
|
||||||
#[derive(Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Clone, Serialize, Deserialize)]
|
#[derive(Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Clone, Serialize, Deserialize)]
|
||||||
pub enum ExpectedRange {
|
pub enum ExpectedRange {
|
||||||
I8,
|
I8,
|
||||||
@ -617,6 +622,7 @@ pub enum ExpectedRange {
|
|||||||
Range { start: usize, end: usize },
|
Range { start: usize, end: usize },
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Convert a Rust range into an [ExpectedRange](ExpectedRange).
|
||||||
impl From<Range<usize>> for ExpectedRange {
|
impl From<Range<usize>> for ExpectedRange {
|
||||||
fn from(range: Range<usize>) -> Self {
|
fn from(range: Range<usize>) -> Self {
|
||||||
ExpectedRange::Range {
|
ExpectedRange::Range {
|
||||||
@ -628,13 +634,7 @@ impl From<Range<usize>> for ExpectedRange {
|
|||||||
|
|
||||||
impl PrettyDebug for ExpectedRange {
|
impl PrettyDebug for ExpectedRange {
|
||||||
fn pretty(&self) -> DebugDocBuilder {
|
fn pretty(&self) -> DebugDocBuilder {
|
||||||
b::description(self.desc())
|
b::description(match self {
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl ExpectedRange {
|
|
||||||
fn desc(&self) -> String {
|
|
||||||
match self {
|
|
||||||
ExpectedRange::I8 => "an 8-bit signed integer",
|
ExpectedRange::I8 => "an 8-bit signed integer",
|
||||||
ExpectedRange::I16 => "a 16-bit signed integer",
|
ExpectedRange::I16 => "a 16-bit signed integer",
|
||||||
ExpectedRange::I32 => "a 32-bit signed integer",
|
ExpectedRange::I32 => "a 32-bit signed integer",
|
||||||
@ -651,9 +651,10 @@ impl ExpectedRange {
|
|||||||
ExpectedRange::Size => "a list offset",
|
ExpectedRange::Size => "a list offset",
|
||||||
ExpectedRange::BigDecimal => "a decimal",
|
ExpectedRange::BigDecimal => "a decimal",
|
||||||
ExpectedRange::BigInt => "an integer",
|
ExpectedRange::BigInt => "an integer",
|
||||||
ExpectedRange::Range { start, end } => return format!("{} to {}", start, end),
|
ExpectedRange::Range { start, end } => {
|
||||||
}
|
return b::description(format!("{} to {}", start, end))
|
||||||
.to_string()
|
}
|
||||||
|
})
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -671,11 +672,11 @@ pub enum ProximateShellError {
|
|||||||
actual: Spanned<Option<String>>,
|
actual: Spanned<Option<String>>,
|
||||||
},
|
},
|
||||||
MissingProperty {
|
MissingProperty {
|
||||||
subpath: Description,
|
subpath: Spanned<String>,
|
||||||
expr: Description,
|
expr: Spanned<String>,
|
||||||
},
|
},
|
||||||
InvalidIntegerIndex {
|
InvalidIntegerIndex {
|
||||||
subpath: Description,
|
subpath: Spanned<String>,
|
||||||
integer: Span,
|
integer: Span,
|
||||||
},
|
},
|
||||||
MissingValue {
|
MissingValue {
|
12
crates/nu-macros/Cargo.toml
Normal file
12
crates/nu-macros/Cargo.toml
Normal file
@ -0,0 +1,12 @@
|
|||||||
|
[package]
|
||||||
|
name = "nu-macros"
|
||||||
|
version = "0.7.0"
|
||||||
|
authors = ["Yehuda Katz <wycats@gmail.com>"]
|
||||||
|
edition = "2018"
|
||||||
|
description = "Core macros for building Nushell"
|
||||||
|
license = "MIT"
|
||||||
|
|
||||||
|
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
|
||||||
|
|
||||||
|
[dependencies]
|
||||||
|
nu-protocol = { path = "../nu-protocol", version = "0.7.0" }
|
25
crates/nu-macros/src/lib.rs
Normal file
25
crates/nu-macros/src/lib.rs
Normal file
@ -0,0 +1,25 @@
|
|||||||
|
#[macro_export]
|
||||||
|
macro_rules! signature {
|
||||||
|
(def $name:tt {
|
||||||
|
$usage:tt
|
||||||
|
$(
|
||||||
|
$positional_name:tt $positional_ty:tt - $positional_desc:tt
|
||||||
|
)*
|
||||||
|
}) => {{
|
||||||
|
let signature = Signature::new(stringify!($name)).desc($usage);
|
||||||
|
$(
|
||||||
|
$crate::positional! { signature, $positional_name $positional_ty - $positional_desc }
|
||||||
|
)*
|
||||||
|
signature
|
||||||
|
}};
|
||||||
|
}
|
||||||
|
|
||||||
|
#[macro_export]
|
||||||
|
macro_rules! positional {
|
||||||
|
($ident:tt, $name:tt (optional $shape:tt) - $desc:tt) => {
|
||||||
|
let $ident = $ident.required(stringify!($name), SyntaxShape::$shape, $desc);
|
||||||
|
};
|
||||||
|
($ident:tt, $name:tt ($shape:tt)- $desc:tt) => {
|
||||||
|
let $ident = $ident.optional(stringify!($name), SyntaxShape::$shape, $desc);
|
||||||
|
};
|
||||||
|
}
|
47
crates/nu-parser/Cargo.toml
Normal file
47
crates/nu-parser/Cargo.toml
Normal file
@ -0,0 +1,47 @@
|
|||||||
|
[package]
|
||||||
|
name = "nu-parser"
|
||||||
|
version = "0.7.0"
|
||||||
|
authors = ["Yehuda Katz <wycats@gmail.com>", "Jonathan Turner <jonathan.d.turner@gmail.com>", "Andrés N. Robalino <andres@androbtech.com>"]
|
||||||
|
edition = "2018"
|
||||||
|
description = "Core parser used in Nushell"
|
||||||
|
license = "MIT"
|
||||||
|
|
||||||
|
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
|
||||||
|
|
||||||
|
[dependencies]
|
||||||
|
nu-errors = { path = "../nu-errors", version = "0.7.0" }
|
||||||
|
nu-source = { path = "../nu-source", version = "0.7.0" }
|
||||||
|
nu-protocol = { path = "../nu-protocol", version = "0.7.0" }
|
||||||
|
|
||||||
|
pretty_env_logger = "0.3.1"
|
||||||
|
pretty = "0.5.2"
|
||||||
|
termcolor = "1.0.5"
|
||||||
|
log = "0.4.8"
|
||||||
|
indexmap = { version = "1.3.0", features = ["serde-1"] }
|
||||||
|
serde = { version = "1.0.102", features = ["derive"] }
|
||||||
|
nom = "5.0.1"
|
||||||
|
nom_locate = "1.0.0"
|
||||||
|
nom-tracable = "0.4.1"
|
||||||
|
num-traits = "0.2.8"
|
||||||
|
num-bigint = { version = "0.2.3", features = ["serde"] }
|
||||||
|
bigdecimal = { version = "0.1.0", features = ["serde"] }
|
||||||
|
derive-new = "0.5.8"
|
||||||
|
getset = "0.0.9"
|
||||||
|
cfg-if = "0.1"
|
||||||
|
itertools = "0.8.1"
|
||||||
|
shellexpand = "1.0.0"
|
||||||
|
ansi_term = "0.12.1"
|
||||||
|
ptree = {version = "0.2" }
|
||||||
|
language-reporting = "0.4.0"
|
||||||
|
unicode-xid = "0.2.0"
|
||||||
|
enumflags2 = "0.6.2"
|
||||||
|
|
||||||
|
[dev-dependencies]
|
||||||
|
pretty_assertions = "0.6.1"
|
||||||
|
|
||||||
|
[build-dependencies]
|
||||||
|
nu-build = { version = "0.7.0", path = "../nu-build" }
|
||||||
|
|
||||||
|
[features]
|
||||||
|
stable = []
|
||||||
|
trace = ["nom-tracable/trace"]
|
3
crates/nu-parser/build.rs
Normal file
3
crates/nu-parser/build.rs
Normal file
@ -0,0 +1,3 @@
|
|||||||
|
fn main() -> Result<(), Box<dyn std::error::Error>> {
|
||||||
|
nu_build::build()
|
||||||
|
}
|
35
crates/nu-parser/src/commands.rs
Normal file
35
crates/nu-parser/src/commands.rs
Normal file
@ -0,0 +1,35 @@
|
|||||||
|
pub mod classified;
|
||||||
|
|
||||||
|
use crate::commands::classified::external::{ExternalArg, ExternalArgs, ExternalCommand};
|
||||||
|
use crate::commands::classified::ClassifiedCommand;
|
||||||
|
use crate::hir::expand_external_tokens::ExternalTokensShape;
|
||||||
|
use crate::hir::syntax_shape::{expand_syntax, ExpandContext};
|
||||||
|
use crate::hir::tokens_iterator::TokensIterator;
|
||||||
|
use nu_errors::ParseError;
|
||||||
|
use nu_source::{Spanned, Tagged};
|
||||||
|
|
||||||
|
// Classify this command as an external command, which doesn't give special meaning
|
||||||
|
// to nu syntactic constructs, and passes all arguments to the external command as
|
||||||
|
// strings.
|
||||||
|
pub(crate) fn external_command(
|
||||||
|
tokens: &mut TokensIterator,
|
||||||
|
context: &ExpandContext,
|
||||||
|
name: Tagged<&str>,
|
||||||
|
) -> Result<ClassifiedCommand, ParseError> {
|
||||||
|
let Spanned { item, span } = expand_syntax(&ExternalTokensShape, tokens, context)?.tokens;
|
||||||
|
|
||||||
|
Ok(ClassifiedCommand::External(ExternalCommand {
|
||||||
|
name: name.to_string(),
|
||||||
|
name_tag: name.tag(),
|
||||||
|
args: ExternalArgs {
|
||||||
|
list: item
|
||||||
|
.iter()
|
||||||
|
.map(|x| ExternalArg {
|
||||||
|
tag: x.span.into(),
|
||||||
|
arg: x.item.clone(),
|
||||||
|
})
|
||||||
|
.collect(),
|
||||||
|
span,
|
||||||
|
},
|
||||||
|
}))
|
||||||
|
}
|
92
crates/nu-parser/src/commands/classified.rs
Normal file
92
crates/nu-parser/src/commands/classified.rs
Normal file
@ -0,0 +1,92 @@
|
|||||||
|
pub mod external;
|
||||||
|
pub mod internal;
|
||||||
|
|
||||||
|
use crate::commands::classified::external::ExternalCommand;
|
||||||
|
use crate::commands::classified::internal::InternalCommand;
|
||||||
|
use crate::hir;
|
||||||
|
use crate::parse::token_tree::TokenNode;
|
||||||
|
use derive_new::new;
|
||||||
|
use nu_source::{b, DebugDocBuilder, HasSpan, PrettyDebugWithSource, Span};
|
||||||
|
|
||||||
|
#[derive(Debug, Clone, Eq, PartialEq)]
|
||||||
|
pub enum ClassifiedCommand {
|
||||||
|
#[allow(unused)]
|
||||||
|
Expr(TokenNode),
|
||||||
|
#[allow(unused)]
|
||||||
|
Dynamic(hir::Call),
|
||||||
|
Internal(InternalCommand),
|
||||||
|
External(ExternalCommand),
|
||||||
|
}
|
||||||
|
|
||||||
|
impl PrettyDebugWithSource for ClassifiedCommand {
|
||||||
|
fn pretty_debug(&self, source: &str) -> DebugDocBuilder {
|
||||||
|
match self {
|
||||||
|
ClassifiedCommand::Expr(token) => b::typed("command", token.pretty_debug(source)),
|
||||||
|
ClassifiedCommand::Dynamic(call) => b::typed("command", call.pretty_debug(source)),
|
||||||
|
ClassifiedCommand::Internal(internal) => internal.pretty_debug(source),
|
||||||
|
ClassifiedCommand::External(external) => external.pretty_debug(source),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl HasSpan for ClassifiedCommand {
|
||||||
|
fn span(&self) -> Span {
|
||||||
|
match self {
|
||||||
|
ClassifiedCommand::Expr(node) => node.span(),
|
||||||
|
ClassifiedCommand::Internal(command) => command.span(),
|
||||||
|
ClassifiedCommand::Dynamic(call) => call.span,
|
||||||
|
ClassifiedCommand::External(command) => command.span(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(new, Debug, Eq, PartialEq)]
|
||||||
|
pub(crate) struct DynamicCommand {
|
||||||
|
pub(crate) args: hir::Call,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Clone)]
|
||||||
|
pub struct Commands {
|
||||||
|
pub list: Vec<ClassifiedCommand>,
|
||||||
|
pub span: Span,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl std::ops::Deref for Commands {
|
||||||
|
type Target = [ClassifiedCommand];
|
||||||
|
|
||||||
|
fn deref(&self) -> &Self::Target {
|
||||||
|
&self.list
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Clone)]
|
||||||
|
pub struct ClassifiedPipeline {
|
||||||
|
pub commands: Commands,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl ClassifiedPipeline {
|
||||||
|
pub fn commands(list: Vec<ClassifiedCommand>, span: impl Into<Span>) -> ClassifiedPipeline {
|
||||||
|
ClassifiedPipeline {
|
||||||
|
commands: Commands {
|
||||||
|
list,
|
||||||
|
span: span.into(),
|
||||||
|
},
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl HasSpan for ClassifiedPipeline {
|
||||||
|
fn span(&self) -> Span {
|
||||||
|
self.commands.span
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl PrettyDebugWithSource for ClassifiedPipeline {
|
||||||
|
fn pretty_debug(&self, source: &str) -> DebugDocBuilder {
|
||||||
|
b::intersperse(
|
||||||
|
self.commands.iter().map(|c| c.pretty_debug(source)),
|
||||||
|
b::operator(" | "),
|
||||||
|
)
|
||||||
|
.or(b::delimit("<", b::description("empty pipeline"), ">"))
|
||||||
|
}
|
||||||
|
}
|
65
crates/nu-parser/src/commands/classified/external.rs
Normal file
65
crates/nu-parser/src/commands/classified/external.rs
Normal file
@ -0,0 +1,65 @@
|
|||||||
|
use nu_source::{b, DebugDocBuilder, HasSpan, PrettyDebug, Span, Tag};
|
||||||
|
|
||||||
|
#[derive(Debug, Clone, Eq, PartialEq)]
|
||||||
|
pub struct ExternalArg {
|
||||||
|
pub arg: String,
|
||||||
|
pub tag: Tag,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl std::ops::Deref for ExternalArg {
|
||||||
|
type Target = str;
|
||||||
|
|
||||||
|
fn deref(&self) -> &str {
|
||||||
|
&self.arg
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Clone, Eq, PartialEq)]
|
||||||
|
pub struct ExternalArgs {
|
||||||
|
pub list: Vec<ExternalArg>,
|
||||||
|
pub span: Span,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl ExternalArgs {
|
||||||
|
pub fn iter(&self) -> impl Iterator<Item = &ExternalArg> {
|
||||||
|
self.list.iter()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl std::ops::Deref for ExternalArgs {
|
||||||
|
type Target = [ExternalArg];
|
||||||
|
|
||||||
|
fn deref(&self) -> &[ExternalArg] {
|
||||||
|
&self.list
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Clone, Eq, PartialEq)]
|
||||||
|
pub struct ExternalCommand {
|
||||||
|
pub name: String,
|
||||||
|
|
||||||
|
pub name_tag: Tag,
|
||||||
|
pub args: ExternalArgs,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl PrettyDebug for ExternalCommand {
|
||||||
|
fn pretty(&self) -> DebugDocBuilder {
|
||||||
|
b::typed(
|
||||||
|
"external command",
|
||||||
|
b::description(&self.name)
|
||||||
|
+ b::preceded(
|
||||||
|
b::space(),
|
||||||
|
b::intersperse(
|
||||||
|
self.args.iter().map(|a| b::primitive(format!("{}", a.arg))),
|
||||||
|
b::space(),
|
||||||
|
),
|
||||||
|
),
|
||||||
|
)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl HasSpan for ExternalCommand {
|
||||||
|
fn span(&self) -> Span {
|
||||||
|
self.name_tag.span.until(self.args.span)
|
||||||
|
}
|
||||||
|
}
|
28
crates/nu-parser/src/commands/classified/internal.rs
Normal file
28
crates/nu-parser/src/commands/classified/internal.rs
Normal file
@ -0,0 +1,28 @@
|
|||||||
|
use crate::hir;
|
||||||
|
|
||||||
|
use derive_new::new;
|
||||||
|
use nu_source::{b, DebugDocBuilder, HasSpan, PrettyDebugWithSource, Span, Tag};
|
||||||
|
|
||||||
|
#[derive(new, Debug, Clone, Eq, PartialEq)]
|
||||||
|
pub struct InternalCommand {
|
||||||
|
pub name: String,
|
||||||
|
pub name_tag: Tag,
|
||||||
|
pub args: hir::Call,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl PrettyDebugWithSource for InternalCommand {
|
||||||
|
fn pretty_debug(&self, source: &str) -> DebugDocBuilder {
|
||||||
|
b::typed(
|
||||||
|
"internal command",
|
||||||
|
b::description(&self.name) + b::space() + self.args.pretty_debug(source),
|
||||||
|
)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl HasSpan for InternalCommand {
|
||||||
|
fn span(&self) -> Span {
|
||||||
|
let start = self.name_tag.span;
|
||||||
|
|
||||||
|
start.until(self.args.span)
|
||||||
|
}
|
||||||
|
}
|
@ -4,30 +4,61 @@ pub(crate) mod expand_external_tokens;
|
|||||||
pub(crate) mod external_command;
|
pub(crate) mod external_command;
|
||||||
pub(crate) mod named;
|
pub(crate) mod named;
|
||||||
pub(crate) mod path;
|
pub(crate) mod path;
|
||||||
pub(crate) mod syntax_shape;
|
pub(crate) mod range;
|
||||||
|
pub(crate) mod signature;
|
||||||
|
pub mod syntax_shape;
|
||||||
pub(crate) mod tokens_iterator;
|
pub(crate) mod tokens_iterator;
|
||||||
|
|
||||||
use crate::parser::hir::path::PathMember;
|
use crate::hir::syntax_shape::Member;
|
||||||
use crate::parser::hir::syntax_shape::Member;
|
use crate::parse::operator::CompareOperator;
|
||||||
use crate::parser::{registry, Operator, Unit};
|
use crate::parse::parser::Number;
|
||||||
use crate::prelude::*;
|
use crate::parse::unit::Unit;
|
||||||
use derive_new::new;
|
use derive_new::new;
|
||||||
use getset::Getters;
|
use getset::Getters;
|
||||||
use nu_source::Spanned;
|
use nu_protocol::{PathMember, ShellTypeName};
|
||||||
|
use nu_source::{
|
||||||
|
b, DebugDocBuilder, HasSpan, PrettyDebug, PrettyDebugWithSource, Span, Spanned, SpannedItem,
|
||||||
|
};
|
||||||
use serde::{Deserialize, Serialize};
|
use serde::{Deserialize, Serialize};
|
||||||
use std::path::PathBuf;
|
use std::path::PathBuf;
|
||||||
|
|
||||||
use crate::evaluate::Scope;
|
use crate::parse::tokens::RawNumber;
|
||||||
use crate::parser::parse::tokens::RawNumber;
|
|
||||||
|
|
||||||
pub(crate) use self::binary::Binary;
|
pub(crate) use self::binary::Binary;
|
||||||
pub(crate) use self::external_command::ExternalCommand;
|
|
||||||
pub(crate) use self::named::NamedArguments;
|
|
||||||
pub(crate) use self::path::Path;
|
pub(crate) use self::path::Path;
|
||||||
|
pub(crate) use self::range::Range;
|
||||||
pub(crate) use self::syntax_shape::ExpandContext;
|
pub(crate) use self::syntax_shape::ExpandContext;
|
||||||
pub(crate) use self::tokens_iterator::TokensIterator;
|
pub(crate) use self::tokens_iterator::TokensIterator;
|
||||||
|
|
||||||
pub use self::syntax_shape::SyntaxShape;
|
pub use self::external_command::ExternalCommand;
|
||||||
|
pub use self::named::{NamedArguments, NamedValue};
|
||||||
|
|
||||||
|
#[derive(Debug, Clone)]
|
||||||
|
pub struct Signature {
|
||||||
|
unspanned: nu_protocol::Signature,
|
||||||
|
span: Span,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Signature {
|
||||||
|
pub fn new(unspanned: nu_protocol::Signature, span: impl Into<Span>) -> Signature {
|
||||||
|
Signature {
|
||||||
|
unspanned,
|
||||||
|
span: span.into(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl HasSpan for Signature {
|
||||||
|
fn span(&self) -> Span {
|
||||||
|
self.span
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl PrettyDebugWithSource for Signature {
|
||||||
|
fn pretty_debug(&self, source: &str) -> DebugDocBuilder {
|
||||||
|
self.unspanned.pretty_debug(source)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
#[derive(Debug, Clone, Eq, PartialEq, Getters, Serialize, Deserialize, new)]
|
#[derive(Debug, Clone, Eq, PartialEq, Getters, Serialize, Deserialize, new)]
|
||||||
pub struct Call {
|
pub struct Call {
|
||||||
@ -60,17 +91,6 @@ impl PrettyDebugWithSource for Call {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl Call {
|
|
||||||
pub fn evaluate(
|
|
||||||
&self,
|
|
||||||
registry: ®istry::CommandRegistry,
|
|
||||||
scope: &Scope,
|
|
||||||
source: &Text,
|
|
||||||
) -> Result<registry::EvaluatedArgs, ShellError> {
|
|
||||||
registry::evaluate_args(self, registry, scope, source)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)]
|
#[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)]
|
||||||
pub enum RawExpression {
|
pub enum RawExpression {
|
||||||
Literal(Literal),
|
Literal(Literal),
|
||||||
@ -78,6 +98,7 @@ pub enum RawExpression {
|
|||||||
Synthetic(Synthetic),
|
Synthetic(Synthetic),
|
||||||
Variable(Variable),
|
Variable(Variable),
|
||||||
Binary(Box<Binary>),
|
Binary(Box<Binary>),
|
||||||
|
Range(Box<Range>),
|
||||||
Block(Vec<Expression>),
|
Block(Vec<Expression>),
|
||||||
List(Vec<Expression>),
|
List(Vec<Expression>),
|
||||||
Path(Box<Path>),
|
Path(Box<Path>),
|
||||||
@ -100,6 +121,7 @@ impl ShellTypeName for RawExpression {
|
|||||||
RawExpression::Variable(..) => "variable",
|
RawExpression::Variable(..) => "variable",
|
||||||
RawExpression::List(..) => "list",
|
RawExpression::List(..) => "list",
|
||||||
RawExpression::Binary(..) => "binary",
|
RawExpression::Binary(..) => "binary",
|
||||||
|
RawExpression::Range(..) => "range",
|
||||||
RawExpression::Block(..) => "block",
|
RawExpression::Block(..) => "block",
|
||||||
RawExpression::Path(..) => "variable path",
|
RawExpression::Path(..) => "variable path",
|
||||||
RawExpression::Boolean(..) => "boolean",
|
RawExpression::Boolean(..) => "boolean",
|
||||||
@ -169,6 +191,7 @@ impl PrettyDebugWithSource for Expression {
|
|||||||
},
|
},
|
||||||
RawExpression::Variable(_) => b::keyword(self.span.slice(source)),
|
RawExpression::Variable(_) => b::keyword(self.span.slice(source)),
|
||||||
RawExpression::Binary(binary) => binary.pretty_debug(source),
|
RawExpression::Binary(binary) => binary.pretty_debug(source),
|
||||||
|
RawExpression::Range(range) => range.pretty_debug(source),
|
||||||
RawExpression::Block(_) => b::opaque("block"),
|
RawExpression::Block(_) => b::opaque("block"),
|
||||||
RawExpression::List(list) => b::delimit(
|
RawExpression::List(list) => b::delimit(
|
||||||
"[",
|
"[",
|
||||||
@ -196,41 +219,37 @@ impl PrettyDebugWithSource for Expression {
|
|||||||
}
|
}
|
||||||
|
|
||||||
impl Expression {
|
impl Expression {
|
||||||
pub(crate) fn number(i: impl Into<Number>, span: impl Into<Span>) -> Expression {
|
pub fn number(i: impl Into<Number>, span: impl Into<Span>) -> Expression {
|
||||||
let span = span.into();
|
let span = span.into();
|
||||||
|
|
||||||
RawExpression::Literal(RawLiteral::Number(i.into()).into_literal(span)).into_expr(span)
|
RawExpression::Literal(RawLiteral::Number(i.into()).into_literal(span)).into_expr(span)
|
||||||
}
|
}
|
||||||
|
|
||||||
pub(crate) fn size(
|
pub fn size(i: impl Into<Number>, unit: impl Into<Unit>, span: impl Into<Span>) -> Expression {
|
||||||
i: impl Into<Number>,
|
|
||||||
unit: impl Into<Unit>,
|
|
||||||
span: impl Into<Span>,
|
|
||||||
) -> Expression {
|
|
||||||
let span = span.into();
|
let span = span.into();
|
||||||
|
|
||||||
RawExpression::Literal(RawLiteral::Size(i.into(), unit.into()).into_literal(span))
|
RawExpression::Literal(RawLiteral::Size(i.into(), unit.into()).into_literal(span))
|
||||||
.into_expr(span)
|
.into_expr(span)
|
||||||
}
|
}
|
||||||
|
|
||||||
pub(crate) fn synthetic_string(s: impl Into<String>) -> Expression {
|
pub fn synthetic_string(s: impl Into<String>) -> Expression {
|
||||||
RawExpression::Synthetic(Synthetic::String(s.into())).into_unspanned_expr()
|
RawExpression::Synthetic(Synthetic::String(s.into())).into_unspanned_expr()
|
||||||
}
|
}
|
||||||
|
|
||||||
pub(crate) fn string(inner: impl Into<Span>, outer: impl Into<Span>) -> Expression {
|
pub fn string(inner: impl Into<Span>, outer: impl Into<Span>) -> Expression {
|
||||||
let outer = outer.into();
|
let outer = outer.into();
|
||||||
|
|
||||||
RawExpression::Literal(RawLiteral::String(inner.into()).into_literal(outer))
|
RawExpression::Literal(RawLiteral::String(inner.into()).into_literal(outer))
|
||||||
.into_expr(outer)
|
.into_expr(outer)
|
||||||
}
|
}
|
||||||
|
|
||||||
pub(crate) fn column_path(members: Vec<Member>, span: impl Into<Span>) -> Expression {
|
pub fn column_path(members: Vec<Member>, span: impl Into<Span>) -> Expression {
|
||||||
let span = span.into();
|
let span = span.into();
|
||||||
|
|
||||||
RawExpression::Literal(RawLiteral::ColumnPath(members).into_literal(span)).into_expr(span)
|
RawExpression::Literal(RawLiteral::ColumnPath(members).into_literal(span)).into_expr(span)
|
||||||
}
|
}
|
||||||
|
|
||||||
pub(crate) fn path(
|
pub fn path(
|
||||||
head: Expression,
|
head: Expression,
|
||||||
tail: Vec<impl Into<PathMember>>,
|
tail: Vec<impl Into<PathMember>>,
|
||||||
span: impl Into<Span>,
|
span: impl Into<Span>,
|
||||||
@ -239,7 +258,7 @@ impl Expression {
|
|||||||
RawExpression::Path(Box::new(Path::new(head, tail))).into_expr(span.into())
|
RawExpression::Path(Box::new(Path::new(head, tail))).into_expr(span.into())
|
||||||
}
|
}
|
||||||
|
|
||||||
pub(crate) fn dot_member(head: Expression, next: impl Into<PathMember>) -> Expression {
|
pub fn dot_member(head: Expression, next: impl Into<PathMember>) -> Expression {
|
||||||
let Expression { expr: item, span } = head;
|
let Expression { expr: item, span } = head;
|
||||||
let next = next.into();
|
let next = next.into();
|
||||||
|
|
||||||
@ -257,9 +276,9 @@ impl Expression {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub(crate) fn infix(
|
pub fn infix(
|
||||||
left: Expression,
|
left: Expression,
|
||||||
op: Spanned<impl Into<Operator>>,
|
op: Spanned<impl Into<CompareOperator>>,
|
||||||
right: Expression,
|
right: Expression,
|
||||||
) -> Expression {
|
) -> Expression {
|
||||||
let new_span = left.span.until(right.span);
|
let new_span = left.span.until(right.span);
|
||||||
@ -268,36 +287,42 @@ impl Expression {
|
|||||||
.into_expr(new_span)
|
.into_expr(new_span)
|
||||||
}
|
}
|
||||||
|
|
||||||
pub(crate) fn file_path(path: impl Into<PathBuf>, outer: impl Into<Span>) -> Expression {
|
pub fn range(left: Expression, op: Span, right: Expression) -> Expression {
|
||||||
|
let new_span = left.span.until(right.span);
|
||||||
|
|
||||||
|
RawExpression::Range(Box::new(Range::new(left, op, right))).into_expr(new_span)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn file_path(path: impl Into<PathBuf>, outer: impl Into<Span>) -> Expression {
|
||||||
RawExpression::FilePath(path.into()).into_expr(outer)
|
RawExpression::FilePath(path.into()).into_expr(outer)
|
||||||
}
|
}
|
||||||
|
|
||||||
pub(crate) fn list(list: Vec<Expression>, span: impl Into<Span>) -> Expression {
|
pub fn list(list: Vec<Expression>, span: impl Into<Span>) -> Expression {
|
||||||
RawExpression::List(list).into_expr(span)
|
RawExpression::List(list).into_expr(span)
|
||||||
}
|
}
|
||||||
|
|
||||||
pub(crate) fn bare(span: impl Into<Span>) -> Expression {
|
pub fn bare(span: impl Into<Span>) -> Expression {
|
||||||
let span = span.into();
|
let span = span.into();
|
||||||
|
|
||||||
RawExpression::Literal(RawLiteral::Bare.into_literal(span)).into_expr(span)
|
RawExpression::Literal(RawLiteral::Bare.into_literal(span)).into_expr(span)
|
||||||
}
|
}
|
||||||
|
|
||||||
pub(crate) fn pattern(inner: impl Into<String>, outer: impl Into<Span>) -> Expression {
|
pub fn pattern(inner: impl Into<String>, outer: impl Into<Span>) -> Expression {
|
||||||
let outer = outer.into();
|
let outer = outer.into();
|
||||||
|
|
||||||
RawExpression::Literal(RawLiteral::GlobPattern(inner.into()).into_literal(outer))
|
RawExpression::Literal(RawLiteral::GlobPattern(inner.into()).into_literal(outer))
|
||||||
.into_expr(outer)
|
.into_expr(outer)
|
||||||
}
|
}
|
||||||
|
|
||||||
pub(crate) fn variable(inner: impl Into<Span>, outer: impl Into<Span>) -> Expression {
|
pub fn variable(inner: impl Into<Span>, outer: impl Into<Span>) -> Expression {
|
||||||
RawExpression::Variable(Variable::Other(inner.into())).into_expr(outer)
|
RawExpression::Variable(Variable::Other(inner.into())).into_expr(outer)
|
||||||
}
|
}
|
||||||
|
|
||||||
pub(crate) fn external_command(inner: impl Into<Span>, outer: impl Into<Span>) -> Expression {
|
pub fn external_command(inner: impl Into<Span>, outer: impl Into<Span>) -> Expression {
|
||||||
RawExpression::ExternalCommand(ExternalCommand::new(inner.into())).into_expr(outer)
|
RawExpression::ExternalCommand(ExternalCommand::new(inner.into())).into_expr(outer)
|
||||||
}
|
}
|
||||||
|
|
||||||
pub(crate) fn it_variable(inner: impl Into<Span>, outer: impl Into<Span>) -> Expression {
|
pub fn it_variable(inner: impl Into<Span>, outer: impl Into<Span>) -> Expression {
|
||||||
RawExpression::Variable(Variable::It(inner.into())).into_expr(outer)
|
RawExpression::Variable(Variable::It(inner.into())).into_expr(outer)
|
||||||
}
|
}
|
||||||
}
|
}
|
2
crates/nu-parser/src/hir/baseline_parse.rs
Normal file
2
crates/nu-parser/src/hir/baseline_parse.rs
Normal file
@ -0,0 +1,2 @@
|
|||||||
|
#[cfg(test)]
|
||||||
|
pub mod tests;
|
@ -1,13 +1,13 @@
|
|||||||
use crate::commands::classified::InternalCommand;
|
use crate::commands::classified::{internal::InternalCommand, ClassifiedCommand};
|
||||||
use crate::commands::ClassifiedCommand;
|
use crate::hir::TokensIterator;
|
||||||
use crate::env::host::BasicHost;
|
use crate::hir::{self, named::NamedValue, syntax_shape::*, NamedArguments};
|
||||||
use crate::parser::hir::TokensIterator;
|
use crate::parse::files::Files;
|
||||||
use crate::parser::hir::{
|
use crate::parse::token_tree_builder::{CurriedToken, TokenTreeBuilder as b};
|
||||||
self, named::NamedValue, path::PathMember, syntax_shape::*, NamedArguments,
|
use crate::TokenNode;
|
||||||
};
|
use derive_new::new;
|
||||||
use crate::parser::parse::token_tree_builder::{CurriedToken, TokenTreeBuilder as b};
|
|
||||||
use crate::parser::TokenNode;
|
|
||||||
use indexmap::IndexMap;
|
use indexmap::IndexMap;
|
||||||
|
use nu_errors::ShellError;
|
||||||
|
use nu_protocol::{PathMember, Signature, SyntaxShape};
|
||||||
use nu_source::{HasSpan, Span, Tag, Text};
|
use nu_source::{HasSpan, Span, Tag, Text};
|
||||||
use pretty_assertions::assert_eq;
|
use pretty_assertions::assert_eq;
|
||||||
use std::fmt::Debug;
|
use std::fmt::Debug;
|
||||||
@ -23,7 +23,7 @@ fn test_parse_string() {
|
|||||||
fn test_parse_path() {
|
fn test_parse_path() {
|
||||||
parse_tokens(
|
parse_tokens(
|
||||||
VariablePathShape,
|
VariablePathShape,
|
||||||
vec![b::var("it"), b::op("."), b::bare("cpu")],
|
vec![b::var("it"), b::dot(), b::bare("cpu")],
|
||||||
|tokens| {
|
|tokens| {
|
||||||
let (outer_var, inner_var) = tokens[0].expect_var();
|
let (outer_var, inner_var) = tokens[0].expect_var();
|
||||||
let bare = tokens[2].expect_bare();
|
let bare = tokens[2].expect_bare();
|
||||||
@ -39,9 +39,9 @@ fn test_parse_path() {
|
|||||||
VariablePathShape,
|
VariablePathShape,
|
||||||
vec![
|
vec![
|
||||||
b::var("cpu"),
|
b::var("cpu"),
|
||||||
b::op("."),
|
b::dot(),
|
||||||
b::bare("amount"),
|
b::bare("amount"),
|
||||||
b::op("."),
|
b::dot(),
|
||||||
b::string("max ghz"),
|
b::string("max ghz"),
|
||||||
],
|
],
|
||||||
|tokens| {
|
|tokens| {
|
||||||
@ -90,6 +90,43 @@ fn test_parse_command() {
|
|||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[derive(new)]
|
||||||
|
struct TestRegistry {
|
||||||
|
#[new(default)]
|
||||||
|
signatures: indexmap::IndexMap<String, Signature>,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl TestRegistry {
|
||||||
|
fn insert(&mut self, key: &str, value: Signature) {
|
||||||
|
self.signatures.insert(key.to_string(), value);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl SignatureRegistry for TestRegistry {
|
||||||
|
fn has(&self, name: &str) -> bool {
|
||||||
|
self.signatures.contains_key(name)
|
||||||
|
}
|
||||||
|
fn get(&self, name: &str) -> Option<Signature> {
|
||||||
|
self.signatures.get(name).map(|sig| sig.clone())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn with_empty_context(source: &Text, callback: impl FnOnce(ExpandContext)) {
|
||||||
|
let mut registry = TestRegistry::new();
|
||||||
|
registry.insert(
|
||||||
|
"ls",
|
||||||
|
Signature::build("ls")
|
||||||
|
.optional(
|
||||||
|
"path",
|
||||||
|
SyntaxShape::Pattern,
|
||||||
|
"a path to get the directory contents from",
|
||||||
|
)
|
||||||
|
.switch("full", "list all available columns for each entry"),
|
||||||
|
);
|
||||||
|
|
||||||
|
callback(ExpandContext::new(Box::new(registry), source, None))
|
||||||
|
}
|
||||||
|
|
||||||
fn parse_tokens<T: Eq + HasSpan + Clone + Debug + 'static>(
|
fn parse_tokens<T: Eq + HasSpan + Clone + Debug + 'static>(
|
||||||
shape: impl ExpandSyntax<Output = T>,
|
shape: impl ExpandSyntax<Output = T>,
|
||||||
tokens: Vec<CurriedToken>,
|
tokens: Vec<CurriedToken>,
|
||||||
@ -99,7 +136,7 @@ fn parse_tokens<T: Eq + HasSpan + Clone + Debug + 'static>(
|
|||||||
let (tokens, source) = b::build(tokens);
|
let (tokens, source) = b::build(tokens);
|
||||||
let text = Text::from(source);
|
let text = Text::from(source);
|
||||||
|
|
||||||
ExpandContext::with_empty(&text, |context| {
|
with_empty_context(&text, |context| {
|
||||||
let tokens = tokens.expect_list();
|
let tokens = tokens.expect_list();
|
||||||
let mut iterator = TokensIterator::all(tokens.item, text.clone(), tokens.span);
|
let mut iterator = TokensIterator::all(tokens.item, text.clone(), tokens.span);
|
||||||
|
|
||||||
@ -108,7 +145,7 @@ fn parse_tokens<T: Eq + HasSpan + Clone + Debug + 'static>(
|
|||||||
let expr = match expr {
|
let expr = match expr {
|
||||||
Ok(expr) => expr,
|
Ok(expr) => expr,
|
||||||
Err(err) => {
|
Err(err) => {
|
||||||
crate::cli::print_err(err.into(), &BasicHost, context.source().clone());
|
print_err(err.into(), &context.source().clone());
|
||||||
panic!("Parse failed");
|
panic!("Parse failed");
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
@ -120,3 +157,18 @@ fn parse_tokens<T: Eq + HasSpan + Clone + Debug + 'static>(
|
|||||||
fn inner_string_span(span: Span) -> Span {
|
fn inner_string_span(span: Span) -> Span {
|
||||||
Span::new(span.start() + 1, span.end() - 1)
|
Span::new(span.start() + 1, span.end() - 1)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
pub fn print_err(err: ShellError, source: &Text) {
|
||||||
|
let diag = err.to_diagnostic();
|
||||||
|
|
||||||
|
let writer = termcolor::StandardStream::stderr(termcolor::ColorChoice::Auto);
|
||||||
|
let mut source = source.to_string();
|
||||||
|
source.push_str(" ");
|
||||||
|
let files = Files::new(source);
|
||||||
|
let _ = language_reporting::emit(
|
||||||
|
&mut writer.lock(),
|
||||||
|
&files,
|
||||||
|
&diag,
|
||||||
|
&language_reporting::DefaultConfig,
|
||||||
|
);
|
||||||
|
}
|
@ -1,18 +1,17 @@
|
|||||||
use crate::parser::{hir::Expression, Operator};
|
use crate::{hir::Expression, CompareOperator};
|
||||||
use crate::prelude::*;
|
|
||||||
|
|
||||||
use derive_new::new;
|
use derive_new::new;
|
||||||
use getset::Getters;
|
use getset::Getters;
|
||||||
use nu_source::Spanned;
|
use nu_source::{b, DebugDocBuilder, PrettyDebugWithSource, Spanned};
|
||||||
use serde::{Deserialize, Serialize};
|
use serde::{Deserialize, Serialize};
|
||||||
|
|
||||||
#[derive(
|
#[derive(
|
||||||
Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Hash, Getters, Serialize, Deserialize, new,
|
Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Hash, Getters, Serialize, Deserialize, new,
|
||||||
)]
|
)]
|
||||||
#[get = "pub(crate)"]
|
#[get = "pub"]
|
||||||
pub struct Binary {
|
pub struct Binary {
|
||||||
left: Expression,
|
left: Expression,
|
||||||
op: Spanned<Operator>,
|
op: Spanned<CompareOperator>,
|
||||||
right: Expression,
|
right: Expression,
|
||||||
}
|
}
|
||||||
|
|
@ -1,7 +1,4 @@
|
|||||||
use crate::errors::ParseError;
|
use crate::{
|
||||||
#[cfg(not(coloring_in_tokens))]
|
|
||||||
use crate::parser::hir::syntax_shape::FlatShape;
|
|
||||||
use crate::parser::{
|
|
||||||
hir::syntax_shape::{
|
hir::syntax_shape::{
|
||||||
color_syntax, expand_atom, expand_expr, expand_syntax, AtomicToken, ColorSyntax,
|
color_syntax, expand_atom, expand_expr, expand_syntax, AtomicToken, ColorSyntax,
|
||||||
ExpandContext, ExpandExpression, ExpandSyntax, ExpansionRule, MaybeSpaceShape,
|
ExpandContext, ExpandExpression, ExpandSyntax, ExpansionRule, MaybeSpaceShape,
|
||||||
@ -10,6 +7,8 @@ use crate::parser::{
|
|||||||
hir::Expression,
|
hir::Expression,
|
||||||
TokensIterator,
|
TokensIterator,
|
||||||
};
|
};
|
||||||
|
use nu_errors::ParseError;
|
||||||
|
use nu_protocol::SpannedTypeName;
|
||||||
use nu_source::{b, DebugDocBuilder, HasSpan, PrettyDebug, Span, Spanned, SpannedItem};
|
use nu_source::{b, DebugDocBuilder, HasSpan, PrettyDebug, Span, Spanned, SpannedItem};
|
||||||
|
|
||||||
#[derive(Debug, Clone)]
|
#[derive(Debug, Clone)]
|
||||||
@ -68,33 +67,6 @@ impl ExpandSyntax for ExternalTokensShape {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
#[cfg(not(coloring_in_tokens))]
|
|
||||||
impl ColorSyntax for ExternalTokensShape {
|
|
||||||
type Info = ();
|
|
||||||
type Input = ();
|
|
||||||
|
|
||||||
fn color_syntax<'a, 'b>(
|
|
||||||
&self,
|
|
||||||
_input: &(),
|
|
||||||
token_nodes: &'b mut TokensIterator<'a>,
|
|
||||||
context: &ExpandContext,
|
|
||||||
shapes: &mut Vec<Spanned<FlatShape>>,
|
|
||||||
) -> Self::Info {
|
|
||||||
loop {
|
|
||||||
// Allow a space
|
|
||||||
color_syntax(&MaybeSpaceShape, token_nodes, context, shapes);
|
|
||||||
|
|
||||||
// Process an external expression. External expressions are mostly words, with a
|
|
||||||
// few exceptions (like $variables and path expansion rules)
|
|
||||||
match color_syntax(&ExternalExpression, token_nodes, context, shapes).1 {
|
|
||||||
ExternalExpressionResult::Eof => break,
|
|
||||||
ExternalExpressionResult::Processed => continue,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[cfg(coloring_in_tokens)]
|
|
||||||
impl ColorSyntax for ExternalTokensShape {
|
impl ColorSyntax for ExternalTokensShape {
|
||||||
type Info = ();
|
type Info = ();
|
||||||
type Input = ();
|
type Input = ();
|
||||||
@ -115,7 +87,7 @@ impl ColorSyntax for ExternalTokensShape {
|
|||||||
|
|
||||||
// Process an external expression. External expressions are mostly words, with a
|
// Process an external expression. External expressions are mostly words, with a
|
||||||
// few exceptions (like $variables and path expansion rules)
|
// few exceptions (like $variables and path expansion rules)
|
||||||
match color_syntax(&ExternalExpression, token_nodes, context).1 {
|
match color_syntax(&ExternalExpressionShape, token_nodes, context).1 {
|
||||||
ExternalExpressionResult::Eof => break,
|
ExternalExpressionResult::Eof => break,
|
||||||
ExternalExpressionResult::Processed => continue,
|
ExternalExpressionResult::Processed => continue,
|
||||||
}
|
}
|
||||||
@ -144,7 +116,7 @@ impl ExpandSyntax for ExternalExpressionShape {
|
|||||||
token_nodes,
|
token_nodes,
|
||||||
"external command",
|
"external command",
|
||||||
context,
|
context,
|
||||||
ExpansionRule::new().allow_external_command(),
|
ExpansionRule::new().allow_external_word(),
|
||||||
)?
|
)?
|
||||||
.span;
|
.span;
|
||||||
|
|
||||||
@ -164,40 +136,6 @@ impl ExpandSyntax for ExternalExpressionShape {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Debug, Copy, Clone)]
|
|
||||||
struct ExternalExpression;
|
|
||||||
|
|
||||||
impl ExpandSyntax for ExternalExpression {
|
|
||||||
type Output = Option<Span>;
|
|
||||||
|
|
||||||
fn name(&self) -> &'static str {
|
|
||||||
"external expression"
|
|
||||||
}
|
|
||||||
|
|
||||||
fn expand_syntax<'a, 'b>(
|
|
||||||
&self,
|
|
||||||
token_nodes: &'b mut TokensIterator<'a>,
|
|
||||||
context: &ExpandContext,
|
|
||||||
) -> Result<Self::Output, ParseError> {
|
|
||||||
expand_syntax(&MaybeSpaceShape, token_nodes, context)?;
|
|
||||||
|
|
||||||
let first = expand_syntax(&ExternalHeadShape, token_nodes, context)?.span;
|
|
||||||
let mut last = first;
|
|
||||||
|
|
||||||
loop {
|
|
||||||
let continuation = expand_syntax(&ExternalContinuationShape, token_nodes, context);
|
|
||||||
|
|
||||||
if let Ok(continuation) = continuation {
|
|
||||||
last = continuation.span;
|
|
||||||
} else {
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
Ok(Some(first.until(last)))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug, Copy, Clone)]
|
#[derive(Debug, Copy, Clone)]
|
||||||
struct ExternalHeadShape;
|
struct ExternalHeadShape;
|
||||||
|
|
||||||
@ -229,11 +167,18 @@ impl ExpandExpression for ExternalHeadShape {
|
|||||||
UnspannedAtomicToken::Whitespace { .. } => {
|
UnspannedAtomicToken::Whitespace { .. } => {
|
||||||
unreachable!("ExpansionRule doesn't allow Whitespace")
|
unreachable!("ExpansionRule doesn't allow Whitespace")
|
||||||
}
|
}
|
||||||
|
UnspannedAtomicToken::Separator { .. } => {
|
||||||
|
unreachable!("ExpansionRule doesn't allow Separator")
|
||||||
|
}
|
||||||
|
UnspannedAtomicToken::Comment { .. } => {
|
||||||
|
unreachable!("ExpansionRule doesn't allow Comment")
|
||||||
|
}
|
||||||
UnspannedAtomicToken::ShorthandFlag { .. }
|
UnspannedAtomicToken::ShorthandFlag { .. }
|
||||||
| UnspannedAtomicToken::SquareDelimited { .. } => {
|
| UnspannedAtomicToken::SquareDelimited { .. }
|
||||||
|
| UnspannedAtomicToken::RoundDelimited { .. } => {
|
||||||
return Err(ParseError::mismatch(
|
return Err(ParseError::mismatch(
|
||||||
"external command name",
|
"external command name",
|
||||||
"pipeline".spanned(atom.span),
|
atom.spanned_type_name(),
|
||||||
))
|
))
|
||||||
}
|
}
|
||||||
UnspannedAtomicToken::ExternalCommand { command } => {
|
UnspannedAtomicToken::ExternalCommand { command } => {
|
||||||
@ -249,7 +194,10 @@ impl ExpandExpression for ExternalHeadShape {
|
|||||||
| UnspannedAtomicToken::GlobPattern { .. }
|
| UnspannedAtomicToken::GlobPattern { .. }
|
||||||
| UnspannedAtomicToken::Word { .. }
|
| UnspannedAtomicToken::Word { .. }
|
||||||
| UnspannedAtomicToken::Dot { .. }
|
| UnspannedAtomicToken::Dot { .. }
|
||||||
| UnspannedAtomicToken::Operator { .. } => Expression::external_command(span, span),
|
| UnspannedAtomicToken::DotDot { .. }
|
||||||
|
| UnspannedAtomicToken::CompareOperator { .. } => {
|
||||||
|
Expression::external_command(span, span)
|
||||||
|
}
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@ -291,6 +239,12 @@ impl ExpandExpression for ExternalContinuationShape {
|
|||||||
UnspannedAtomicToken::Whitespace { .. } => {
|
UnspannedAtomicToken::Whitespace { .. } => {
|
||||||
unreachable!("ExpansionRule doesn't allow Whitespace")
|
unreachable!("ExpansionRule doesn't allow Whitespace")
|
||||||
}
|
}
|
||||||
|
UnspannedAtomicToken::Separator { .. } => {
|
||||||
|
unreachable!("ExpansionRule doesn't allow Separator")
|
||||||
|
}
|
||||||
|
UnspannedAtomicToken::Comment { .. } => {
|
||||||
|
unreachable!("ExpansionRule doesn't allow Comment")
|
||||||
|
}
|
||||||
UnspannedAtomicToken::String { body } => Expression::string(*body, span),
|
UnspannedAtomicToken::String { body } => Expression::string(*body, span),
|
||||||
UnspannedAtomicToken::ItVariable { name } => Expression::it_variable(*name, span),
|
UnspannedAtomicToken::ItVariable { name } => Expression::it_variable(*name, span),
|
||||||
UnspannedAtomicToken::Variable { name } => Expression::variable(*name, span),
|
UnspannedAtomicToken::Variable { name } => Expression::variable(*name, span),
|
||||||
@ -299,24 +253,25 @@ impl ExpandExpression for ExternalContinuationShape {
|
|||||||
| UnspannedAtomicToken::Word { .. }
|
| UnspannedAtomicToken::Word { .. }
|
||||||
| UnspannedAtomicToken::ShorthandFlag { .. }
|
| UnspannedAtomicToken::ShorthandFlag { .. }
|
||||||
| UnspannedAtomicToken::Dot { .. }
|
| UnspannedAtomicToken::Dot { .. }
|
||||||
| UnspannedAtomicToken::Operator { .. } => Expression::bare(span),
|
| UnspannedAtomicToken::DotDot { .. }
|
||||||
UnspannedAtomicToken::SquareDelimited { .. } => {
|
| UnspannedAtomicToken::CompareOperator { .. } => Expression::bare(span),
|
||||||
|
UnspannedAtomicToken::SquareDelimited { .. }
|
||||||
|
| UnspannedAtomicToken::RoundDelimited { .. } => {
|
||||||
return Err(ParseError::mismatch(
|
return Err(ParseError::mismatch(
|
||||||
"external argument",
|
"external argument",
|
||||||
"pipeline".spanned(atom.span),
|
atom.spanned_type_name(),
|
||||||
))
|
))
|
||||||
}
|
}
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
#[cfg(coloring_in_tokens)]
|
impl ColorSyntax for ExternalExpressionShape {
|
||||||
impl ColorSyntax for ExternalExpression {
|
|
||||||
type Info = ExternalExpressionResult;
|
type Info = ExternalExpressionResult;
|
||||||
type Input = ();
|
type Input = ();
|
||||||
|
|
||||||
fn name(&self) -> &'static str {
|
fn name(&self) -> &'static str {
|
||||||
"ExternalExpression"
|
"ExternalExpressionShape"
|
||||||
}
|
}
|
||||||
|
|
||||||
fn color_syntax<'a, 'b>(
|
fn color_syntax<'a, 'b>(
|
||||||
@ -345,38 +300,7 @@ impl ColorSyntax for ExternalExpression {
|
|||||||
}
|
}
|
||||||
|
|
||||||
#[must_use]
|
#[must_use]
|
||||||
enum ExternalExpressionResult {
|
pub enum ExternalExpressionResult {
|
||||||
Eof,
|
Eof,
|
||||||
Processed,
|
Processed,
|
||||||
}
|
}
|
||||||
|
|
||||||
#[cfg(not(coloring_in_tokens))]
|
|
||||||
impl ColorSyntax for ExternalExpression {
|
|
||||||
type Info = ExternalExpressionResult;
|
|
||||||
type Input = ();
|
|
||||||
|
|
||||||
fn color_syntax<'a, 'b>(
|
|
||||||
&self,
|
|
||||||
_input: &(),
|
|
||||||
token_nodes: &'b mut TokensIterator<'a>,
|
|
||||||
context: &ExpandContext,
|
|
||||||
shapes: &mut Vec<Spanned<FlatShape>>,
|
|
||||||
) -> ExternalExpressionResult {
|
|
||||||
let atom = match expand_atom(
|
|
||||||
token_nodes,
|
|
||||||
"external word",
|
|
||||||
context,
|
|
||||||
ExpansionRule::permissive(),
|
|
||||||
) {
|
|
||||||
Err(_) => unreachable!("TODO: separate infallible expand_atom"),
|
|
||||||
Ok(AtomicToken {
|
|
||||||
unspanned: UnspannedAtomicToken::Eof { .. },
|
|
||||||
..
|
|
||||||
}) => return ExternalExpressionResult::Eof,
|
|
||||||
Ok(atom) => atom,
|
|
||||||
};
|
|
||||||
|
|
||||||
atom.color_tokens(shapes);
|
|
||||||
return ExternalExpressionResult::Processed;
|
|
||||||
}
|
|
||||||
}
|
|
@ -1,12 +1,12 @@
|
|||||||
use crate::prelude::*;
|
|
||||||
use derive_new::new;
|
use derive_new::new;
|
||||||
use getset::Getters;
|
use getset::Getters;
|
||||||
|
use nu_source::Span;
|
||||||
use serde::{Deserialize, Serialize};
|
use serde::{Deserialize, Serialize};
|
||||||
|
|
||||||
#[derive(
|
#[derive(
|
||||||
Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Hash, Getters, Serialize, Deserialize, new,
|
Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Hash, Getters, Serialize, Deserialize, new,
|
||||||
)]
|
)]
|
||||||
#[get = "pub(crate)"]
|
#[get = "pub"]
|
||||||
pub struct ExternalCommand {
|
pub struct ExternalCommand {
|
||||||
pub(crate) name: Span,
|
pub(crate) name: Span,
|
||||||
}
|
}
|
@ -1,8 +1,8 @@
|
|||||||
use crate::parser::hir::Expression;
|
use crate::hir::Expression;
|
||||||
use crate::parser::Flag;
|
use crate::Flag;
|
||||||
use crate::prelude::*;
|
|
||||||
use indexmap::IndexMap;
|
use indexmap::IndexMap;
|
||||||
use log::trace;
|
use log::trace;
|
||||||
|
use nu_source::{b, DebugDocBuilder, PrettyDebugWithSource, Tag};
|
||||||
use serde::{Deserialize, Serialize};
|
use serde::{Deserialize, Serialize};
|
||||||
|
|
||||||
#[derive(Debug, Clone, Eq, PartialEq, Serialize, Deserialize)]
|
#[derive(Debug, Clone, Eq, PartialEq, Serialize, Deserialize)]
|
||||||
@ -26,7 +26,7 @@ impl PrettyDebugWithSource for NamedValue {
|
|||||||
|
|
||||||
#[derive(Debug, Clone, Eq, PartialEq, Serialize, Deserialize)]
|
#[derive(Debug, Clone, Eq, PartialEq, Serialize, Deserialize)]
|
||||||
pub struct NamedArguments {
|
pub struct NamedArguments {
|
||||||
pub(crate) named: IndexMap<String, NamedValue>,
|
pub named: IndexMap<String, NamedValue>,
|
||||||
}
|
}
|
||||||
|
|
||||||
impl NamedArguments {
|
impl NamedArguments {
|
||||||
@ -35,6 +35,10 @@ impl NamedArguments {
|
|||||||
named: IndexMap::new(),
|
named: IndexMap::new(),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
pub fn iter(&self) -> impl Iterator<Item = (&String, &NamedValue)> {
|
||||||
|
self.named.iter()
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl NamedArguments {
|
impl NamedArguments {
|
41
crates/nu-parser/src/hir/path.rs
Normal file
41
crates/nu-parser/src/hir/path.rs
Normal file
@ -0,0 +1,41 @@
|
|||||||
|
use crate::hir::Expression;
|
||||||
|
use derive_new::new;
|
||||||
|
use getset::{Getters, MutGetters};
|
||||||
|
use nu_protocol::PathMember;
|
||||||
|
use nu_source::{b, DebugDocBuilder, PrettyDebug, PrettyDebugWithSource};
|
||||||
|
use serde::{Deserialize, Serialize};
|
||||||
|
|
||||||
|
#[derive(
|
||||||
|
Debug,
|
||||||
|
Clone,
|
||||||
|
Eq,
|
||||||
|
PartialEq,
|
||||||
|
Ord,
|
||||||
|
PartialOrd,
|
||||||
|
Hash,
|
||||||
|
Getters,
|
||||||
|
MutGetters,
|
||||||
|
Serialize,
|
||||||
|
Deserialize,
|
||||||
|
new,
|
||||||
|
)]
|
||||||
|
#[get = "pub"]
|
||||||
|
pub struct Path {
|
||||||
|
head: Expression,
|
||||||
|
#[get_mut = "pub(crate)"]
|
||||||
|
tail: Vec<PathMember>,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl PrettyDebugWithSource for Path {
|
||||||
|
fn pretty_debug(&self, source: &str) -> DebugDocBuilder {
|
||||||
|
self.head.pretty_debug(source)
|
||||||
|
+ b::operator(".")
|
||||||
|
+ b::intersperse(self.tail.iter().map(|m| m.pretty()), b::operator("."))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Path {
|
||||||
|
pub(crate) fn parts(self) -> (Expression, Vec<PathMember>) {
|
||||||
|
(self.head, self.tail)
|
||||||
|
}
|
||||||
|
}
|
33
crates/nu-parser/src/hir/range.rs
Normal file
33
crates/nu-parser/src/hir/range.rs
Normal file
@ -0,0 +1,33 @@
|
|||||||
|
use crate::hir::Expression;
|
||||||
|
|
||||||
|
use derive_new::new;
|
||||||
|
use getset::Getters;
|
||||||
|
use nu_source::{b, DebugDocBuilder, PrettyDebugWithSource, Span};
|
||||||
|
use serde::{Deserialize, Serialize};
|
||||||
|
|
||||||
|
#[derive(
|
||||||
|
Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Hash, Getters, Serialize, Deserialize, new,
|
||||||
|
)]
|
||||||
|
pub struct Range {
|
||||||
|
#[get = "pub"]
|
||||||
|
left: Expression,
|
||||||
|
#[get = "pub"]
|
||||||
|
dotdot: Span,
|
||||||
|
#[get = "pub"]
|
||||||
|
right: Expression,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl PrettyDebugWithSource for Range {
|
||||||
|
fn pretty_debug(&self, source: &str) -> DebugDocBuilder {
|
||||||
|
b::delimit(
|
||||||
|
"<",
|
||||||
|
self.left.pretty_debug(source)
|
||||||
|
+ b::space()
|
||||||
|
+ b::keyword(self.dotdot.slice(source))
|
||||||
|
+ b::space()
|
||||||
|
+ self.right.pretty_debug(source),
|
||||||
|
">",
|
||||||
|
)
|
||||||
|
.group()
|
||||||
|
}
|
||||||
|
}
|
481
crates/nu-parser/src/hir/signature.rs
Normal file
481
crates/nu-parser/src/hir/signature.rs
Normal file
@ -0,0 +1,481 @@
|
|||||||
|
use crate::hir;
|
||||||
|
use crate::hir::syntax_shape::{
|
||||||
|
expand_atom, expand_syntax, BareShape, ExpandContext, ExpandSyntax, ExpansionRule,
|
||||||
|
UnspannedAtomicToken, WhitespaceShape,
|
||||||
|
};
|
||||||
|
use crate::hir::tokens_iterator::TokensIterator;
|
||||||
|
use crate::parse::comment::Comment;
|
||||||
|
use derive_new::new;
|
||||||
|
use nu_errors::ParseError;
|
||||||
|
use nu_protocol::{RowType, SpannedTypeName, Type};
|
||||||
|
use nu_source::{
|
||||||
|
b, DebugDocBuilder, HasFallibleSpan, HasSpan, PrettyDebugWithSource, Span, Spanned, SpannedItem,
|
||||||
|
};
|
||||||
|
use std::fmt::Debug;
|
||||||
|
|
||||||
|
// A Signature is a command without implementation.
|
||||||
|
//
|
||||||
|
// In Nu, a command is a function combined with macro expansion rules.
|
||||||
|
//
|
||||||
|
// def cd
|
||||||
|
// # Change to a new path.
|
||||||
|
// optional directory(Path) # the directory to change to
|
||||||
|
// end
|
||||||
|
|
||||||
|
#[derive(new)]
|
||||||
|
struct Expander<'a, 'b, 'c, 'd> {
|
||||||
|
iterator: &'b mut TokensIterator<'a>,
|
||||||
|
context: &'d ExpandContext<'c>,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<'a, 'b, 'c, 'd> Expander<'a, 'b, 'c, 'd> {
|
||||||
|
fn expand<O>(&mut self, syntax: impl ExpandSyntax<Output = O>) -> Result<O, ParseError>
|
||||||
|
where
|
||||||
|
O: HasFallibleSpan + Clone + std::fmt::Debug + 'static,
|
||||||
|
{
|
||||||
|
expand_syntax(&syntax, self.iterator, self.context)
|
||||||
|
}
|
||||||
|
|
||||||
|
fn optional<O>(&mut self, syntax: impl ExpandSyntax<Output = O>) -> Option<O>
|
||||||
|
where
|
||||||
|
O: HasFallibleSpan + Clone + std::fmt::Debug + 'static,
|
||||||
|
{
|
||||||
|
match expand_syntax(&syntax, self.iterator, self.context) {
|
||||||
|
Err(_) => None,
|
||||||
|
Ok(value) => Some(value),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn pos(&mut self) -> Span {
|
||||||
|
self.iterator.span_at_cursor()
|
||||||
|
}
|
||||||
|
|
||||||
|
fn slice_string(&mut self, span: impl Into<Span>) -> String {
|
||||||
|
span.into().slice(self.context.source()).to_string()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Copy, Clone)]
|
||||||
|
struct SignatureShape;
|
||||||
|
|
||||||
|
impl ExpandSyntax for SignatureShape {
|
||||||
|
type Output = hir::Signature;
|
||||||
|
|
||||||
|
fn name(&self) -> &'static str {
|
||||||
|
"signature"
|
||||||
|
}
|
||||||
|
|
||||||
|
fn expand_syntax<'a, 'b>(
|
||||||
|
&self,
|
||||||
|
token_nodes: &'b mut TokensIterator<'a>,
|
||||||
|
context: &ExpandContext,
|
||||||
|
) -> Result<Self::Output, ParseError> {
|
||||||
|
token_nodes.atomic_parse(|token_nodes| {
|
||||||
|
let mut expander = Expander::new(token_nodes, context);
|
||||||
|
let start = expander.pos();
|
||||||
|
expander.expand(keyword("def"))?;
|
||||||
|
expander.expand(WhitespaceShape)?;
|
||||||
|
let name = expander.expand(BareShape)?;
|
||||||
|
expander.expand(SeparatorShape)?;
|
||||||
|
let usage = expander.expand(CommentShape)?;
|
||||||
|
expander.expand(SeparatorShape)?;
|
||||||
|
let end = expander.pos();
|
||||||
|
|
||||||
|
Ok(hir::Signature::new(
|
||||||
|
nu_protocol::Signature::new(&name.word).desc(expander.slice_string(usage.text)),
|
||||||
|
start.until(end),
|
||||||
|
))
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn keyword(kw: &'static str) -> KeywordShape {
|
||||||
|
KeywordShape { keyword: kw }
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Copy, Clone)]
|
||||||
|
struct KeywordShape {
|
||||||
|
keyword: &'static str,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl ExpandSyntax for KeywordShape {
|
||||||
|
type Output = Span;
|
||||||
|
|
||||||
|
fn name(&self) -> &'static str {
|
||||||
|
"keyword"
|
||||||
|
}
|
||||||
|
fn expand_syntax<'a, 'b>(
|
||||||
|
&self,
|
||||||
|
token_nodes: &'b mut TokensIterator<'a>,
|
||||||
|
context: &ExpandContext,
|
||||||
|
) -> Result<Self::Output, ParseError> {
|
||||||
|
let atom = expand_atom(token_nodes, "keyword", context, ExpansionRule::new())?;
|
||||||
|
|
||||||
|
match &atom.unspanned {
|
||||||
|
UnspannedAtomicToken::Word { text } => {
|
||||||
|
let word = text.slice(context.source());
|
||||||
|
|
||||||
|
if word == self.keyword {
|
||||||
|
return Ok(atom.span);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
_ => {}
|
||||||
|
}
|
||||||
|
|
||||||
|
Err(ParseError::mismatch(self.keyword, atom.spanned_type_name()))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Copy, Clone)]
|
||||||
|
struct SeparatorShape;
|
||||||
|
|
||||||
|
impl ExpandSyntax for SeparatorShape {
|
||||||
|
type Output = Span;
|
||||||
|
|
||||||
|
fn name(&self) -> &'static str {
|
||||||
|
"separator"
|
||||||
|
}
|
||||||
|
|
||||||
|
fn expand_syntax<'a, 'b>(
|
||||||
|
&self,
|
||||||
|
token_nodes: &'b mut TokensIterator<'a>,
|
||||||
|
context: &ExpandContext,
|
||||||
|
) -> Result<Self::Output, ParseError> {
|
||||||
|
let atom = expand_atom(token_nodes, "separator", context, ExpansionRule::new())?;
|
||||||
|
|
||||||
|
match &atom.unspanned {
|
||||||
|
UnspannedAtomicToken::Separator { text } => Ok(*text),
|
||||||
|
_ => Err(ParseError::mismatch("separator", atom.spanned_type_name())),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Copy, Clone)]
|
||||||
|
struct CommentShape;
|
||||||
|
|
||||||
|
impl ExpandSyntax for CommentShape {
|
||||||
|
type Output = Comment;
|
||||||
|
|
||||||
|
fn name(&self) -> &'static str {
|
||||||
|
"comment"
|
||||||
|
}
|
||||||
|
|
||||||
|
fn expand_syntax<'a, 'b>(
|
||||||
|
&self,
|
||||||
|
token_nodes: &'b mut TokensIterator<'a>,
|
||||||
|
context: &ExpandContext,
|
||||||
|
) -> Result<Self::Output, ParseError> {
|
||||||
|
let atom = expand_atom(token_nodes, "comment", context, ExpansionRule::new())?;
|
||||||
|
|
||||||
|
match &atom.unspanned {
|
||||||
|
UnspannedAtomicToken::Comment { body } => Ok(Comment::line(body, atom.span)),
|
||||||
|
_ => Err(ParseError::mismatch("separator", atom.spanned_type_name())),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Copy, Clone, new)]
|
||||||
|
struct TupleShape<A, B> {
|
||||||
|
first: A,
|
||||||
|
second: B,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Clone, new)]
|
||||||
|
struct TupleSyntax<A, B> {
|
||||||
|
first: A,
|
||||||
|
second: B,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<A, B> PrettyDebugWithSource for TupleSyntax<A, B>
|
||||||
|
where
|
||||||
|
A: PrettyDebugWithSource,
|
||||||
|
B: PrettyDebugWithSource,
|
||||||
|
{
|
||||||
|
fn pretty_debug(&self, source: &str) -> DebugDocBuilder {
|
||||||
|
b::typed(
|
||||||
|
"pair",
|
||||||
|
self.first.pretty_debug(source) + b::space() + self.second.pretty_debug(source),
|
||||||
|
)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<A, B> HasFallibleSpan for TupleSyntax<A, B>
|
||||||
|
where
|
||||||
|
A: HasFallibleSpan + Debug + Clone,
|
||||||
|
B: HasFallibleSpan + Debug + Clone,
|
||||||
|
{
|
||||||
|
fn maybe_span(&self) -> Option<Span> {
|
||||||
|
match (self.first.maybe_span(), self.second.maybe_span()) {
|
||||||
|
(Some(first), Some(second)) => Some(first.until(second)),
|
||||||
|
(Some(first), None) => Some(first),
|
||||||
|
(None, Some(second)) => Some(second),
|
||||||
|
(None, None) => None,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<A, B, AOut, BOut> ExpandSyntax for TupleShape<A, B>
|
||||||
|
where
|
||||||
|
A: ExpandSyntax<Output = AOut> + Debug + Copy,
|
||||||
|
B: ExpandSyntax<Output = BOut> + Debug + Copy,
|
||||||
|
AOut: HasFallibleSpan + Debug + Clone + 'static,
|
||||||
|
BOut: HasFallibleSpan + Debug + Clone + 'static,
|
||||||
|
{
|
||||||
|
type Output = TupleSyntax<AOut, BOut>;
|
||||||
|
|
||||||
|
fn name(&self) -> &'static str {
|
||||||
|
"pair"
|
||||||
|
}
|
||||||
|
|
||||||
|
fn expand_syntax<'a, 'b>(
|
||||||
|
&self,
|
||||||
|
token_nodes: &'b mut TokensIterator<'a>,
|
||||||
|
context: &ExpandContext,
|
||||||
|
) -> Result<Self::Output, ParseError> {
|
||||||
|
token_nodes.atomic_parse(|token_nodes| {
|
||||||
|
let first = expand_syntax(&self.first, token_nodes, context)?;
|
||||||
|
let second = expand_syntax(&self.second, token_nodes, context)?;
|
||||||
|
|
||||||
|
Ok(TupleSyntax { first, second })
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Clone)]
|
||||||
|
pub struct PositionalParam {
|
||||||
|
optional: Option<Span>,
|
||||||
|
name: Identifier,
|
||||||
|
ty: Spanned<Type>,
|
||||||
|
desc: Spanned<String>,
|
||||||
|
span: Span,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl HasSpan for PositionalParam {
|
||||||
|
fn span(&self) -> Span {
|
||||||
|
self.span
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl PrettyDebugWithSource for PositionalParam {
|
||||||
|
fn pretty_debug(&self, source: &str) -> DebugDocBuilder {
|
||||||
|
(match self.optional {
|
||||||
|
Some(_) => b::description("optional") + b::space(),
|
||||||
|
None => b::blank(),
|
||||||
|
}) + self.ty.pretty_debug(source)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Copy, Clone)]
|
||||||
|
pub struct PositionalParamShape;
|
||||||
|
|
||||||
|
impl ExpandSyntax for PositionalParamShape {
|
||||||
|
type Output = PositionalParam;
|
||||||
|
|
||||||
|
fn name(&self) -> &'static str {
|
||||||
|
"positional param"
|
||||||
|
}
|
||||||
|
|
||||||
|
fn expand_syntax<'a, 'b>(
|
||||||
|
&self,
|
||||||
|
token_nodes: &'b mut TokensIterator<'a>,
|
||||||
|
context: &ExpandContext,
|
||||||
|
) -> Result<Self::Output, ParseError> {
|
||||||
|
token_nodes.atomic_parse(|token_nodes| {
|
||||||
|
let mut expander = Expander::new(token_nodes, context);
|
||||||
|
|
||||||
|
let optional = expander
|
||||||
|
.optional(TupleShape::new(keyword("optional"), WhitespaceShape))
|
||||||
|
.map(|s| s.first);
|
||||||
|
|
||||||
|
let name = expander.expand(IdentifierShape)?;
|
||||||
|
|
||||||
|
expander.optional(WhitespaceShape);
|
||||||
|
|
||||||
|
let _ty = expander.expand(TypeShape)?;
|
||||||
|
|
||||||
|
Ok(PositionalParam {
|
||||||
|
optional,
|
||||||
|
name,
|
||||||
|
ty: Type::Nothing.spanned(Span::unknown()),
|
||||||
|
desc: format!("").spanned(Span::unknown()),
|
||||||
|
span: Span::unknown(),
|
||||||
|
})
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Clone)]
|
||||||
|
struct Identifier {
|
||||||
|
body: String,
|
||||||
|
span: Span,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl HasSpan for Identifier {
|
||||||
|
fn span(&self) -> Span {
|
||||||
|
self.span
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl PrettyDebugWithSource for Identifier {
|
||||||
|
fn pretty_debug(&self, source: &str) -> DebugDocBuilder {
|
||||||
|
b::typed("id", b::description(self.span.slice(source)))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Copy, Clone)]
|
||||||
|
struct IdentifierShape;
|
||||||
|
|
||||||
|
impl ExpandSyntax for IdentifierShape {
|
||||||
|
type Output = Identifier;
|
||||||
|
fn name(&self) -> &'static str {
|
||||||
|
"identifier"
|
||||||
|
}
|
||||||
|
|
||||||
|
fn expand_syntax<'a, 'b>(
|
||||||
|
&self,
|
||||||
|
token_nodes: &'b mut TokensIterator<'a>,
|
||||||
|
context: &ExpandContext,
|
||||||
|
) -> Result<Self::Output, ParseError> {
|
||||||
|
let atom = expand_atom(token_nodes, "identifier", context, ExpansionRule::new())?;
|
||||||
|
|
||||||
|
match atom.unspanned {
|
||||||
|
UnspannedAtomicToken::Word { text } => {
|
||||||
|
let body = text.slice(context.source());
|
||||||
|
if is_id(body) {
|
||||||
|
return Ok(Identifier {
|
||||||
|
body: body.to_string(),
|
||||||
|
span: text,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
_ => {}
|
||||||
|
}
|
||||||
|
|
||||||
|
Err(ParseError::mismatch("identifier", atom.spanned_type_name()))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn is_id(input: &str) -> bool {
|
||||||
|
let source = nu_source::nom_input(input);
|
||||||
|
match crate::parse::parser::ident(source) {
|
||||||
|
Err(_) => false,
|
||||||
|
Ok((input, _)) => input.fragment.len() == 0,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Clone, new)]
|
||||||
|
struct TypeSyntax {
|
||||||
|
ty: Type,
|
||||||
|
span: Span,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl HasSpan for TypeSyntax {
|
||||||
|
fn span(&self) -> Span {
|
||||||
|
self.span
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl PrettyDebugWithSource for TypeSyntax {
|
||||||
|
fn pretty_debug(&self, source: &str) -> DebugDocBuilder {
|
||||||
|
self.ty.pretty_debug(source)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Copy, Clone)]
|
||||||
|
struct TypeShape;
|
||||||
|
|
||||||
|
impl ExpandSyntax for TypeShape {
|
||||||
|
type Output = TypeSyntax;
|
||||||
|
|
||||||
|
fn name(&self) -> &'static str {
|
||||||
|
"type"
|
||||||
|
}
|
||||||
|
fn expand_syntax<'a, 'b>(
|
||||||
|
&self,
|
||||||
|
token_nodes: &'b mut TokensIterator<'a>,
|
||||||
|
context: &ExpandContext,
|
||||||
|
) -> Result<Self::Output, ParseError> {
|
||||||
|
let atom = expand_atom(token_nodes, "type", context, ExpansionRule::new())?;
|
||||||
|
|
||||||
|
match atom.unspanned {
|
||||||
|
UnspannedAtomicToken::Word { text } => {
|
||||||
|
let word = text.slice(context.source());
|
||||||
|
|
||||||
|
Ok(TypeSyntax::new(
|
||||||
|
match word {
|
||||||
|
"nothing" => Type::Nothing,
|
||||||
|
"integer" => Type::Int,
|
||||||
|
"decimal" => Type::Decimal,
|
||||||
|
"bytesize" => Type::Bytesize,
|
||||||
|
"string" => Type::String,
|
||||||
|
"column-path" => Type::ColumnPath,
|
||||||
|
"pattern" => Type::Pattern,
|
||||||
|
"boolean" => Type::Boolean,
|
||||||
|
"date" => Type::Date,
|
||||||
|
"duration" => Type::Duration,
|
||||||
|
"filename" => Type::Path,
|
||||||
|
"binary" => Type::Binary,
|
||||||
|
"row" => Type::Row(RowType::new()),
|
||||||
|
"table" => Type::Table(vec![]),
|
||||||
|
"block" => Type::Block,
|
||||||
|
_ => return Err(ParseError::mismatch("type", atom.spanned_type_name())),
|
||||||
|
},
|
||||||
|
atom.span,
|
||||||
|
))
|
||||||
|
}
|
||||||
|
_ => Err(ParseError::mismatch("type", atom.spanned_type_name())),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Copy, Clone)]
|
||||||
|
struct TypeAnnotation;
|
||||||
|
|
||||||
|
impl ExpandSyntax for TypeAnnotation {
|
||||||
|
type Output = TypeSyntax;
|
||||||
|
|
||||||
|
fn name(&self) -> &'static str {
|
||||||
|
"type annotation"
|
||||||
|
}
|
||||||
|
|
||||||
|
fn expand_syntax<'a, 'b>(
|
||||||
|
&self,
|
||||||
|
token_nodes: &'b mut TokensIterator<'a>,
|
||||||
|
context: &ExpandContext,
|
||||||
|
) -> Result<Self::Output, ParseError> {
|
||||||
|
let atom = expand_atom(
|
||||||
|
token_nodes,
|
||||||
|
"type annotation",
|
||||||
|
context,
|
||||||
|
ExpansionRule::new(),
|
||||||
|
)?;
|
||||||
|
|
||||||
|
match atom.unspanned {
|
||||||
|
UnspannedAtomicToken::RoundDelimited { nodes, .. } => {
|
||||||
|
token_nodes.atomic_parse(|token_nodes| {
|
||||||
|
token_nodes.child(
|
||||||
|
(&nodes[..]).spanned(atom.span),
|
||||||
|
context.source().clone(),
|
||||||
|
|token_nodes| {
|
||||||
|
let ty = expand_syntax(&TypeShape, token_nodes, context)?;
|
||||||
|
|
||||||
|
let next = token_nodes.peek_non_ws();
|
||||||
|
|
||||||
|
match next.node {
|
||||||
|
None => Ok(ty),
|
||||||
|
Some(node) => {
|
||||||
|
Err(ParseError::extra_tokens(node.spanned_type_name()))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
)
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
_ => Err(ParseError::mismatch(
|
||||||
|
"type annotation",
|
||||||
|
atom.spanned_type_name(),
|
||||||
|
)),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
@ -1,23 +1,27 @@
|
|||||||
mod block;
|
mod block;
|
||||||
mod expression;
|
mod expression;
|
||||||
pub(crate) mod flat_shape;
|
pub mod flat_shape;
|
||||||
|
|
||||||
use crate::cli::external_command;
|
use crate::commands::classified::internal::InternalCommand;
|
||||||
use crate::commands::{
|
use crate::commands::classified::{ClassifiedCommand, ClassifiedPipeline};
|
||||||
classified::{ClassifiedPipeline, InternalCommand},
|
use crate::commands::external_command;
|
||||||
ClassifiedCommand, Command,
|
use crate::hir;
|
||||||
};
|
use crate::hir::expand_external_tokens::ExternalTokensShape;
|
||||||
use crate::parser::hir::expand_external_tokens::ExternalTokensShape;
|
use crate::hir::syntax_shape::block::AnyBlockShape;
|
||||||
use crate::parser::hir::syntax_shape::block::AnyBlockShape;
|
use crate::hir::syntax_shape::expression::range::RangeShape;
|
||||||
use crate::parser::hir::tokens_iterator::Peeked;
|
use crate::hir::tokens_iterator::{Peeked, TokensIterator};
|
||||||
use crate::parser::parse::tokens::Token;
|
use crate::parse::operator::EvaluationOperator;
|
||||||
use crate::parser::parse_command::{parse_command_tail, CommandTailShape};
|
use crate::parse::token_tree::TokenNode;
|
||||||
use crate::parser::{hir, hir::TokensIterator, Operator, TokenNode, UnspannedToken};
|
use crate::parse::tokens::{Token, UnspannedToken};
|
||||||
use crate::prelude::*;
|
use crate::parse_command::{parse_command_tail, CommandTailShape};
|
||||||
use derive_new::new;
|
use derive_new::new;
|
||||||
use getset::Getters;
|
use getset::Getters;
|
||||||
use nu_source::Spanned;
|
use nu_errors::{ParseError, ShellError};
|
||||||
use serde::{Deserialize, Serialize};
|
use nu_protocol::{ShellTypeName, Signature};
|
||||||
|
use nu_source::{
|
||||||
|
b, DebugDocBuilder, HasFallibleSpan, HasSpan, PrettyDebug, PrettyDebugWithSource, Span,
|
||||||
|
Spanned, SpannedItem, Tag, TaggedItem, Text,
|
||||||
|
};
|
||||||
use std::path::{Path, PathBuf};
|
use std::path::{Path, PathBuf};
|
||||||
|
|
||||||
pub(crate) use self::expression::atom::{
|
pub(crate) use self::expression::atom::{
|
||||||
@ -40,89 +44,8 @@ pub(crate) use self::expression::variable_path::{
|
|||||||
pub(crate) use self::expression::{continue_expression, AnyExpressionShape};
|
pub(crate) use self::expression::{continue_expression, AnyExpressionShape};
|
||||||
pub(crate) use self::flat_shape::FlatShape;
|
pub(crate) use self::flat_shape::FlatShape;
|
||||||
|
|
||||||
#[cfg(not(coloring_in_tokens))]
|
use nu_protocol::SyntaxShape;
|
||||||
use crate::parser::hir::tokens_iterator::debug::debug_tokens;
|
|
||||||
#[cfg(not(coloring_in_tokens))]
|
|
||||||
use crate::parser::parse::pipeline::Pipeline;
|
|
||||||
#[cfg(not(coloring_in_tokens))]
|
|
||||||
use log::{log_enabled, trace};
|
|
||||||
|
|
||||||
#[derive(Debug, Copy, Clone, Serialize, Deserialize)]
|
|
||||||
pub enum SyntaxShape {
|
|
||||||
Any,
|
|
||||||
String,
|
|
||||||
Member,
|
|
||||||
ColumnPath,
|
|
||||||
Number,
|
|
||||||
Int,
|
|
||||||
Path,
|
|
||||||
Pattern,
|
|
||||||
Block,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl PrettyDebug for SyntaxShape {
|
|
||||||
fn pretty(&self) -> DebugDocBuilder {
|
|
||||||
b::kind(match self {
|
|
||||||
SyntaxShape::Any => "any shape",
|
|
||||||
SyntaxShape::String => "string shape",
|
|
||||||
SyntaxShape::Member => "member shape",
|
|
||||||
SyntaxShape::ColumnPath => "column path shape",
|
|
||||||
SyntaxShape::Number => "number shape",
|
|
||||||
SyntaxShape::Int => "integer shape",
|
|
||||||
SyntaxShape::Path => "file path shape",
|
|
||||||
SyntaxShape::Pattern => "pattern shape",
|
|
||||||
SyntaxShape::Block => "block shape",
|
|
||||||
})
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[cfg(not(coloring_in_tokens))]
|
|
||||||
impl FallibleColorSyntax for SyntaxShape {
|
|
||||||
type Info = ();
|
|
||||||
type Input = ();
|
|
||||||
|
|
||||||
fn color_syntax<'a, 'b>(
|
|
||||||
&self,
|
|
||||||
_input: &(),
|
|
||||||
token_nodes: &'b mut TokensIterator<'a>,
|
|
||||||
context: &ExpandContext,
|
|
||||||
shapes: &mut Vec<Spanned<FlatShape>>,
|
|
||||||
) -> Result<(), ShellError> {
|
|
||||||
match self {
|
|
||||||
SyntaxShape::Any => {
|
|
||||||
color_fallible_syntax(&AnyExpressionShape, token_nodes, context, shapes)
|
|
||||||
}
|
|
||||||
SyntaxShape::Int => color_fallible_syntax(&IntShape, token_nodes, context, shapes),
|
|
||||||
SyntaxShape::String => color_fallible_syntax_with(
|
|
||||||
&StringShape,
|
|
||||||
&FlatShape::String,
|
|
||||||
token_nodes,
|
|
||||||
context,
|
|
||||||
shapes,
|
|
||||||
),
|
|
||||||
SyntaxShape::Member => {
|
|
||||||
color_fallible_syntax(&MemberShape, token_nodes, context, shapes)
|
|
||||||
}
|
|
||||||
SyntaxShape::ColumnPath => {
|
|
||||||
color_fallible_syntax(&ColumnPathShape, token_nodes, context, shapes)
|
|
||||||
}
|
|
||||||
SyntaxShape::Number => {
|
|
||||||
color_fallible_syntax(&NumberShape, token_nodes, context, shapes)
|
|
||||||
}
|
|
||||||
SyntaxShape::Path => {
|
|
||||||
color_fallible_syntax(&FilePathShape, token_nodes, context, shapes)
|
|
||||||
}
|
|
||||||
SyntaxShape::Pattern => {
|
|
||||||
color_fallible_syntax(&PatternShape, token_nodes, context, shapes)
|
|
||||||
}
|
|
||||||
SyntaxShape::Block => {
|
|
||||||
color_fallible_syntax(&AnyBlockShape, token_nodes, context, shapes)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[cfg(coloring_in_tokens)]
|
|
||||||
impl FallibleColorSyntax for SyntaxShape {
|
impl FallibleColorSyntax for SyntaxShape {
|
||||||
type Info = ();
|
type Info = ();
|
||||||
type Input = ();
|
type Input = ();
|
||||||
@ -140,6 +63,7 @@ impl FallibleColorSyntax for SyntaxShape {
|
|||||||
match self {
|
match self {
|
||||||
SyntaxShape::Any => color_fallible_syntax(&AnyExpressionShape, token_nodes, context),
|
SyntaxShape::Any => color_fallible_syntax(&AnyExpressionShape, token_nodes, context),
|
||||||
SyntaxShape::Int => color_fallible_syntax(&IntShape, token_nodes, context),
|
SyntaxShape::Int => color_fallible_syntax(&IntShape, token_nodes, context),
|
||||||
|
SyntaxShape::Range => color_fallible_syntax(&RangeShape, token_nodes, context),
|
||||||
SyntaxShape::String => {
|
SyntaxShape::String => {
|
||||||
color_fallible_syntax_with(&StringShape, &FlatShape::String, token_nodes, context)
|
color_fallible_syntax_with(&StringShape, &FlatShape::String, token_nodes, context)
|
||||||
}
|
}
|
||||||
@ -160,6 +84,7 @@ impl ExpandExpression for SyntaxShape {
|
|||||||
match self {
|
match self {
|
||||||
SyntaxShape::Any => "shape[any]",
|
SyntaxShape::Any => "shape[any]",
|
||||||
SyntaxShape::Int => "shape[integer]",
|
SyntaxShape::Int => "shape[integer]",
|
||||||
|
SyntaxShape::Range => "shape[range]",
|
||||||
SyntaxShape::String => "shape[string]",
|
SyntaxShape::String => "shape[string]",
|
||||||
SyntaxShape::Member => "shape[column name]",
|
SyntaxShape::Member => "shape[column name]",
|
||||||
SyntaxShape::ColumnPath => "shape[column path]",
|
SyntaxShape::ColumnPath => "shape[column path]",
|
||||||
@ -178,6 +103,7 @@ impl ExpandExpression for SyntaxShape {
|
|||||||
match self {
|
match self {
|
||||||
SyntaxShape::Any => expand_expr(&AnyExpressionShape, token_nodes, context),
|
SyntaxShape::Any => expand_expr(&AnyExpressionShape, token_nodes, context),
|
||||||
SyntaxShape::Int => expand_expr(&IntShape, token_nodes, context),
|
SyntaxShape::Int => expand_expr(&IntShape, token_nodes, context),
|
||||||
|
SyntaxShape::Range => expand_expr(&RangeShape, token_nodes, context),
|
||||||
SyntaxShape::String => expand_expr(&StringShape, token_nodes, context),
|
SyntaxShape::String => expand_expr(&StringShape, token_nodes, context),
|
||||||
SyntaxShape::Member => {
|
SyntaxShape::Member => {
|
||||||
let syntax = expand_syntax(&MemberShape, token_nodes, context)?;
|
let syntax = expand_syntax(&MemberShape, token_nodes, context)?;
|
||||||
@ -200,13 +126,17 @@ impl ExpandExpression for SyntaxShape {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
pub trait SignatureRegistry {
|
||||||
|
fn has(&self, name: &str) -> bool;
|
||||||
|
fn get(&self, name: &str) -> Option<Signature>;
|
||||||
|
}
|
||||||
|
|
||||||
#[derive(Getters, new)]
|
#[derive(Getters, new)]
|
||||||
pub struct ExpandContext<'context> {
|
pub struct ExpandContext<'context> {
|
||||||
#[get = "pub(crate)"]
|
#[get = "pub(crate)"]
|
||||||
registry: &'context CommandRegistry,
|
pub registry: Box<dyn SignatureRegistry>,
|
||||||
#[get = "pub(crate)"]
|
pub source: &'context Text,
|
||||||
source: &'context Text,
|
pub homedir: Option<PathBuf>,
|
||||||
homedir: Option<PathBuf>,
|
|
||||||
}
|
}
|
||||||
|
|
||||||
impl<'context> ExpandContext<'context> {
|
impl<'context> ExpandContext<'context> {
|
||||||
@ -214,19 +144,8 @@ impl<'context> ExpandContext<'context> {
|
|||||||
self.homedir.as_ref().map(|h| h.as_path())
|
self.homedir.as_ref().map(|h| h.as_path())
|
||||||
}
|
}
|
||||||
|
|
||||||
#[cfg(test)]
|
pub(crate) fn source(&self) -> &'context Text {
|
||||||
pub fn with_empty(source: &Text, callback: impl FnOnce(ExpandContext)) {
|
self.source
|
||||||
let mut registry = CommandRegistry::new();
|
|
||||||
registry.insert(
|
|
||||||
"ls",
|
|
||||||
crate::commands::whole_stream_command(crate::commands::LS),
|
|
||||||
);
|
|
||||||
|
|
||||||
callback(ExpandContext {
|
|
||||||
registry: ®istry,
|
|
||||||
source,
|
|
||||||
homedir: None,
|
|
||||||
})
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -248,7 +167,6 @@ pub trait ExpandExpression: std::fmt::Debug + Copy {
|
|||||||
) -> Result<hir::Expression, ParseError>;
|
) -> Result<hir::Expression, ParseError>;
|
||||||
}
|
}
|
||||||
|
|
||||||
#[cfg(coloring_in_tokens)]
|
|
||||||
pub trait FallibleColorSyntax: std::fmt::Debug + Copy {
|
pub trait FallibleColorSyntax: std::fmt::Debug + Copy {
|
||||||
type Info;
|
type Info;
|
||||||
type Input;
|
type Input;
|
||||||
@ -263,35 +181,6 @@ pub trait FallibleColorSyntax: std::fmt::Debug + Copy {
|
|||||||
) -> Result<Self::Info, ShellError>;
|
) -> Result<Self::Info, ShellError>;
|
||||||
}
|
}
|
||||||
|
|
||||||
#[cfg(not(coloring_in_tokens))]
|
|
||||||
pub trait FallibleColorSyntax: std::fmt::Debug + Copy {
|
|
||||||
type Info;
|
|
||||||
type Input;
|
|
||||||
|
|
||||||
fn color_syntax<'a, 'b>(
|
|
||||||
&self,
|
|
||||||
input: &Self::Input,
|
|
||||||
token_nodes: &'b mut TokensIterator<'a>,
|
|
||||||
context: &ExpandContext,
|
|
||||||
shapes: &mut Vec<Spanned<FlatShape>>,
|
|
||||||
) -> Result<Self::Info, ShellError>;
|
|
||||||
}
|
|
||||||
|
|
||||||
#[cfg(not(coloring_in_tokens))]
|
|
||||||
pub trait ColorSyntax: std::fmt::Debug + Copy {
|
|
||||||
type Info;
|
|
||||||
type Input;
|
|
||||||
|
|
||||||
fn color_syntax<'a, 'b>(
|
|
||||||
&self,
|
|
||||||
input: &Self::Input,
|
|
||||||
token_nodes: &'b mut TokensIterator<'a>,
|
|
||||||
context: &ExpandContext,
|
|
||||||
shapes: &mut Vec<Spanned<FlatShape>>,
|
|
||||||
) -> Self::Info;
|
|
||||||
}
|
|
||||||
|
|
||||||
#[cfg(coloring_in_tokens)]
|
|
||||||
pub trait ColorSyntax: std::fmt::Debug + Copy {
|
pub trait ColorSyntax: std::fmt::Debug + Copy {
|
||||||
type Info;
|
type Info;
|
||||||
type Input;
|
type Input;
|
||||||
@ -306,7 +195,7 @@ pub trait ColorSyntax: std::fmt::Debug + Copy {
|
|||||||
) -> Self::Info;
|
) -> Self::Info;
|
||||||
}
|
}
|
||||||
|
|
||||||
pub(crate) trait ExpandSyntax: std::fmt::Debug + Copy {
|
pub trait ExpandSyntax: std::fmt::Debug + Copy {
|
||||||
type Output: HasFallibleSpan + Clone + std::fmt::Debug + 'static;
|
type Output: HasFallibleSpan + Clone + std::fmt::Debug + 'static;
|
||||||
|
|
||||||
fn name(&self) -> &'static str;
|
fn name(&self) -> &'static str;
|
||||||
@ -318,7 +207,7 @@ pub(crate) trait ExpandSyntax: std::fmt::Debug + Copy {
|
|||||||
) -> Result<Self::Output, ParseError>;
|
) -> Result<Self::Output, ParseError>;
|
||||||
}
|
}
|
||||||
|
|
||||||
pub(crate) fn expand_syntax<'a, 'b, T: ExpandSyntax>(
|
pub fn expand_syntax<'a, 'b, T: ExpandSyntax>(
|
||||||
shape: &T,
|
shape: &T,
|
||||||
token_nodes: &'b mut TokensIterator<'a>,
|
token_nodes: &'b mut TokensIterator<'a>,
|
||||||
context: &ExpandContext,
|
context: &ExpandContext,
|
||||||
@ -338,7 +227,6 @@ pub(crate) fn expand_expr<'a, 'b, T: ExpandExpression>(
|
|||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
#[cfg(coloring_in_tokens)]
|
|
||||||
pub fn color_syntax<'a, 'b, T: ColorSyntax<Info = U, Input = ()>, U>(
|
pub fn color_syntax<'a, 'b, T: ColorSyntax<Info = U, Input = ()>, U>(
|
||||||
shape: &T,
|
shape: &T,
|
||||||
token_nodes: &'b mut TokensIterator<'a>,
|
token_nodes: &'b mut TokensIterator<'a>,
|
||||||
@ -352,70 +240,6 @@ pub fn color_syntax<'a, 'b, T: ColorSyntax<Info = U, Input = ()>, U>(
|
|||||||
)
|
)
|
||||||
}
|
}
|
||||||
|
|
||||||
#[cfg(not(coloring_in_tokens))]
|
|
||||||
pub fn color_syntax<'a, 'b, T: ColorSyntax<Info = U, Input = ()>, U>(
|
|
||||||
shape: &T,
|
|
||||||
token_nodes: &'b mut TokensIterator<'a>,
|
|
||||||
context: &ExpandContext,
|
|
||||||
shapes: &mut Vec<Spanned<FlatShape>>,
|
|
||||||
) -> ((), U) {
|
|
||||||
trace!(target: "nu::color_syntax", "before {} :: {:?}", std::any::type_name::<T>(), debug_tokens(token_nodes.state(), context.source));
|
|
||||||
|
|
||||||
let len = shapes.len();
|
|
||||||
let result = shape.color_syntax(&(), token_nodes, context, shapes);
|
|
||||||
|
|
||||||
trace!(target: "nu::color_syntax", "ok :: {:?}", debug_tokens(token_nodes.state(), context.source));
|
|
||||||
|
|
||||||
if log_enabled!(target: "nu::color_syntax", log::Level::Trace) {
|
|
||||||
trace!(target: "nu::color_syntax", "after {}", std::any::type_name::<T>());
|
|
||||||
|
|
||||||
if len < shapes.len() {
|
|
||||||
for i in len..(shapes.len()) {
|
|
||||||
trace!(target: "nu::color_syntax", "new shape :: {:?}", shapes[i]);
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
trace!(target: "nu::color_syntax", "no new shapes");
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
((), result)
|
|
||||||
}
|
|
||||||
|
|
||||||
#[cfg(not(coloring_in_tokens))]
|
|
||||||
pub fn color_fallible_syntax<'a, 'b, T: FallibleColorSyntax<Info = U, Input = ()>, U>(
|
|
||||||
shape: &T,
|
|
||||||
token_nodes: &'b mut TokensIterator<'a>,
|
|
||||||
context: &ExpandContext,
|
|
||||||
shapes: &mut Vec<Spanned<FlatShape>>,
|
|
||||||
) -> Result<U, ShellError> {
|
|
||||||
trace!(target: "nu::color_syntax", "before {} :: {:?}", std::any::type_name::<T>(), debug_tokens(token_nodes.state(), context.source));
|
|
||||||
|
|
||||||
if token_nodes.at_end() {
|
|
||||||
trace!(target: "nu::color_syntax", "at eof");
|
|
||||||
return Err(ShellError::unexpected_eof("coloring", Tag::unknown()));
|
|
||||||
}
|
|
||||||
|
|
||||||
let len = shapes.len();
|
|
||||||
let result = shape.color_syntax(&(), token_nodes, context, shapes);
|
|
||||||
|
|
||||||
trace!(target: "nu::color_syntax", "ok :: {:?}", debug_tokens(token_nodes.state(), context.source));
|
|
||||||
|
|
||||||
if log_enabled!(target: "nu::color_syntax", log::Level::Trace) {
|
|
||||||
trace!(target: "nu::color_syntax", "after {}", std::any::type_name::<T>());
|
|
||||||
|
|
||||||
if len < shapes.len() {
|
|
||||||
for i in len..(shapes.len()) {
|
|
||||||
trace!(target: "nu::color_syntax", "new shape :: {:?}", shapes[i]);
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
trace!(target: "nu::color_syntax", "no new shapes");
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
result
|
|
||||||
}
|
|
||||||
|
|
||||||
#[cfg(coloring_in_tokens)]
|
|
||||||
pub fn color_fallible_syntax<'a, 'b, T: FallibleColorSyntax<Info = U, Input = ()>, U>(
|
pub fn color_fallible_syntax<'a, 'b, T: FallibleColorSyntax<Info = U, Input = ()>, U>(
|
||||||
shape: &T,
|
shape: &T,
|
||||||
token_nodes: &'b mut TokensIterator<'a>,
|
token_nodes: &'b mut TokensIterator<'a>,
|
||||||
@ -426,37 +250,6 @@ pub fn color_fallible_syntax<'a, 'b, T: FallibleColorSyntax<Info = U, Input = ()
|
|||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
#[cfg(not(coloring_in_tokens))]
|
|
||||||
pub fn color_syntax_with<'a, 'b, T: ColorSyntax<Info = U, Input = I>, U, I>(
|
|
||||||
shape: &T,
|
|
||||||
input: &I,
|
|
||||||
token_nodes: &'b mut TokensIterator<'a>,
|
|
||||||
context: &ExpandContext,
|
|
||||||
shapes: &mut Vec<Spanned<FlatShape>>,
|
|
||||||
) -> ((), U) {
|
|
||||||
trace!(target: "nu::color_syntax", "before {} :: {:?}", std::any::type_name::<T>(), debug_tokens(token_nodes.state(), context.source));
|
|
||||||
|
|
||||||
let len = shapes.len();
|
|
||||||
let result = shape.color_syntax(input, token_nodes, context, shapes);
|
|
||||||
|
|
||||||
trace!(target: "nu::color_syntax", "ok :: {:?}", debug_tokens(token_nodes.state(), context.source));
|
|
||||||
|
|
||||||
if log_enabled!(target: "nu::color_syntax", log::Level::Trace) {
|
|
||||||
trace!(target: "nu::color_syntax", "after {}", std::any::type_name::<T>());
|
|
||||||
|
|
||||||
if len < shapes.len() {
|
|
||||||
for i in len..(shapes.len()) {
|
|
||||||
trace!(target: "nu::color_syntax", "new shape :: {:?}", shapes[i]);
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
trace!(target: "nu::color_syntax", "no new shapes");
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
((), result)
|
|
||||||
}
|
|
||||||
|
|
||||||
#[cfg(coloring_in_tokens)]
|
|
||||||
pub fn color_syntax_with<'a, 'b, T: ColorSyntax<Info = U, Input = I>, U, I>(
|
pub fn color_syntax_with<'a, 'b, T: ColorSyntax<Info = U, Input = I>, U, I>(
|
||||||
shape: &T,
|
shape: &T,
|
||||||
input: &I,
|
input: &I,
|
||||||
@ -471,20 +264,6 @@ pub fn color_syntax_with<'a, 'b, T: ColorSyntax<Info = U, Input = I>, U, I>(
|
|||||||
)
|
)
|
||||||
}
|
}
|
||||||
|
|
||||||
#[cfg(not(coloring_in_tokens))]
|
|
||||||
pub fn color_fallible_syntax_with<'a, 'b, T: FallibleColorSyntax<Info = U, Input = I>, U, I>(
|
|
||||||
shape: &T,
|
|
||||||
input: &I,
|
|
||||||
token_nodes: &'b mut TokensIterator<'a>,
|
|
||||||
context: &ExpandContext,
|
|
||||||
shapes: &mut Vec<Spanned<FlatShape>>,
|
|
||||||
) -> Result<U, ShellError> {
|
|
||||||
token_nodes.color_fallible_frame(std::any::type_name::<T>(), |token_nodes| {
|
|
||||||
shape.color_syntax(input, token_nodes, context, shapes)
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
#[cfg(coloring_in_tokens)]
|
|
||||||
pub fn color_fallible_syntax_with<'a, 'b, T: FallibleColorSyntax<Info = U, Input = I>, U, I>(
|
pub fn color_fallible_syntax_with<'a, 'b, T: FallibleColorSyntax<Info = U, Input = I>, U, I>(
|
||||||
shape: &T,
|
shape: &T,
|
||||||
input: &I,
|
input: &I,
|
||||||
@ -604,7 +383,7 @@ impl ExpandSyntax for BarePathShape {
|
|||||||
..
|
..
|
||||||
})
|
})
|
||||||
| TokenNode::Token(Token {
|
| TokenNode::Token(Token {
|
||||||
unspanned: UnspannedToken::Operator(Operator::Dot),
|
unspanned: UnspannedToken::EvaluationOperator(EvaluationOperator::Dot),
|
||||||
..
|
..
|
||||||
}) => true,
|
}) => true,
|
||||||
|
|
||||||
@ -616,37 +395,6 @@ impl ExpandSyntax for BarePathShape {
|
|||||||
#[derive(Debug, Copy, Clone)]
|
#[derive(Debug, Copy, Clone)]
|
||||||
pub struct BareShape;
|
pub struct BareShape;
|
||||||
|
|
||||||
#[cfg(not(coloring_in_tokens))]
|
|
||||||
impl FallibleColorSyntax for BareShape {
|
|
||||||
type Info = ();
|
|
||||||
type Input = FlatShape;
|
|
||||||
|
|
||||||
fn color_syntax<'a, 'b>(
|
|
||||||
&self,
|
|
||||||
input: &FlatShape,
|
|
||||||
token_nodes: &'b mut TokensIterator<'a>,
|
|
||||||
_context: &ExpandContext,
|
|
||||||
shapes: &mut Vec<Spanned<FlatShape>>,
|
|
||||||
) -> Result<(), ShellError> {
|
|
||||||
token_nodes
|
|
||||||
.peek_any_token("word", |token| match token {
|
|
||||||
// If it's a bare token, color it
|
|
||||||
TokenNode::Token(Token {
|
|
||||||
unspanned: UnspannedToken::Bare,
|
|
||||||
span,
|
|
||||||
}) => {
|
|
||||||
shapes.push((*input).spanned(*span));
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
|
|
||||||
// otherwise, fail
|
|
||||||
other => Err(ParseError::mismatch("word", other.spanned_type_name())),
|
|
||||||
})
|
|
||||||
.map_err(|err| err.into())
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[cfg(coloring_in_tokens)]
|
|
||||||
impl FallibleColorSyntax for BareShape {
|
impl FallibleColorSyntax for BareShape {
|
||||||
type Info = ();
|
type Info = ();
|
||||||
type Input = FlatShape;
|
type Input = FlatShape;
|
||||||
@ -747,7 +495,7 @@ impl TestSyntax for BareShape {
|
|||||||
|
|
||||||
#[derive(Debug, Clone)]
|
#[derive(Debug, Clone)]
|
||||||
pub enum CommandSignature {
|
pub enum CommandSignature {
|
||||||
Internal(Spanned<Arc<Command>>),
|
Internal(Spanned<Signature>),
|
||||||
LiteralExternal { outer: Span, inner: Span },
|
LiteralExternal { outer: Span, inner: Span },
|
||||||
External(Span),
|
External(Span),
|
||||||
Expression(hir::Expression),
|
Expression(hir::Expression),
|
||||||
@ -757,7 +505,7 @@ impl PrettyDebugWithSource for CommandSignature {
|
|||||||
fn pretty_debug(&self, source: &str) -> DebugDocBuilder {
|
fn pretty_debug(&self, source: &str) -> DebugDocBuilder {
|
||||||
match self {
|
match self {
|
||||||
CommandSignature::Internal(internal) => {
|
CommandSignature::Internal(internal) => {
|
||||||
b::typed("command", b::description(internal.name()))
|
b::typed("command", b::description(&internal.name))
|
||||||
}
|
}
|
||||||
CommandSignature::LiteralExternal { outer, .. } => {
|
CommandSignature::LiteralExternal { outer, .. } => {
|
||||||
b::typed("command", b::description(outer.slice(source)))
|
b::typed("command", b::description(outer.slice(source)))
|
||||||
@ -805,43 +553,6 @@ impl CommandSignature {
|
|||||||
#[derive(Debug, Copy, Clone)]
|
#[derive(Debug, Copy, Clone)]
|
||||||
pub struct PipelineShape;
|
pub struct PipelineShape;
|
||||||
|
|
||||||
#[cfg(not(coloring_in_tokens))]
|
|
||||||
// The failure mode is if the head of the token stream is not a pipeline
|
|
||||||
impl FallibleColorSyntax for PipelineShape {
|
|
||||||
type Info = ();
|
|
||||||
type Input = ();
|
|
||||||
|
|
||||||
fn color_syntax<'a, 'b>(
|
|
||||||
&self,
|
|
||||||
_input: &(),
|
|
||||||
token_nodes: &'b mut TokensIterator<'a>,
|
|
||||||
context: &ExpandContext,
|
|
||||||
shapes: &mut Vec<Spanned<FlatShape>>,
|
|
||||||
) -> Result<(), ShellError> {
|
|
||||||
// Make sure we're looking at a pipeline
|
|
||||||
let Pipeline { parts, .. } =
|
|
||||||
token_nodes.peek_any_token("pipeline", |node| node.as_pipeline())?;
|
|
||||||
|
|
||||||
// Enumerate the pipeline parts
|
|
||||||
for part in parts {
|
|
||||||
// If the pipeline part has a prefix `|`, emit a pipe to color
|
|
||||||
if let Some(pipe) = part.pipe {
|
|
||||||
shapes.push(FlatShape::Pipe.spanned(pipe));
|
|
||||||
}
|
|
||||||
|
|
||||||
// Create a new iterator containing the tokens in the pipeline part to color
|
|
||||||
let mut token_nodes =
|
|
||||||
TokensIterator::new(&part.tokens(), part.span(), context.source.clone(), false);
|
|
||||||
|
|
||||||
color_syntax(&MaybeSpaceShape, &mut token_nodes, context, shapes);
|
|
||||||
color_syntax(&CommandShape, &mut token_nodes, context, shapes);
|
|
||||||
}
|
|
||||||
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[cfg(coloring_in_tokens)]
|
|
||||||
// The failure mode is if the head of the token stream is not a pipeline
|
// The failure mode is if the head of the token stream is not a pipeline
|
||||||
impl FallibleColorSyntax for PipelineShape {
|
impl FallibleColorSyntax for PipelineShape {
|
||||||
type Info = ();
|
type Info = ();
|
||||||
@ -881,46 +592,6 @@ impl FallibleColorSyntax for PipelineShape {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
#[cfg(coloring_in_tokens)]
|
|
||||||
impl ExpandSyntax for PipelineShape {
|
|
||||||
type Output = ClassifiedPipeline;
|
|
||||||
|
|
||||||
fn name(&self) -> &'static str {
|
|
||||||
"pipeline"
|
|
||||||
}
|
|
||||||
|
|
||||||
fn expand_syntax<'content, 'me>(
|
|
||||||
&self,
|
|
||||||
iterator: &'me mut TokensIterator<'content>,
|
|
||||||
context: &ExpandContext,
|
|
||||||
) -> Result<Self::Output, ParseError> {
|
|
||||||
let start = iterator.span_at_cursor();
|
|
||||||
|
|
||||||
let peeked = iterator.peek_any().not_eof("pipeline")?;
|
|
||||||
let pipeline = peeked.commit().as_pipeline()?;
|
|
||||||
|
|
||||||
let parts = &pipeline.parts[..];
|
|
||||||
|
|
||||||
let mut out = vec![];
|
|
||||||
|
|
||||||
for part in parts {
|
|
||||||
let tokens: Spanned<&[TokenNode]> = part.tokens().spanned(part.span());
|
|
||||||
|
|
||||||
let classified =
|
|
||||||
iterator.child(tokens, context.source.clone(), move |token_nodes| {
|
|
||||||
expand_syntax(&ClassifiedCommandShape, token_nodes, context)
|
|
||||||
})?;
|
|
||||||
|
|
||||||
out.push(classified);
|
|
||||||
}
|
|
||||||
|
|
||||||
let end = iterator.span_at_cursor();
|
|
||||||
|
|
||||||
Ok(ClassifiedPipeline::commands(out, start.until(end)))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[cfg(not(coloring_in_tokens))]
|
|
||||||
impl ExpandSyntax for PipelineShape {
|
impl ExpandSyntax for PipelineShape {
|
||||||
type Output = ClassifiedPipeline;
|
type Output = ClassifiedPipeline;
|
||||||
|
|
||||||
@ -967,61 +638,6 @@ pub enum CommandHeadKind {
|
|||||||
#[derive(Debug, Copy, Clone)]
|
#[derive(Debug, Copy, Clone)]
|
||||||
pub struct CommandHeadShape;
|
pub struct CommandHeadShape;
|
||||||
|
|
||||||
#[cfg(not(coloring_in_tokens))]
|
|
||||||
impl FallibleColorSyntax for CommandHeadShape {
|
|
||||||
type Info = CommandHeadKind;
|
|
||||||
type Input = ();
|
|
||||||
|
|
||||||
fn color_syntax<'a, 'b>(
|
|
||||||
&self,
|
|
||||||
_input: &(),
|
|
||||||
token_nodes: &'b mut TokensIterator<'a>,
|
|
||||||
context: &ExpandContext,
|
|
||||||
shapes: &mut Vec<Spanned<FlatShape>>,
|
|
||||||
) -> Result<CommandHeadKind, ShellError> {
|
|
||||||
// If we don't ultimately find a token, roll back
|
|
||||||
token_nodes.atomic(|token_nodes| {
|
|
||||||
// First, take a look at the next token
|
|
||||||
let atom = expand_atom(
|
|
||||||
token_nodes,
|
|
||||||
"command head",
|
|
||||||
context,
|
|
||||||
ExpansionRule::permissive(),
|
|
||||||
)?;
|
|
||||||
|
|
||||||
match &atom.unspanned {
|
|
||||||
// If the head is an explicit external command (^cmd), color it as an external command
|
|
||||||
UnspannedAtomicToken::ExternalCommand { .. } => {
|
|
||||||
shapes.push(FlatShape::ExternalCommand.spanned(atom.span));
|
|
||||||
Ok(CommandHeadKind::External)
|
|
||||||
}
|
|
||||||
|
|
||||||
// If the head is a word, it depends on whether it matches a registered internal command
|
|
||||||
UnspannedAtomicToken::Word { text } => {
|
|
||||||
let name = text.slice(context.source);
|
|
||||||
|
|
||||||
if context.registry.has(name) {
|
|
||||||
// If the registry has the command, color it as an internal command
|
|
||||||
shapes.push(FlatShape::InternalCommand.spanned(text));
|
|
||||||
let command = context.registry.expect_command(name);
|
|
||||||
Ok(CommandHeadKind::Internal(command.signature()))
|
|
||||||
} else {
|
|
||||||
// Otherwise, color it as an external command
|
|
||||||
shapes.push(FlatShape::ExternalCommand.spanned(text));
|
|
||||||
Ok(CommandHeadKind::External)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Otherwise, we're not actually looking at a command
|
|
||||||
_ => Err(ShellError::syntax_error(
|
|
||||||
"No command at the head".spanned(atom.span),
|
|
||||||
)),
|
|
||||||
}
|
|
||||||
})
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[cfg(coloring_in_tokens)]
|
|
||||||
impl FallibleColorSyntax for CommandHeadShape {
|
impl FallibleColorSyntax for CommandHeadShape {
|
||||||
type Info = CommandHeadKind;
|
type Info = CommandHeadKind;
|
||||||
type Input = ();
|
type Input = ();
|
||||||
@ -1060,8 +676,8 @@ impl FallibleColorSyntax for CommandHeadShape {
|
|||||||
if context.registry.has(name) {
|
if context.registry.has(name) {
|
||||||
// If the registry has the command, color it as an internal command
|
// If the registry has the command, color it as an internal command
|
||||||
token_nodes.color_shape(FlatShape::InternalCommand.spanned(text));
|
token_nodes.color_shape(FlatShape::InternalCommand.spanned(text));
|
||||||
let command = context.registry.expect_command(name);
|
let signature = context.registry.get(name).unwrap();
|
||||||
Ok(CommandHeadKind::Internal(command.signature()))
|
Ok(CommandHeadKind::Internal(signature))
|
||||||
} else {
|
} else {
|
||||||
// Otherwise, color it as an external command
|
// Otherwise, color it as an external command
|
||||||
token_nodes.color_shape(FlatShape::ExternalCommand.spanned(text));
|
token_nodes.color_shape(FlatShape::ExternalCommand.spanned(text));
|
||||||
@ -1100,8 +716,8 @@ impl ExpandSyntax for CommandHeadShape {
|
|||||||
UnspannedToken::Bare => {
|
UnspannedToken::Bare => {
|
||||||
let name = token_span.slice(context.source);
|
let name = token_span.slice(context.source);
|
||||||
if context.registry.has(name) {
|
if context.registry.has(name) {
|
||||||
let command = context.registry.expect_command(name);
|
let signature = context.registry.get(name).unwrap();
|
||||||
CommandSignature::Internal(command.spanned(token_span))
|
CommandSignature::Internal(signature.spanned(token_span))
|
||||||
} else {
|
} else {
|
||||||
CommandSignature::External(token_span)
|
CommandSignature::External(token_span)
|
||||||
}
|
}
|
||||||
@ -1162,9 +778,8 @@ impl ExpandSyntax for ClassifiedCommandShape {
|
|||||||
external_command(iterator, context, name_str.tagged(outer))
|
external_command(iterator, context, name_str.tagged(outer))
|
||||||
}
|
}
|
||||||
|
|
||||||
CommandSignature::Internal(command) => {
|
CommandSignature::Internal(signature) => {
|
||||||
let tail =
|
let tail = parse_command_tail(&signature.item, &context, iterator, signature.span)?;
|
||||||
parse_command_tail(&command.signature(), &context, iterator, command.span)?;
|
|
||||||
|
|
||||||
let (positional, named) = match tail {
|
let (positional, named) = match tail {
|
||||||
None => (None, None),
|
None => (None, None),
|
||||||
@ -1181,9 +796,9 @@ impl ExpandSyntax for ClassifiedCommandShape {
|
|||||||
};
|
};
|
||||||
|
|
||||||
Ok(ClassifiedCommand::Internal(InternalCommand::new(
|
Ok(ClassifiedCommand::Internal(InternalCommand::new(
|
||||||
command.item.name().to_string(),
|
signature.item.name.clone(),
|
||||||
Tag {
|
Tag {
|
||||||
span: command.span,
|
span: signature.span,
|
||||||
anchor: None,
|
anchor: None,
|
||||||
},
|
},
|
||||||
call,
|
call,
|
||||||
@ -1196,46 +811,6 @@ impl ExpandSyntax for ClassifiedCommandShape {
|
|||||||
#[derive(Debug, Copy, Clone)]
|
#[derive(Debug, Copy, Clone)]
|
||||||
pub struct InternalCommandHeadShape;
|
pub struct InternalCommandHeadShape;
|
||||||
|
|
||||||
#[cfg(not(coloring_in_tokens))]
|
|
||||||
impl FallibleColorSyntax for InternalCommandHeadShape {
|
|
||||||
type Info = ();
|
|
||||||
type Input = ();
|
|
||||||
|
|
||||||
fn color_syntax<'a, 'b>(
|
|
||||||
&self,
|
|
||||||
_input: &(),
|
|
||||||
token_nodes: &'b mut TokensIterator<'a>,
|
|
||||||
_context: &ExpandContext,
|
|
||||||
shapes: &mut Vec<Spanned<FlatShape>>,
|
|
||||||
) -> Result<(), ShellError> {
|
|
||||||
let peeked_head = token_nodes.peek_non_ws().not_eof("command head4");
|
|
||||||
|
|
||||||
let peeked_head = match peeked_head {
|
|
||||||
Err(_) => return Ok(()),
|
|
||||||
Ok(peeked_head) => peeked_head,
|
|
||||||
};
|
|
||||||
|
|
||||||
let _expr = match peeked_head.node {
|
|
||||||
TokenNode::Token(Token {
|
|
||||||
unspanned: UnspannedToken::Bare,
|
|
||||||
span,
|
|
||||||
}) => shapes.push(FlatShape::Word.spanned(*span)),
|
|
||||||
|
|
||||||
TokenNode::Token(Token {
|
|
||||||
unspanned: UnspannedToken::String(_inner_tag),
|
|
||||||
span,
|
|
||||||
}) => shapes.push(FlatShape::String.spanned(*span)),
|
|
||||||
|
|
||||||
_node => shapes.push(FlatShape::Error.spanned(peeked_head.node.span())),
|
|
||||||
};
|
|
||||||
|
|
||||||
peeked_head.commit();
|
|
||||||
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[cfg(coloring_in_tokens)]
|
|
||||||
impl FallibleColorSyntax for InternalCommandHeadShape {
|
impl FallibleColorSyntax for InternalCommandHeadShape {
|
||||||
type Info = ();
|
type Info = ();
|
||||||
type Input = ();
|
type Input = ();
|
||||||
@ -1384,38 +959,6 @@ fn parse_single_node_skipping_ws<'a, 'b, T>(
|
|||||||
#[derive(Debug, Copy, Clone)]
|
#[derive(Debug, Copy, Clone)]
|
||||||
pub struct WhitespaceShape;
|
pub struct WhitespaceShape;
|
||||||
|
|
||||||
#[cfg(not(coloring_in_tokens))]
|
|
||||||
impl FallibleColorSyntax for WhitespaceShape {
|
|
||||||
type Info = ();
|
|
||||||
type Input = ();
|
|
||||||
|
|
||||||
fn color_syntax<'a, 'b>(
|
|
||||||
&self,
|
|
||||||
_input: &(),
|
|
||||||
token_nodes: &'b mut TokensIterator<'a>,
|
|
||||||
_context: &ExpandContext,
|
|
||||||
shapes: &mut Vec<Spanned<FlatShape>>,
|
|
||||||
) -> Result<(), ShellError> {
|
|
||||||
let peeked = token_nodes.peek_any().not_eof("whitespace");
|
|
||||||
|
|
||||||
let peeked = match peeked {
|
|
||||||
Err(_) => return Ok(()),
|
|
||||||
Ok(peeked) => peeked,
|
|
||||||
};
|
|
||||||
|
|
||||||
let _tag = match peeked.node {
|
|
||||||
TokenNode::Whitespace(span) => shapes.push(FlatShape::Whitespace.spanned(*span)),
|
|
||||||
|
|
||||||
_other => return Ok(()),
|
|
||||||
};
|
|
||||||
|
|
||||||
peeked.commit();
|
|
||||||
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[cfg(coloring_in_tokens)]
|
|
||||||
impl FallibleColorSyntax for WhitespaceShape {
|
impl FallibleColorSyntax for WhitespaceShape {
|
||||||
type Info = ();
|
type Info = ();
|
||||||
type Input = ();
|
type Input = ();
|
||||||
@ -1556,33 +1099,6 @@ impl ExpandSyntax for MaybeSpaceShape {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
#[cfg(not(coloring_in_tokens))]
|
|
||||||
impl ColorSyntax for MaybeSpaceShape {
|
|
||||||
type Info = ();
|
|
||||||
type Input = ();
|
|
||||||
|
|
||||||
fn color_syntax<'a, 'b>(
|
|
||||||
&self,
|
|
||||||
_input: &(),
|
|
||||||
token_nodes: &'b mut TokensIterator<'a>,
|
|
||||||
_context: &ExpandContext,
|
|
||||||
shapes: &mut Vec<Spanned<FlatShape>>,
|
|
||||||
) -> Self::Info {
|
|
||||||
let peeked = token_nodes.peek_any().not_eof("whitespace");
|
|
||||||
|
|
||||||
let peeked = match peeked {
|
|
||||||
Err(_) => return,
|
|
||||||
Ok(peeked) => peeked,
|
|
||||||
};
|
|
||||||
|
|
||||||
if let TokenNode::Whitespace(span) = peeked.node {
|
|
||||||
peeked.commit();
|
|
||||||
shapes.push(FlatShape::Whitespace.spanned(*span));
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[cfg(coloring_in_tokens)]
|
|
||||||
impl ColorSyntax for MaybeSpaceShape {
|
impl ColorSyntax for MaybeSpaceShape {
|
||||||
type Info = ();
|
type Info = ();
|
||||||
type Input = ();
|
type Input = ();
|
||||||
@ -1614,36 +1130,6 @@ impl ColorSyntax for MaybeSpaceShape {
|
|||||||
#[derive(Debug, Copy, Clone)]
|
#[derive(Debug, Copy, Clone)]
|
||||||
pub struct SpaceShape;
|
pub struct SpaceShape;
|
||||||
|
|
||||||
#[cfg(not(coloring_in_tokens))]
|
|
||||||
impl FallibleColorSyntax for SpaceShape {
|
|
||||||
type Info = ();
|
|
||||||
type Input = ();
|
|
||||||
|
|
||||||
fn color_syntax<'a, 'b>(
|
|
||||||
&self,
|
|
||||||
_input: &(),
|
|
||||||
token_nodes: &'b mut TokensIterator<'a>,
|
|
||||||
_context: &ExpandContext,
|
|
||||||
shapes: &mut Vec<Spanned<FlatShape>>,
|
|
||||||
) -> Result<(), ShellError> {
|
|
||||||
let peeked = token_nodes.peek_any().not_eof("whitespace")?;
|
|
||||||
|
|
||||||
match peeked.node {
|
|
||||||
TokenNode::Whitespace(span) => {
|
|
||||||
peeked.commit();
|
|
||||||
shapes.push(FlatShape::Whitespace.spanned(*span));
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
|
|
||||||
other => Err(ShellError::type_error(
|
|
||||||
"whitespace",
|
|
||||||
other.spanned_type_name(),
|
|
||||||
)),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[cfg(coloring_in_tokens)]
|
|
||||||
impl FallibleColorSyntax for SpaceShape {
|
impl FallibleColorSyntax for SpaceShape {
|
||||||
type Info = ();
|
type Info = ();
|
||||||
type Input = ();
|
type Input = ();
|
||||||
@ -1717,38 +1203,6 @@ fn expand_variable(span: Span, token_span: Span, source: &Text) -> hir::Expressi
|
|||||||
#[derive(Debug, Copy, Clone)]
|
#[derive(Debug, Copy, Clone)]
|
||||||
pub struct CommandShape;
|
pub struct CommandShape;
|
||||||
|
|
||||||
#[cfg(not(coloring_in_tokens))]
|
|
||||||
impl ColorSyntax for CommandShape {
|
|
||||||
type Info = ();
|
|
||||||
type Input = ();
|
|
||||||
|
|
||||||
fn color_syntax<'a, 'b>(
|
|
||||||
&self,
|
|
||||||
_input: &(),
|
|
||||||
token_nodes: &'b mut TokensIterator<'a>,
|
|
||||||
context: &ExpandContext,
|
|
||||||
shapes: &mut Vec<Spanned<FlatShape>>,
|
|
||||||
) {
|
|
||||||
let kind = color_fallible_syntax(&CommandHeadShape, token_nodes, context, shapes);
|
|
||||||
|
|
||||||
match kind {
|
|
||||||
Err(_) => {
|
|
||||||
// We didn't find a command, so we'll have to fall back to parsing this pipeline part
|
|
||||||
// as a blob of undifferentiated expressions
|
|
||||||
color_syntax(&ExpressionListShape, token_nodes, context, shapes);
|
|
||||||
}
|
|
||||||
|
|
||||||
Ok(CommandHeadKind::External) => {
|
|
||||||
color_syntax(&ExternalTokensShape, token_nodes, context, shapes);
|
|
||||||
}
|
|
||||||
Ok(CommandHeadKind::Internal(signature)) => {
|
|
||||||
color_syntax_with(&CommandTailShape, &signature, token_nodes, context, shapes);
|
|
||||||
}
|
|
||||||
};
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[cfg(coloring_in_tokens)]
|
|
||||||
impl ColorSyntax for CommandShape {
|
impl ColorSyntax for CommandShape {
|
||||||
type Info = ();
|
type Info = ();
|
||||||
type Input = ();
|
type Input = ();
|
@ -1,74 +1,20 @@
|
|||||||
use crate::errors::ShellError;
|
use crate::{
|
||||||
#[cfg(not(coloring_in_tokens))]
|
|
||||||
use crate::parser::hir::syntax_shape::FlatShape;
|
|
||||||
use crate::parser::{
|
|
||||||
hir,
|
hir,
|
||||||
hir::syntax_shape::{
|
hir::syntax_shape::{
|
||||||
color_fallible_syntax, color_syntax_with, continue_expression, expand_expr, expand_syntax,
|
color_fallible_syntax, color_syntax_with, continue_expression, expand_expr, expand_syntax,
|
||||||
DelimitedShape, ExpandContext, ExpandExpression, ExpressionContinuationShape,
|
DelimitedShape, ExpandContext, ExpandExpression, ExpressionContinuationShape,
|
||||||
ExpressionListShape, FallibleColorSyntax, MemberShape, ParseError, PathTailShape,
|
ExpressionListShape, FallibleColorSyntax, MemberShape, PathTailShape, PathTailSyntax,
|
||||||
PathTailSyntax, VariablePathShape,
|
VariablePathShape,
|
||||||
},
|
},
|
||||||
hir::tokens_iterator::TokensIterator,
|
hir::tokens_iterator::TokensIterator,
|
||||||
parse::token_tree::Delimiter,
|
parse::token_tree::Delimiter,
|
||||||
};
|
};
|
||||||
|
use nu_errors::{ParseError, ShellError};
|
||||||
use nu_source::Span;
|
use nu_source::Span;
|
||||||
#[cfg(not(coloring_in_tokens))]
|
|
||||||
use nu_source::Spanned;
|
|
||||||
|
|
||||||
#[derive(Debug, Copy, Clone)]
|
#[derive(Debug, Copy, Clone)]
|
||||||
pub struct AnyBlockShape;
|
pub struct AnyBlockShape;
|
||||||
|
|
||||||
#[cfg(not(coloring_in_tokens))]
|
|
||||||
impl FallibleColorSyntax for AnyBlockShape {
|
|
||||||
type Info = ();
|
|
||||||
type Input = ();
|
|
||||||
|
|
||||||
fn color_syntax<'a, 'b>(
|
|
||||||
&self,
|
|
||||||
_input: &(),
|
|
||||||
token_nodes: &'b mut TokensIterator<'a>,
|
|
||||||
context: &ExpandContext,
|
|
||||||
shapes: &mut Vec<Spanned<FlatShape>>,
|
|
||||||
) -> Result<(), ShellError> {
|
|
||||||
let block = token_nodes.peek_non_ws().not_eof("block");
|
|
||||||
|
|
||||||
let block = match block {
|
|
||||||
Err(_) => return Ok(()),
|
|
||||||
Ok(block) => block,
|
|
||||||
};
|
|
||||||
|
|
||||||
// is it just a block?
|
|
||||||
let block = block.node.as_block();
|
|
||||||
|
|
||||||
match block {
|
|
||||||
// If so, color it as a block
|
|
||||||
Some((children, spans)) => {
|
|
||||||
let mut token_nodes = TokensIterator::new(
|
|
||||||
children.item,
|
|
||||||
children.span,
|
|
||||||
context.source.clone(),
|
|
||||||
false,
|
|
||||||
);
|
|
||||||
color_syntax_with(
|
|
||||||
&DelimitedShape,
|
|
||||||
&(Delimiter::Brace, spans.0, spans.1),
|
|
||||||
&mut token_nodes,
|
|
||||||
context,
|
|
||||||
shapes,
|
|
||||||
);
|
|
||||||
|
|
||||||
return Ok(());
|
|
||||||
}
|
|
||||||
_ => {}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Otherwise, look for a shorthand block. If none found, fail
|
|
||||||
color_fallible_syntax(&ShorthandBlock, token_nodes, context, shapes)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[cfg(coloring_in_tokens)]
|
|
||||||
impl FallibleColorSyntax for AnyBlockShape {
|
impl FallibleColorSyntax for AnyBlockShape {
|
||||||
type Info = ();
|
type Info = ();
|
||||||
type Input = ();
|
type Input = ();
|
||||||
@ -149,39 +95,6 @@ impl ExpandExpression for AnyBlockShape {
|
|||||||
#[derive(Debug, Copy, Clone)]
|
#[derive(Debug, Copy, Clone)]
|
||||||
pub struct ShorthandBlock;
|
pub struct ShorthandBlock;
|
||||||
|
|
||||||
#[cfg(not(coloring_in_tokens))]
|
|
||||||
impl FallibleColorSyntax for ShorthandBlock {
|
|
||||||
type Info = ();
|
|
||||||
type Input = ();
|
|
||||||
|
|
||||||
fn color_syntax<'a, 'b>(
|
|
||||||
&self,
|
|
||||||
_input: &(),
|
|
||||||
token_nodes: &'b mut TokensIterator<'a>,
|
|
||||||
context: &ExpandContext,
|
|
||||||
shapes: &mut Vec<Spanned<FlatShape>>,
|
|
||||||
) -> Result<(), ShellError> {
|
|
||||||
// Try to find a shorthand head. If none found, fail
|
|
||||||
color_fallible_syntax(&ShorthandPath, token_nodes, context, shapes)?;
|
|
||||||
|
|
||||||
loop {
|
|
||||||
// Check to see whether there's any continuation after the head expression
|
|
||||||
let result =
|
|
||||||
color_fallible_syntax(&ExpressionContinuationShape, token_nodes, context, shapes);
|
|
||||||
|
|
||||||
match result {
|
|
||||||
// if no continuation was found, we're done
|
|
||||||
Err(_) => break,
|
|
||||||
// if a continuation was found, look for another one
|
|
||||||
Ok(_) => continue,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[cfg(coloring_in_tokens)]
|
|
||||||
impl FallibleColorSyntax for ShorthandBlock {
|
impl FallibleColorSyntax for ShorthandBlock {
|
||||||
type Info = ();
|
type Info = ();
|
||||||
type Input = ();
|
type Input = ();
|
||||||
@ -239,52 +152,6 @@ impl ExpandExpression for ShorthandBlock {
|
|||||||
#[derive(Debug, Copy, Clone)]
|
#[derive(Debug, Copy, Clone)]
|
||||||
pub struct ShorthandPath;
|
pub struct ShorthandPath;
|
||||||
|
|
||||||
#[cfg(not(coloring_in_tokens))]
|
|
||||||
impl FallibleColorSyntax for ShorthandPath {
|
|
||||||
type Info = ();
|
|
||||||
type Input = ();
|
|
||||||
|
|
||||||
fn color_syntax<'a, 'b>(
|
|
||||||
&self,
|
|
||||||
_input: &(),
|
|
||||||
token_nodes: &'b mut TokensIterator<'a>,
|
|
||||||
context: &ExpandContext,
|
|
||||||
shapes: &mut Vec<Spanned<FlatShape>>,
|
|
||||||
) -> Result<(), ShellError> {
|
|
||||||
token_nodes.atomic(|token_nodes| {
|
|
||||||
let variable = color_fallible_syntax(&VariablePathShape, token_nodes, context, shapes);
|
|
||||||
|
|
||||||
match variable {
|
|
||||||
Ok(_) => {
|
|
||||||
// if it's a variable path, that's the head part
|
|
||||||
return Ok(());
|
|
||||||
}
|
|
||||||
|
|
||||||
Err(_) => {
|
|
||||||
// otherwise, we'll try to find a member path
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// look for a member (`<member>` -> `$it.<member>`)
|
|
||||||
color_fallible_syntax(&MemberShape, token_nodes, context, shapes)?;
|
|
||||||
|
|
||||||
// Now that we've synthesized the head, of the path, proceed to expand the tail of the path
|
|
||||||
// like any other path.
|
|
||||||
let tail = color_fallible_syntax(&PathTailShape, token_nodes, context, shapes);
|
|
||||||
|
|
||||||
match tail {
|
|
||||||
Ok(_) => {}
|
|
||||||
Err(_) => {
|
|
||||||
// It's ok if there's no path tail; a single member is sufficient
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
Ok(())
|
|
||||||
})
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[cfg(coloring_in_tokens)]
|
|
||||||
impl FallibleColorSyntax for ShorthandPath {
|
impl FallibleColorSyntax for ShorthandPath {
|
||||||
type Info = ();
|
type Info = ();
|
||||||
type Input = ();
|
type Input = ();
|
||||||
@ -376,99 +243,6 @@ impl ExpandExpression for ShorthandPath {
|
|||||||
#[derive(Debug, Copy, Clone)]
|
#[derive(Debug, Copy, Clone)]
|
||||||
pub struct ShorthandHeadShape;
|
pub struct ShorthandHeadShape;
|
||||||
|
|
||||||
#[cfg(not(coloring_in_tokens))]
|
|
||||||
impl FallibleColorSyntax for ShorthandHeadShape {
|
|
||||||
type Info = ();
|
|
||||||
type Input = ();
|
|
||||||
|
|
||||||
fn color_syntax<'a, 'b>(
|
|
||||||
&self,
|
|
||||||
_input: &(),
|
|
||||||
token_nodes: &'b mut TokensIterator<'a>,
|
|
||||||
_context: &ExpandContext,
|
|
||||||
shapes: &mut Vec<Spanned<FlatShape>>,
|
|
||||||
) -> Result<(), ShellError> {
|
|
||||||
use crate::parser::parse::token_tree::TokenNode;
|
|
||||||
use crate::parser::parse::tokens::{Token, UnspannedToken};
|
|
||||||
use nu_source::SpannedItem;
|
|
||||||
|
|
||||||
// A shorthand path must not be at EOF
|
|
||||||
let peeked = token_nodes.peek_non_ws().not_eof("shorthand path head")?;
|
|
||||||
|
|
||||||
match peeked.node {
|
|
||||||
// If the head of a shorthand path is a bare token, it expands to `$it.bare`
|
|
||||||
TokenNode::Token(Token {
|
|
||||||
unspanned: UnspannedToken::Bare,
|
|
||||||
span,
|
|
||||||
}) => {
|
|
||||||
peeked.commit();
|
|
||||||
shapes.push(FlatShape::BareMember.spanned(*span));
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
|
|
||||||
// If the head of a shorthand path is a string, it expands to `$it."some string"`
|
|
||||||
TokenNode::Token(Token {
|
|
||||||
unspanned: UnspannedToken::String(_),
|
|
||||||
span: outer,
|
|
||||||
}) => {
|
|
||||||
peeked.commit();
|
|
||||||
shapes.push(FlatShape::StringMember.spanned(*outer));
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
|
|
||||||
other => Err(ShellError::type_error(
|
|
||||||
"shorthand head",
|
|
||||||
other.spanned_type_name(),
|
|
||||||
)),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[cfg(coloring_in_tokens)]
|
|
||||||
#[cfg(not(coloring_in_tokens))]
|
|
||||||
impl FallibleColorSyntax for ShorthandHeadShape {
|
|
||||||
type Info = ();
|
|
||||||
type Input = ();
|
|
||||||
|
|
||||||
fn color_syntax<'a, 'b>(
|
|
||||||
&self,
|
|
||||||
_input: &(),
|
|
||||||
token_nodes: &'b mut TokensIterator<'a>,
|
|
||||||
_context: &ExpandContext,
|
|
||||||
shapes: &mut Vec<Spanned<FlatShape>>,
|
|
||||||
) -> Result<(), ShellError> {
|
|
||||||
// A shorthand path must not be at EOF
|
|
||||||
let peeked = token_nodes.peek_non_ws().not_eof("shorthand path head")?;
|
|
||||||
|
|
||||||
match peeked.node {
|
|
||||||
// If the head of a shorthand path is a bare token, it expands to `$it.bare`
|
|
||||||
TokenNode::Token(Spanned {
|
|
||||||
item: UnspannedToken::Bare,
|
|
||||||
span,
|
|
||||||
}) => {
|
|
||||||
peeked.commit();
|
|
||||||
shapes.push(FlatShape::BareMember.spanned(*span));
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
|
|
||||||
// If the head of a shorthand path is a string, it expands to `$it."some string"`
|
|
||||||
TokenNode::Token(Spanned {
|
|
||||||
item: UnspannedToken::String(_),
|
|
||||||
span: outer,
|
|
||||||
}) => {
|
|
||||||
peeked.commit();
|
|
||||||
shapes.push(FlatShape::StringMember.spanned(*outer));
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
|
|
||||||
other => Err(ShellError::type_error(
|
|
||||||
"shorthand head",
|
|
||||||
other.tagged_type_name(),
|
|
||||||
)),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl ExpandExpression for ShorthandHeadShape {
|
impl ExpandExpression for ShorthandHeadShape {
|
||||||
fn name(&self) -> &'static str {
|
fn name(&self) -> &'static str {
|
||||||
"shorthand head"
|
"shorthand head"
|
@ -4,22 +4,23 @@ pub(crate) mod file_path;
|
|||||||
pub(crate) mod list;
|
pub(crate) mod list;
|
||||||
pub(crate) mod number;
|
pub(crate) mod number;
|
||||||
pub(crate) mod pattern;
|
pub(crate) mod pattern;
|
||||||
|
pub(crate) mod range;
|
||||||
pub(crate) mod string;
|
pub(crate) mod string;
|
||||||
pub(crate) mod unit;
|
pub(crate) mod unit;
|
||||||
pub(crate) mod variable_path;
|
pub(crate) mod variable_path;
|
||||||
|
|
||||||
use crate::parser::hir::syntax_shape::{
|
use crate::hir::syntax_shape::{
|
||||||
color_delimited_square, color_fallible_syntax, color_fallible_syntax_with, expand_atom,
|
color_delimited_square, color_fallible_syntax, color_fallible_syntax_with, expand_atom,
|
||||||
expand_delimited_square, expand_expr, expand_syntax, BareShape, ColorableDotShape, DotShape,
|
expand_delimited_square, expand_expr, expand_syntax, BareShape, ColorableDotShape, DotShape,
|
||||||
ExpandContext, ExpandExpression, ExpandSyntax, ExpansionRule, ExpressionContinuation,
|
ExpandContext, ExpandExpression, ExpandSyntax, ExpansionRule, ExpressionContinuation,
|
||||||
ExpressionContinuationShape, FallibleColorSyntax, FlatShape, ParseError, UnspannedAtomicToken,
|
ExpressionContinuationShape, FallibleColorSyntax, FlatShape, UnspannedAtomicToken,
|
||||||
};
|
};
|
||||||
use crate::parser::{
|
use crate::{
|
||||||
hir,
|
hir,
|
||||||
hir::{Expression, TokensIterator},
|
hir::{Expression, TokensIterator},
|
||||||
};
|
};
|
||||||
use crate::prelude::*;
|
use nu_errors::{ParseError, ShellError};
|
||||||
use nu_source::Spanned;
|
use nu_source::{HasSpan, Span, Spanned, SpannedItem, Tag};
|
||||||
use std::path::PathBuf;
|
use std::path::PathBuf;
|
||||||
|
|
||||||
#[derive(Debug, Copy, Clone)]
|
#[derive(Debug, Copy, Clone)]
|
||||||
@ -42,34 +43,6 @@ impl ExpandExpression for AnyExpressionShape {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
#[cfg(not(coloring_in_tokens))]
|
|
||||||
impl FallibleColorSyntax for AnyExpressionShape {
|
|
||||||
type Info = ();
|
|
||||||
type Input = ();
|
|
||||||
|
|
||||||
fn color_syntax<'a, 'b>(
|
|
||||||
&self,
|
|
||||||
_input: &(),
|
|
||||||
token_nodes: &'b mut TokensIterator<'a>,
|
|
||||||
context: &ExpandContext,
|
|
||||||
shapes: &mut Vec<Spanned<FlatShape>>,
|
|
||||||
) -> Result<(), ShellError> {
|
|
||||||
// Look for an expression at the cursor
|
|
||||||
color_fallible_syntax(&AnyExpressionStartShape, token_nodes, context, shapes)?;
|
|
||||||
|
|
||||||
match continue_coloring_expression(token_nodes, context, shapes) {
|
|
||||||
Err(_) => {
|
|
||||||
// it's fine for there to be no continuation
|
|
||||||
}
|
|
||||||
|
|
||||||
Ok(()) => {}
|
|
||||||
}
|
|
||||||
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[cfg(coloring_in_tokens)]
|
|
||||||
impl FallibleColorSyntax for AnyExpressionShape {
|
impl FallibleColorSyntax for AnyExpressionShape {
|
||||||
type Info = ();
|
type Info = ();
|
||||||
type Input = ();
|
type Input = ();
|
||||||
@ -127,32 +100,6 @@ pub(crate) fn continue_expression(
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
#[cfg(not(coloring_in_tokens))]
|
|
||||||
pub(crate) fn continue_coloring_expression(
|
|
||||||
token_nodes: &mut TokensIterator<'_>,
|
|
||||||
context: &ExpandContext,
|
|
||||||
shapes: &mut Vec<Spanned<FlatShape>>,
|
|
||||||
) -> Result<(), ShellError> {
|
|
||||||
// if there's not even one expression continuation, fail
|
|
||||||
color_fallible_syntax(&ExpressionContinuationShape, token_nodes, context, shapes)?;
|
|
||||||
|
|
||||||
loop {
|
|
||||||
// Check to see whether there's any continuation after the head expression
|
|
||||||
let result =
|
|
||||||
color_fallible_syntax(&ExpressionContinuationShape, token_nodes, context, shapes);
|
|
||||||
|
|
||||||
match result {
|
|
||||||
Err(_) => {
|
|
||||||
// We already saw one continuation, so just return
|
|
||||||
return Ok(());
|
|
||||||
}
|
|
||||||
|
|
||||||
Ok(_) => {}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[cfg(coloring_in_tokens)]
|
|
||||||
pub(crate) fn continue_coloring_expression(
|
pub(crate) fn continue_coloring_expression(
|
||||||
token_nodes: &mut TokensIterator<'_>,
|
token_nodes: &mut TokensIterator<'_>,
|
||||||
context: &ExpandContext,
|
context: &ExpandContext,
|
||||||
@ -220,66 +167,6 @@ impl ExpandExpression for AnyExpressionStartShape {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
#[cfg(not(coloring_in_tokens))]
|
|
||||||
impl FallibleColorSyntax for AnyExpressionStartShape {
|
|
||||||
type Info = ();
|
|
||||||
type Input = ();
|
|
||||||
|
|
||||||
fn color_syntax<'a, 'b>(
|
|
||||||
&self,
|
|
||||||
_input: &(),
|
|
||||||
token_nodes: &'b mut TokensIterator<'a>,
|
|
||||||
context: &ExpandContext,
|
|
||||||
shapes: &mut Vec<Spanned<FlatShape>>,
|
|
||||||
) -> Result<(), ShellError> {
|
|
||||||
let atom = token_nodes.spanned(|token_nodes| {
|
|
||||||
expand_atom(
|
|
||||||
token_nodes,
|
|
||||||
"expression",
|
|
||||||
context,
|
|
||||||
ExpansionRule::permissive(),
|
|
||||||
)
|
|
||||||
});
|
|
||||||
|
|
||||||
let atom = match atom {
|
|
||||||
Spanned {
|
|
||||||
item: Err(_err),
|
|
||||||
span,
|
|
||||||
} => {
|
|
||||||
shapes.push(FlatShape::Error.spanned(span));
|
|
||||||
return Ok(());
|
|
||||||
}
|
|
||||||
|
|
||||||
Spanned {
|
|
||||||
item: Ok(value), ..
|
|
||||||
} => value,
|
|
||||||
};
|
|
||||||
|
|
||||||
match &atom.unspanned {
|
|
||||||
UnspannedAtomicToken::Size { number, unit } => shapes.push(
|
|
||||||
FlatShape::Size {
|
|
||||||
number: number.span(),
|
|
||||||
unit: unit.span.into(),
|
|
||||||
}
|
|
||||||
.spanned(atom.span),
|
|
||||||
),
|
|
||||||
|
|
||||||
UnspannedAtomicToken::SquareDelimited { nodes, spans } => {
|
|
||||||
color_delimited_square(*spans, &nodes, atom.span.into(), context, shapes)
|
|
||||||
}
|
|
||||||
|
|
||||||
UnspannedAtomicToken::Word { .. } => {
|
|
||||||
shapes.push(FlatShape::Word.spanned(atom.span));
|
|
||||||
}
|
|
||||||
|
|
||||||
_ => atom.color_tokens(shapes),
|
|
||||||
}
|
|
||||||
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[cfg(coloring_in_tokens)]
|
|
||||||
impl FallibleColorSyntax for AnyExpressionStartShape {
|
impl FallibleColorSyntax for AnyExpressionStartShape {
|
||||||
type Info = ();
|
type Info = ();
|
||||||
type Input = ();
|
type Input = ();
|
||||||
@ -350,64 +237,6 @@ impl FallibleColorSyntax for AnyExpressionStartShape {
|
|||||||
#[derive(Debug, Copy, Clone)]
|
#[derive(Debug, Copy, Clone)]
|
||||||
pub struct BareTailShape;
|
pub struct BareTailShape;
|
||||||
|
|
||||||
#[cfg(not(coloring_in_tokens))]
|
|
||||||
impl FallibleColorSyntax for BareTailShape {
|
|
||||||
type Info = ();
|
|
||||||
type Input = ();
|
|
||||||
|
|
||||||
fn color_syntax<'a, 'b>(
|
|
||||||
&self,
|
|
||||||
_input: &(),
|
|
||||||
token_nodes: &'b mut TokensIterator<'a>,
|
|
||||||
context: &ExpandContext,
|
|
||||||
shapes: &mut Vec<Spanned<FlatShape>>,
|
|
||||||
) -> Result<(), ShellError> {
|
|
||||||
let len = shapes.len();
|
|
||||||
|
|
||||||
loop {
|
|
||||||
let word = color_fallible_syntax_with(
|
|
||||||
&BareShape,
|
|
||||||
&FlatShape::Word,
|
|
||||||
token_nodes,
|
|
||||||
context,
|
|
||||||
shapes,
|
|
||||||
);
|
|
||||||
|
|
||||||
match word {
|
|
||||||
// if a word was found, continue
|
|
||||||
Ok(_) => continue,
|
|
||||||
// if a word wasn't found, try to find a dot
|
|
||||||
Err(_) => {}
|
|
||||||
}
|
|
||||||
|
|
||||||
// try to find a dot
|
|
||||||
let dot = color_fallible_syntax_with(
|
|
||||||
&ColorableDotShape,
|
|
||||||
&FlatShape::Word,
|
|
||||||
token_nodes,
|
|
||||||
context,
|
|
||||||
shapes,
|
|
||||||
);
|
|
||||||
|
|
||||||
match dot {
|
|
||||||
// if a dot was found, try to find another word
|
|
||||||
Ok(_) => continue,
|
|
||||||
// otherwise, we're done
|
|
||||||
Err(_) => break,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if shapes.len() > len {
|
|
||||||
Ok(())
|
|
||||||
} else {
|
|
||||||
Err(ShellError::syntax_error(
|
|
||||||
"No tokens matched BareTailShape".spanned_unknown(),
|
|
||||||
))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[cfg(coloring_in_tokens)]
|
|
||||||
impl FallibleColorSyntax for BareTailShape {
|
impl FallibleColorSyntax for BareTailShape {
|
||||||
type Info = ();
|
type Info = ();
|
||||||
type Input = ();
|
type Input = ();
|
@ -1,15 +1,20 @@
|
|||||||
use crate::parser::hir::syntax_shape::{
|
use crate::hir::syntax_shape::FlatShape;
|
||||||
|
use crate::hir::syntax_shape::{
|
||||||
expand_syntax, expression::expand_file_path, parse_single_node, BarePathShape,
|
expand_syntax, expression::expand_file_path, parse_single_node, BarePathShape,
|
||||||
BarePatternShape, ExpandContext, UnitShape, UnitSyntax,
|
BarePatternShape, ExpandContext, UnitShape, UnitSyntax,
|
||||||
};
|
};
|
||||||
use crate::parser::{
|
use crate::parse::operator::EvaluationOperator;
|
||||||
|
use crate::parse::token_tree::{DelimitedNode, Delimiter, TokenNode};
|
||||||
|
use crate::parse::tokens::UnspannedToken;
|
||||||
|
use crate::parse::unit::Unit;
|
||||||
|
use crate::{
|
||||||
hir,
|
hir,
|
||||||
hir::{Expression, RawNumber, TokensIterator},
|
hir::{Expression, RawNumber, TokensIterator},
|
||||||
parse::flag::{Flag, FlagKind},
|
parse::flag::{Flag, FlagKind},
|
||||||
DelimitedNode, Delimiter, FlatShape, TokenNode, Unit, UnspannedToken,
|
|
||||||
};
|
};
|
||||||
use crate::prelude::*;
|
use nu_errors::{ParseError, ShellError};
|
||||||
use nu_source::Spanned;
|
use nu_protocol::{ShellTypeName, SpannedTypeName};
|
||||||
|
use nu_source::{b, DebugDocBuilder, HasSpan, PrettyDebugWithSource, Span, Spanned, SpannedItem};
|
||||||
use std::ops::Deref;
|
use std::ops::Deref;
|
||||||
|
|
||||||
#[derive(Debug, Clone)]
|
#[derive(Debug, Clone)]
|
||||||
@ -48,23 +53,36 @@ pub enum UnspannedAtomicToken<'tokens> {
|
|||||||
Word {
|
Word {
|
||||||
text: Span,
|
text: Span,
|
||||||
},
|
},
|
||||||
#[allow(unused)]
|
|
||||||
Dot {
|
|
||||||
text: Span,
|
|
||||||
},
|
|
||||||
SquareDelimited {
|
SquareDelimited {
|
||||||
spans: (Span, Span),
|
spans: (Span, Span),
|
||||||
nodes: &'tokens Vec<TokenNode>,
|
nodes: &'tokens Vec<TokenNode>,
|
||||||
},
|
},
|
||||||
|
#[allow(unused)]
|
||||||
|
RoundDelimited {
|
||||||
|
spans: (Span, Span),
|
||||||
|
nodes: &'tokens Vec<TokenNode>,
|
||||||
|
},
|
||||||
ShorthandFlag {
|
ShorthandFlag {
|
||||||
name: Span,
|
name: Span,
|
||||||
},
|
},
|
||||||
Operator {
|
CompareOperator {
|
||||||
|
text: Span,
|
||||||
|
},
|
||||||
|
Dot {
|
||||||
|
text: Span,
|
||||||
|
},
|
||||||
|
DotDot {
|
||||||
text: Span,
|
text: Span,
|
||||||
},
|
},
|
||||||
Whitespace {
|
Whitespace {
|
||||||
text: Span,
|
text: Span,
|
||||||
},
|
},
|
||||||
|
Separator {
|
||||||
|
text: Span,
|
||||||
|
},
|
||||||
|
Comment {
|
||||||
|
body: Span,
|
||||||
|
},
|
||||||
}
|
}
|
||||||
|
|
||||||
impl<'tokens> UnspannedAtomicToken<'tokens> {
|
impl<'tokens> UnspannedAtomicToken<'tokens> {
|
||||||
@ -76,15 +94,24 @@ impl<'tokens> UnspannedAtomicToken<'tokens> {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
impl<'tokens> ShellTypeName for AtomicToken<'tokens> {
|
||||||
|
fn type_name(&self) -> &'static str {
|
||||||
|
self.unspanned.type_name()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
impl<'tokens> ShellTypeName for UnspannedAtomicToken<'tokens> {
|
impl<'tokens> ShellTypeName for UnspannedAtomicToken<'tokens> {
|
||||||
fn type_name(&self) -> &'static str {
|
fn type_name(&self) -> &'static str {
|
||||||
match &self {
|
match &self {
|
||||||
UnspannedAtomicToken::Eof { .. } => "eof",
|
UnspannedAtomicToken::Eof { .. } => "eof",
|
||||||
UnspannedAtomicToken::Error { .. } => "error",
|
UnspannedAtomicToken::Error { .. } => "error",
|
||||||
UnspannedAtomicToken::Operator { .. } => "operator",
|
UnspannedAtomicToken::CompareOperator { .. } => "compare operator",
|
||||||
UnspannedAtomicToken::ShorthandFlag { .. } => "shorthand flag",
|
UnspannedAtomicToken::ShorthandFlag { .. } => "shorthand flag",
|
||||||
UnspannedAtomicToken::Whitespace { .. } => "whitespace",
|
UnspannedAtomicToken::Whitespace { .. } => "whitespace",
|
||||||
|
UnspannedAtomicToken::Separator { .. } => "separator",
|
||||||
|
UnspannedAtomicToken::Comment { .. } => "comment",
|
||||||
UnspannedAtomicToken::Dot { .. } => "dot",
|
UnspannedAtomicToken::Dot { .. } => "dot",
|
||||||
|
UnspannedAtomicToken::DotDot { .. } => "dotdot",
|
||||||
UnspannedAtomicToken::Number { .. } => "number",
|
UnspannedAtomicToken::Number { .. } => "number",
|
||||||
UnspannedAtomicToken::Size { .. } => "size",
|
UnspannedAtomicToken::Size { .. } => "size",
|
||||||
UnspannedAtomicToken::String { .. } => "string",
|
UnspannedAtomicToken::String { .. } => "string",
|
||||||
@ -95,6 +122,7 @@ impl<'tokens> ShellTypeName for UnspannedAtomicToken<'tokens> {
|
|||||||
UnspannedAtomicToken::GlobPattern { .. } => "file pattern",
|
UnspannedAtomicToken::GlobPattern { .. } => "file pattern",
|
||||||
UnspannedAtomicToken::Word { .. } => "word",
|
UnspannedAtomicToken::Word { .. } => "word",
|
||||||
UnspannedAtomicToken::SquareDelimited { .. } => "array literal",
|
UnspannedAtomicToken::SquareDelimited { .. } => "array literal",
|
||||||
|
UnspannedAtomicToken::RoundDelimited { .. } => "paren delimited",
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@ -105,6 +133,12 @@ pub struct AtomicToken<'tokens> {
|
|||||||
pub span: Span,
|
pub span: Span,
|
||||||
}
|
}
|
||||||
|
|
||||||
|
impl<'tokens> HasSpan for AtomicToken<'tokens> {
|
||||||
|
fn span(&self) -> Span {
|
||||||
|
self.span
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
impl<'tokens> Deref for AtomicToken<'tokens> {
|
impl<'tokens> Deref for AtomicToken<'tokens> {
|
||||||
type Target = UnspannedAtomicToken<'tokens>;
|
type Target = UnspannedAtomicToken<'tokens>;
|
||||||
|
|
||||||
@ -127,31 +161,18 @@ impl<'tokens> AtomicToken<'tokens> {
|
|||||||
))
|
))
|
||||||
}
|
}
|
||||||
UnspannedAtomicToken::Error { .. } => {
|
UnspannedAtomicToken::Error { .. } => {
|
||||||
return Err(ParseError::mismatch(
|
return Err(ParseError::mismatch(expected, "error".spanned(self.span)))
|
||||||
expected,
|
|
||||||
"eof atomic token".spanned(self.span),
|
|
||||||
))
|
|
||||||
}
|
}
|
||||||
UnspannedAtomicToken::Operator { .. } => {
|
UnspannedAtomicToken::RoundDelimited { .. }
|
||||||
return Err(ParseError::mismatch(
|
| UnspannedAtomicToken::CompareOperator { .. }
|
||||||
expected,
|
| UnspannedAtomicToken::ShorthandFlag { .. }
|
||||||
"operator".spanned(self.span),
|
| UnspannedAtomicToken::Whitespace { .. }
|
||||||
))
|
| UnspannedAtomicToken::Separator { .. }
|
||||||
}
|
| UnspannedAtomicToken::Comment { .. }
|
||||||
UnspannedAtomicToken::ShorthandFlag { .. } => {
|
| UnspannedAtomicToken::Dot { .. }
|
||||||
return Err(ParseError::mismatch(
|
| UnspannedAtomicToken::DotDot { .. }
|
||||||
expected,
|
| UnspannedAtomicToken::SquareDelimited { .. } => {
|
||||||
"shorthand flag".spanned(self.span),
|
return Err(ParseError::mismatch(expected, self.spanned_type_name()));
|
||||||
))
|
|
||||||
}
|
|
||||||
UnspannedAtomicToken::Whitespace { .. } => {
|
|
||||||
return Err(ParseError::mismatch(
|
|
||||||
expected,
|
|
||||||
"whitespace".spanned(self.span),
|
|
||||||
))
|
|
||||||
}
|
|
||||||
UnspannedAtomicToken::Dot { .. } => {
|
|
||||||
return Err(ParseError::mismatch(expected, "dot".spanned(self.span)))
|
|
||||||
}
|
}
|
||||||
UnspannedAtomicToken::Number { number } => {
|
UnspannedAtomicToken::Number { number } => {
|
||||||
Expression::number(number.to_number(context.source), self.span)
|
Expression::number(number.to_number(context.source), self.span)
|
||||||
@ -171,41 +192,17 @@ impl<'tokens> AtomicToken<'tokens> {
|
|||||||
self.span,
|
self.span,
|
||||||
),
|
),
|
||||||
UnspannedAtomicToken::Word { text } => Expression::string(*text, *text),
|
UnspannedAtomicToken::Word { text } => Expression::string(*text, *text),
|
||||||
UnspannedAtomicToken::SquareDelimited { .. } => unimplemented!("into_hir"),
|
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
#[cfg(not(coloring_in_tokens))]
|
|
||||||
pub fn spanned_type_name(&self) -> Spanned<&'static str> {
|
|
||||||
match &self.unspanned {
|
|
||||||
UnspannedAtomicToken::Eof { .. } => "eof",
|
|
||||||
UnspannedAtomicToken::Error { .. } => "error",
|
|
||||||
UnspannedAtomicToken::Operator { .. } => "operator",
|
|
||||||
UnspannedAtomicToken::ShorthandFlag { .. } => "shorthand flag",
|
|
||||||
UnspannedAtomicToken::Whitespace { .. } => "whitespace",
|
|
||||||
UnspannedAtomicToken::Dot { .. } => "dot",
|
|
||||||
UnspannedAtomicToken::Number { .. } => "number",
|
|
||||||
UnspannedAtomicToken::Size { .. } => "size",
|
|
||||||
UnspannedAtomicToken::String { .. } => "string",
|
|
||||||
UnspannedAtomicToken::ItVariable { .. } => "$it",
|
|
||||||
UnspannedAtomicToken::Variable { .. } => "variable",
|
|
||||||
UnspannedAtomicToken::ExternalCommand { .. } => "external command",
|
|
||||||
UnspannedAtomicToken::ExternalWord { .. } => "external word",
|
|
||||||
UnspannedAtomicToken::GlobPattern { .. } => "file pattern",
|
|
||||||
UnspannedAtomicToken::Word { .. } => "word",
|
|
||||||
UnspannedAtomicToken::SquareDelimited { .. } => "array literal",
|
|
||||||
}
|
|
||||||
.spanned(self.span)
|
|
||||||
}
|
|
||||||
|
|
||||||
pub(crate) fn color_tokens(&self, shapes: &mut Vec<Spanned<FlatShape>>) {
|
pub(crate) fn color_tokens(&self, shapes: &mut Vec<Spanned<FlatShape>>) {
|
||||||
match &self.unspanned {
|
match &self.unspanned {
|
||||||
UnspannedAtomicToken::Eof { .. } => {}
|
UnspannedAtomicToken::Eof { .. } => {}
|
||||||
UnspannedAtomicToken::Error { .. } => {
|
UnspannedAtomicToken::Error { .. } => {
|
||||||
return shapes.push(FlatShape::Error.spanned(self.span))
|
return shapes.push(FlatShape::Error.spanned(self.span))
|
||||||
}
|
}
|
||||||
UnspannedAtomicToken::Operator { .. } => {
|
UnspannedAtomicToken::CompareOperator { .. } => {
|
||||||
return shapes.push(FlatShape::Operator.spanned(self.span));
|
return shapes.push(FlatShape::CompareOperator.spanned(self.span));
|
||||||
}
|
}
|
||||||
UnspannedAtomicToken::ShorthandFlag { .. } => {
|
UnspannedAtomicToken::ShorthandFlag { .. } => {
|
||||||
return shapes.push(FlatShape::ShorthandFlag.spanned(self.span));
|
return shapes.push(FlatShape::ShorthandFlag.spanned(self.span));
|
||||||
@ -301,17 +298,30 @@ impl PrettyDebugWithSource for AtomicToken<'_> {
|
|||||||
b::intersperse_with_source(nodes.iter(), b::space(), source),
|
b::intersperse_with_source(nodes.iter(), b::space(), source),
|
||||||
"]",
|
"]",
|
||||||
),
|
),
|
||||||
|
UnspannedAtomicToken::RoundDelimited { nodes, .. } => b::delimit(
|
||||||
|
"(",
|
||||||
|
b::intersperse_with_source(nodes.iter(), b::space(), source),
|
||||||
|
")",
|
||||||
|
),
|
||||||
UnspannedAtomicToken::ShorthandFlag { name } => {
|
UnspannedAtomicToken::ShorthandFlag { name } => {
|
||||||
atom_kind("shorthand flag", b::key(name.slice(source)))
|
atom_kind("shorthand flag", b::key(name.slice(source)))
|
||||||
}
|
}
|
||||||
UnspannedAtomicToken::Dot { .. } => atom(b::kind("dot")),
|
UnspannedAtomicToken::Dot { .. } => atom(b::kind("dot")),
|
||||||
UnspannedAtomicToken::Operator { text } => {
|
UnspannedAtomicToken::DotDot { .. } => atom(b::kind("dotdot")),
|
||||||
|
UnspannedAtomicToken::CompareOperator { text } => {
|
||||||
atom_kind("operator", b::keyword(text.slice(source)))
|
atom_kind("operator", b::keyword(text.slice(source)))
|
||||||
}
|
}
|
||||||
UnspannedAtomicToken::Whitespace { text } => atom_kind(
|
UnspannedAtomicToken::Whitespace { text } => atom_kind(
|
||||||
"whitespace",
|
"whitespace",
|
||||||
b::description(format!("{:?}", text.slice(source))),
|
b::description(format!("{:?}", text.slice(source))),
|
||||||
),
|
),
|
||||||
|
UnspannedAtomicToken::Separator { text } => atom_kind(
|
||||||
|
"separator",
|
||||||
|
b::description(format!("{:?}", text.slice(source))),
|
||||||
|
),
|
||||||
|
UnspannedAtomicToken::Comment { body } => {
|
||||||
|
atom_kind("comment", b::description(body.slice(source)))
|
||||||
|
}
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@ -327,12 +337,15 @@ pub enum WhitespaceHandling {
|
|||||||
pub struct ExpansionRule {
|
pub struct ExpansionRule {
|
||||||
pub(crate) allow_external_command: bool,
|
pub(crate) allow_external_command: bool,
|
||||||
pub(crate) allow_external_word: bool,
|
pub(crate) allow_external_word: bool,
|
||||||
pub(crate) allow_operator: bool,
|
pub(crate) allow_cmp_operator: bool,
|
||||||
|
pub(crate) allow_eval_operator: bool,
|
||||||
pub(crate) allow_eof: bool,
|
pub(crate) allow_eof: bool,
|
||||||
|
pub(crate) allow_separator: bool,
|
||||||
pub(crate) treat_size_as_word: bool,
|
pub(crate) treat_size_as_word: bool,
|
||||||
pub(crate) separate_members: bool,
|
pub(crate) separate_members: bool,
|
||||||
pub(crate) commit_errors: bool,
|
pub(crate) commit_errors: bool,
|
||||||
pub(crate) whitespace: WhitespaceHandling,
|
pub(crate) whitespace: WhitespaceHandling,
|
||||||
|
pub(crate) allow_comments: bool,
|
||||||
}
|
}
|
||||||
|
|
||||||
impl ExpansionRule {
|
impl ExpansionRule {
|
||||||
@ -340,12 +353,15 @@ impl ExpansionRule {
|
|||||||
ExpansionRule {
|
ExpansionRule {
|
||||||
allow_external_command: false,
|
allow_external_command: false,
|
||||||
allow_external_word: false,
|
allow_external_word: false,
|
||||||
allow_operator: false,
|
allow_eval_operator: false,
|
||||||
|
allow_cmp_operator: false,
|
||||||
allow_eof: false,
|
allow_eof: false,
|
||||||
treat_size_as_word: false,
|
treat_size_as_word: false,
|
||||||
separate_members: false,
|
separate_members: false,
|
||||||
commit_errors: false,
|
commit_errors: false,
|
||||||
|
allow_separator: false,
|
||||||
whitespace: WhitespaceHandling::RejectWhitespace,
|
whitespace: WhitespaceHandling::RejectWhitespace,
|
||||||
|
allow_comments: false,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -356,11 +372,14 @@ impl ExpansionRule {
|
|||||||
ExpansionRule {
|
ExpansionRule {
|
||||||
allow_external_command: true,
|
allow_external_command: true,
|
||||||
allow_external_word: true,
|
allow_external_word: true,
|
||||||
allow_operator: true,
|
allow_cmp_operator: true,
|
||||||
|
allow_eval_operator: true,
|
||||||
allow_eof: true,
|
allow_eof: true,
|
||||||
separate_members: false,
|
separate_members: false,
|
||||||
treat_size_as_word: false,
|
treat_size_as_word: false,
|
||||||
commit_errors: true,
|
commit_errors: true,
|
||||||
|
allow_separator: true,
|
||||||
|
allow_comments: true,
|
||||||
whitespace: WhitespaceHandling::AllowWhitespace,
|
whitespace: WhitespaceHandling::AllowWhitespace,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@ -372,14 +391,26 @@ impl ExpansionRule {
|
|||||||
}
|
}
|
||||||
|
|
||||||
#[allow(unused)]
|
#[allow(unused)]
|
||||||
pub fn allow_operator(mut self) -> ExpansionRule {
|
pub fn allow_cmp_operator(mut self) -> ExpansionRule {
|
||||||
self.allow_operator = true;
|
self.allow_cmp_operator = true;
|
||||||
|
self
|
||||||
|
}
|
||||||
|
|
||||||
|
#[allow(unused)]
|
||||||
|
pub fn no_cmp_operator(mut self) -> ExpansionRule {
|
||||||
|
self.allow_cmp_operator = false;
|
||||||
|
self
|
||||||
|
}
|
||||||
|
|
||||||
|
#[allow(unused)]
|
||||||
|
pub fn allow_eval_operator(mut self) -> ExpansionRule {
|
||||||
|
self.allow_eval_operator = true;
|
||||||
self
|
self
|
||||||
}
|
}
|
||||||
|
|
||||||
#[allow(unused)]
|
#[allow(unused)]
|
||||||
pub fn no_operator(mut self) -> ExpansionRule {
|
pub fn no_operator(mut self) -> ExpansionRule {
|
||||||
self.allow_operator = false;
|
self.allow_eval_operator = false;
|
||||||
self
|
self
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -436,6 +467,30 @@ impl ExpansionRule {
|
|||||||
self.whitespace = WhitespaceHandling::RejectWhitespace;
|
self.whitespace = WhitespaceHandling::RejectWhitespace;
|
||||||
self
|
self
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[allow(unused)]
|
||||||
|
pub fn allow_separator(mut self) -> ExpansionRule {
|
||||||
|
self.allow_separator = true;
|
||||||
|
self
|
||||||
|
}
|
||||||
|
|
||||||
|
#[allow(unused)]
|
||||||
|
pub fn reject_separator(mut self) -> ExpansionRule {
|
||||||
|
self.allow_separator = false;
|
||||||
|
self
|
||||||
|
}
|
||||||
|
|
||||||
|
#[allow(unused)]
|
||||||
|
pub fn allow_comments(mut self) -> ExpansionRule {
|
||||||
|
self.allow_comments = true;
|
||||||
|
self
|
||||||
|
}
|
||||||
|
|
||||||
|
#[allow(unused)]
|
||||||
|
pub fn reject_comments(mut self) -> ExpansionRule {
|
||||||
|
self.allow_comments = false;
|
||||||
|
self
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn expand_atom<'me, 'content>(
|
pub fn expand_atom<'me, 'content>(
|
||||||
@ -574,6 +629,17 @@ fn expand_atom_inner<'me, 'content>(
|
|||||||
.into_atomic_token(error.span));
|
.into_atomic_token(error.span));
|
||||||
}
|
}
|
||||||
|
|
||||||
|
TokenNode::Separator(span) if rule.allow_separator => {
|
||||||
|
peeked.commit();
|
||||||
|
return Ok(UnspannedAtomicToken::Separator { text: *span }.into_atomic_token(span));
|
||||||
|
}
|
||||||
|
|
||||||
|
TokenNode::Comment(comment) if rule.allow_comments => {
|
||||||
|
peeked.commit();
|
||||||
|
return Ok(UnspannedAtomicToken::Comment { body: comment.text }
|
||||||
|
.into_atomic_token(comment.span()));
|
||||||
|
}
|
||||||
|
|
||||||
// [ ... ]
|
// [ ... ]
|
||||||
TokenNode::Delimited(Spanned {
|
TokenNode::Delimited(Spanned {
|
||||||
item:
|
item:
|
||||||
@ -645,8 +711,16 @@ fn expand_atom_inner<'me, 'content>(
|
|||||||
// First, the error cases. Each error case corresponds to a expansion rule
|
// First, the error cases. Each error case corresponds to a expansion rule
|
||||||
// flag that can be used to allow the case
|
// flag that can be used to allow the case
|
||||||
|
|
||||||
// rule.allow_operator
|
// rule.allow_cmp_operator
|
||||||
UnspannedToken::Operator(_) if !rule.allow_operator => return Err(err.error()),
|
UnspannedToken::CompareOperator(_) if !rule.allow_cmp_operator => {
|
||||||
|
return Err(err.error())
|
||||||
|
}
|
||||||
|
|
||||||
|
// rule.allow_eval_operator
|
||||||
|
UnspannedToken::EvaluationOperator(_) if !rule.allow_eval_operator => {
|
||||||
|
return Err(err.error())
|
||||||
|
}
|
||||||
|
|
||||||
// rule.allow_external_command
|
// rule.allow_external_command
|
||||||
UnspannedToken::ExternalCommand(_) if !rule.allow_external_command => {
|
UnspannedToken::ExternalCommand(_) if !rule.allow_external_command => {
|
||||||
return Err(ParseError::mismatch(
|
return Err(ParseError::mismatch(
|
||||||
@ -665,8 +739,15 @@ fn expand_atom_inner<'me, 'content>(
|
|||||||
UnspannedToken::Number(number) => {
|
UnspannedToken::Number(number) => {
|
||||||
UnspannedAtomicToken::Number { number }.into_atomic_token(token_span)
|
UnspannedAtomicToken::Number { number }.into_atomic_token(token_span)
|
||||||
}
|
}
|
||||||
UnspannedToken::Operator(_) => {
|
UnspannedToken::CompareOperator(_) => {
|
||||||
UnspannedAtomicToken::Operator { text: token_span }.into_atomic_token(token_span)
|
UnspannedAtomicToken::CompareOperator { text: token_span }
|
||||||
|
.into_atomic_token(token_span)
|
||||||
|
}
|
||||||
|
UnspannedToken::EvaluationOperator(EvaluationOperator::Dot) => {
|
||||||
|
UnspannedAtomicToken::Dot { text: token_span }.into_atomic_token(token_span)
|
||||||
|
}
|
||||||
|
UnspannedToken::EvaluationOperator(EvaluationOperator::DotDot) => {
|
||||||
|
UnspannedAtomicToken::DotDot { text: token_span }.into_atomic_token(token_span)
|
||||||
}
|
}
|
||||||
UnspannedToken::String(body) => {
|
UnspannedToken::String(body) => {
|
||||||
UnspannedAtomicToken::String { body }.into_atomic_token(token_span)
|
UnspannedAtomicToken::String { body }.into_atomic_token(token_span)
|
@ -1,10 +1,9 @@
|
|||||||
use crate::parser::hir::syntax_shape::{
|
use crate::hir::syntax_shape::{
|
||||||
color_syntax, expand_syntax, ColorSyntax, ExpandContext, ExpressionListShape, TokenNode,
|
color_syntax, expand_syntax, ColorSyntax, ExpandContext, ExpressionListShape, TokenNode,
|
||||||
};
|
};
|
||||||
use crate::parser::{hir, hir::TokensIterator, Delimiter, FlatShape};
|
use crate::{hir, hir::TokensIterator, Delimiter, FlatShape};
|
||||||
use crate::prelude::*;
|
use nu_errors::ParseError;
|
||||||
#[cfg(not(coloring_in_tokens))]
|
use nu_source::{Span, SpannedItem, Tag};
|
||||||
use nu_source::Spanned;
|
|
||||||
|
|
||||||
pub fn expand_delimited_square(
|
pub fn expand_delimited_square(
|
||||||
children: &Vec<TokenNode>,
|
children: &Vec<TokenNode>,
|
||||||
@ -21,21 +20,6 @@ pub fn expand_delimited_square(
|
|||||||
))
|
))
|
||||||
}
|
}
|
||||||
|
|
||||||
#[cfg(not(coloring_in_tokens))]
|
|
||||||
pub fn color_delimited_square(
|
|
||||||
(open, close): (Span, Span),
|
|
||||||
children: &Vec<TokenNode>,
|
|
||||||
span: Span,
|
|
||||||
context: &ExpandContext,
|
|
||||||
shapes: &mut Vec<Spanned<FlatShape>>,
|
|
||||||
) {
|
|
||||||
shapes.push(FlatShape::OpenDelimiter(Delimiter::Square).spanned(open));
|
|
||||||
let mut tokens = TokensIterator::new(&children, span, context.source.clone(), false);
|
|
||||||
let _list = color_syntax(&ExpressionListShape, &mut tokens, context, shapes);
|
|
||||||
shapes.push(FlatShape::CloseDelimiter(Delimiter::Square).spanned(close));
|
|
||||||
}
|
|
||||||
|
|
||||||
#[cfg(coloring_in_tokens)]
|
|
||||||
pub fn color_delimited_square(
|
pub fn color_delimited_square(
|
||||||
(open, close): (Span, Span),
|
(open, close): (Span, Span),
|
||||||
token_nodes: &mut TokensIterator,
|
token_nodes: &mut TokensIterator,
|
||||||
@ -50,24 +34,6 @@ pub fn color_delimited_square(
|
|||||||
#[derive(Debug, Copy, Clone)]
|
#[derive(Debug, Copy, Clone)]
|
||||||
pub struct DelimitedShape;
|
pub struct DelimitedShape;
|
||||||
|
|
||||||
#[cfg(not(coloring_in_tokens))]
|
|
||||||
impl ColorSyntax for DelimitedShape {
|
|
||||||
type Info = ();
|
|
||||||
type Input = (Delimiter, Span, Span);
|
|
||||||
fn color_syntax<'a, 'b>(
|
|
||||||
&self,
|
|
||||||
(delimiter, open, close): &(Delimiter, Span, Span),
|
|
||||||
token_nodes: &'b mut TokensIterator<'a>,
|
|
||||||
context: &ExpandContext,
|
|
||||||
shapes: &mut Vec<Spanned<FlatShape>>,
|
|
||||||
) -> Self::Info {
|
|
||||||
shapes.push(FlatShape::OpenDelimiter(*delimiter).spanned(*open));
|
|
||||||
color_syntax(&ExpressionListShape, token_nodes, context, shapes);
|
|
||||||
shapes.push(FlatShape::CloseDelimiter(*delimiter).spanned(*close));
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[cfg(coloring_in_tokens)]
|
|
||||||
impl ColorSyntax for DelimitedShape {
|
impl ColorSyntax for DelimitedShape {
|
||||||
type Info = ();
|
type Info = ();
|
||||||
type Input = (Delimiter, Span, Span);
|
type Input = (Delimiter, Span, Span);
|
@ -1,56 +1,16 @@
|
|||||||
use crate::parser::hir::syntax_shape::expression::atom::{
|
use crate::hir::syntax_shape::expression::atom::{
|
||||||
expand_atom, ExpansionRule, UnspannedAtomicToken,
|
expand_atom, ExpansionRule, UnspannedAtomicToken,
|
||||||
};
|
};
|
||||||
use crate::parser::hir::syntax_shape::{
|
use crate::hir::syntax_shape::{
|
||||||
expression::expand_file_path, ExpandContext, ExpandExpression, FallibleColorSyntax, FlatShape,
|
expression::expand_file_path, ExpandContext, ExpandExpression, FallibleColorSyntax, FlatShape,
|
||||||
ParseError,
|
|
||||||
};
|
};
|
||||||
use crate::parser::{hir, hir::TokensIterator};
|
use crate::{hir, hir::TokensIterator};
|
||||||
use crate::prelude::*;
|
use nu_errors::{ParseError, ShellError};
|
||||||
|
use nu_source::SpannedItem;
|
||||||
|
|
||||||
#[derive(Debug, Copy, Clone)]
|
#[derive(Debug, Copy, Clone)]
|
||||||
pub struct FilePathShape;
|
pub struct FilePathShape;
|
||||||
|
|
||||||
#[cfg(not(coloring_in_tokens))]
|
|
||||||
impl FallibleColorSyntax for FilePathShape {
|
|
||||||
type Info = ();
|
|
||||||
type Input = ();
|
|
||||||
|
|
||||||
fn color_syntax<'a, 'b>(
|
|
||||||
&self,
|
|
||||||
_input: &(),
|
|
||||||
token_nodes: &'b mut TokensIterator<'a>,
|
|
||||||
context: &ExpandContext,
|
|
||||||
shapes: &mut Vec<nu_source::Spanned<FlatShape>>,
|
|
||||||
) -> Result<(), ShellError> {
|
|
||||||
let atom = expand_atom(
|
|
||||||
token_nodes,
|
|
||||||
"file path",
|
|
||||||
context,
|
|
||||||
ExpansionRule::permissive(),
|
|
||||||
);
|
|
||||||
|
|
||||||
let atom = match atom {
|
|
||||||
Err(_) => return Ok(()),
|
|
||||||
Ok(atom) => atom,
|
|
||||||
};
|
|
||||||
|
|
||||||
match &atom.unspanned {
|
|
||||||
UnspannedAtomicToken::Word { .. }
|
|
||||||
| UnspannedAtomicToken::String { .. }
|
|
||||||
| UnspannedAtomicToken::Number { .. }
|
|
||||||
| UnspannedAtomicToken::Size { .. } => {
|
|
||||||
shapes.push(FlatShape::Path.spanned(atom.span));
|
|
||||||
}
|
|
||||||
|
|
||||||
_ => atom.color_tokens(shapes),
|
|
||||||
}
|
|
||||||
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[cfg(coloring_in_tokens)]
|
|
||||||
impl FallibleColorSyntax for FilePathShape {
|
impl FallibleColorSyntax for FilePathShape {
|
||||||
type Info = ();
|
type Info = ();
|
||||||
type Input = ();
|
type Input = ();
|
||||||
@ -102,10 +62,17 @@ impl ExpandExpression for FilePathShape {
|
|||||||
token_nodes: &mut TokensIterator<'_>,
|
token_nodes: &mut TokensIterator<'_>,
|
||||||
context: &ExpandContext,
|
context: &ExpandContext,
|
||||||
) -> Result<hir::Expression, ParseError> {
|
) -> Result<hir::Expression, ParseError> {
|
||||||
let atom = expand_atom(token_nodes, "file path", context, ExpansionRule::new())?;
|
let atom = expand_atom(
|
||||||
|
token_nodes,
|
||||||
|
"file path",
|
||||||
|
context,
|
||||||
|
ExpansionRule::new().allow_external_word(),
|
||||||
|
)?;
|
||||||
|
|
||||||
match atom.unspanned {
|
match atom.unspanned {
|
||||||
UnspannedAtomicToken::Word { text: body } | UnspannedAtomicToken::String { body } => {
|
UnspannedAtomicToken::Word { text: body }
|
||||||
|
| UnspannedAtomicToken::ExternalWord { text: body }
|
||||||
|
| UnspannedAtomicToken::String { body } => {
|
||||||
let path = expand_file_path(body.slice(context.source), context);
|
let path = expand_file_path(body.slice(context.source), context);
|
||||||
return Ok(hir::Expression::file_path(path, atom.span));
|
return Ok(hir::Expression::file_path(path, atom.span));
|
||||||
}
|
}
|
@ -1,7 +1,4 @@
|
|||||||
use crate::errors::ParseError;
|
use crate::{
|
||||||
#[cfg(not(coloring_in_tokens))]
|
|
||||||
use crate::parser::hir::syntax_shape::FlatShape;
|
|
||||||
use crate::parser::{
|
|
||||||
hir,
|
hir,
|
||||||
hir::syntax_shape::{
|
hir::syntax_shape::{
|
||||||
color_fallible_syntax, color_syntax, expand_atom, expand_expr, maybe_spaced, spaced,
|
color_fallible_syntax, color_syntax, expand_atom, expand_expr, maybe_spaced, spaced,
|
||||||
@ -10,6 +7,7 @@ use crate::parser::{
|
|||||||
},
|
},
|
||||||
hir::TokensIterator,
|
hir::TokensIterator,
|
||||||
};
|
};
|
||||||
|
use nu_errors::ParseError;
|
||||||
use nu_source::{b, DebugDocBuilder, HasSpan, PrettyDebugWithSource, Span, Spanned, SpannedItem};
|
use nu_source::{b, DebugDocBuilder, HasSpan, PrettyDebugWithSource, Span, Spanned, SpannedItem};
|
||||||
|
|
||||||
#[derive(Debug, Clone)]
|
#[derive(Debug, Clone)]
|
||||||
@ -76,77 +74,6 @@ impl ExpandSyntax for ExpressionListShape {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
#[cfg(not(coloring_in_tokens))]
|
|
||||||
impl ColorSyntax for ExpressionListShape {
|
|
||||||
type Info = ();
|
|
||||||
type Input = ();
|
|
||||||
|
|
||||||
/// The intent of this method is to fully color an expression list shape infallibly.
|
|
||||||
/// This means that if we can't expand a token into an expression, we fall back to
|
|
||||||
/// a simpler coloring strategy.
|
|
||||||
///
|
|
||||||
/// This would apply to something like `where x >`, which includes an incomplete
|
|
||||||
/// binary operator. Since we will fail to process it as a binary operator, we'll
|
|
||||||
/// fall back to a simpler coloring and move on.
|
|
||||||
fn color_syntax<'a, 'b>(
|
|
||||||
&self,
|
|
||||||
_input: &(),
|
|
||||||
token_nodes: &'b mut TokensIterator<'a>,
|
|
||||||
context: &ExpandContext,
|
|
||||||
shapes: &mut Vec<Spanned<FlatShape>>,
|
|
||||||
) {
|
|
||||||
// We encountered a parsing error and will continue with simpler coloring ("backoff
|
|
||||||
// coloring mode")
|
|
||||||
let mut backoff = false;
|
|
||||||
|
|
||||||
// Consume any leading whitespace
|
|
||||||
color_syntax(&MaybeSpaceShape, token_nodes, context, shapes);
|
|
||||||
|
|
||||||
loop {
|
|
||||||
// If we reached the very end of the token stream, we're done
|
|
||||||
if token_nodes.at_end() {
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
if backoff {
|
|
||||||
let len = shapes.len();
|
|
||||||
|
|
||||||
// If we previously encountered a parsing error, use backoff coloring mode
|
|
||||||
color_syntax(&SimplestExpression, token_nodes, context, shapes);
|
|
||||||
|
|
||||||
if len == shapes.len() && !token_nodes.at_end() {
|
|
||||||
// This should never happen, but if it does, a panic is better than an infinite loop
|
|
||||||
panic!("Unexpected tokens left that couldn't be colored even with SimplestExpression")
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
// Try to color the head of the stream as an expression
|
|
||||||
match color_fallible_syntax(&AnyExpressionShape, token_nodes, context, shapes) {
|
|
||||||
// If no expression was found, switch to backoff coloring mode
|
|
||||||
Err(_) => {
|
|
||||||
backoff = true;
|
|
||||||
continue;
|
|
||||||
}
|
|
||||||
Ok(_) => {}
|
|
||||||
}
|
|
||||||
|
|
||||||
// If an expression was found, consume a space
|
|
||||||
match color_fallible_syntax(&SpaceShape, token_nodes, context, shapes) {
|
|
||||||
Err(_) => {
|
|
||||||
// If no space was found, we're either at the end or there's an error.
|
|
||||||
// Either way, switch to backoff coloring mode. If we're at the end
|
|
||||||
// it won't have any consequences.
|
|
||||||
backoff = true;
|
|
||||||
}
|
|
||||||
Ok(_) => {
|
|
||||||
// Otherwise, move on to the next expression
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[cfg(coloring_in_tokens)]
|
|
||||||
impl ColorSyntax for ExpressionListShape {
|
impl ColorSyntax for ExpressionListShape {
|
||||||
type Info = ();
|
type Info = ();
|
||||||
type Input = ();
|
type Input = ();
|
||||||
@ -223,35 +150,6 @@ impl ColorSyntax for ExpressionListShape {
|
|||||||
#[derive(Debug, Copy, Clone)]
|
#[derive(Debug, Copy, Clone)]
|
||||||
pub struct BackoffColoringMode;
|
pub struct BackoffColoringMode;
|
||||||
|
|
||||||
#[cfg(not(coloring_in_tokens))]
|
|
||||||
impl ColorSyntax for BackoffColoringMode {
|
|
||||||
type Info = ();
|
|
||||||
type Input = ();
|
|
||||||
|
|
||||||
fn color_syntax<'a, 'b>(
|
|
||||||
&self,
|
|
||||||
_input: &Self::Input,
|
|
||||||
token_nodes: &'b mut TokensIterator<'a>,
|
|
||||||
context: &ExpandContext,
|
|
||||||
shapes: &mut Vec<Spanned<FlatShape>>,
|
|
||||||
) -> Self::Info {
|
|
||||||
loop {
|
|
||||||
if token_nodes.at_end() {
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
|
|
||||||
let len = shapes.len();
|
|
||||||
color_syntax(&SimplestExpression, token_nodes, context, shapes);
|
|
||||||
|
|
||||||
if len == shapes.len() && !token_nodes.at_end() {
|
|
||||||
// This shouldn't happen, but if it does, a panic is better than an infinite loop
|
|
||||||
panic!("SimplestExpression failed to consume any tokens, but it's not at the end. This is unexpected\n== token nodes==\n{:#?}\n\n== shapes ==\n{:#?}", token_nodes, shapes);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[cfg(coloring_in_tokens)]
|
|
||||||
impl ColorSyntax for BackoffColoringMode {
|
impl ColorSyntax for BackoffColoringMode {
|
||||||
type Info = ();
|
type Info = ();
|
||||||
type Input = ();
|
type Input = ();
|
||||||
@ -288,33 +186,6 @@ impl ColorSyntax for BackoffColoringMode {
|
|||||||
#[derive(Debug, Copy, Clone)]
|
#[derive(Debug, Copy, Clone)]
|
||||||
pub struct SimplestExpression;
|
pub struct SimplestExpression;
|
||||||
|
|
||||||
#[cfg(not(coloring_in_tokens))]
|
|
||||||
impl ColorSyntax for SimplestExpression {
|
|
||||||
type Info = ();
|
|
||||||
type Input = ();
|
|
||||||
|
|
||||||
fn color_syntax<'a, 'b>(
|
|
||||||
&self,
|
|
||||||
_input: &(),
|
|
||||||
token_nodes: &'b mut TokensIterator<'a>,
|
|
||||||
context: &ExpandContext,
|
|
||||||
shapes: &mut Vec<Spanned<FlatShape>>,
|
|
||||||
) {
|
|
||||||
let atom = expand_atom(
|
|
||||||
token_nodes,
|
|
||||||
"any token",
|
|
||||||
context,
|
|
||||||
ExpansionRule::permissive(),
|
|
||||||
);
|
|
||||||
|
|
||||||
match atom {
|
|
||||||
Err(_) => {}
|
|
||||||
Ok(atom) => atom.color_tokens(shapes),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[cfg(coloring_in_tokens)]
|
|
||||||
impl ColorSyntax for SimplestExpression {
|
impl ColorSyntax for SimplestExpression {
|
||||||
type Info = ();
|
type Info = ();
|
||||||
type Input = ();
|
type Input = ();
|
@ -1,15 +1,15 @@
|
|||||||
use crate::parser::hir::syntax_shape::{
|
use crate::hir::syntax_shape::{
|
||||||
expand_atom, parse_single_node, ExpandContext, ExpandExpression, ExpansionRule,
|
expand_atom, parse_single_node, ExpandContext, ExpandExpression, ExpansionRule,
|
||||||
FallibleColorSyntax, FlatShape, ParseError, TestSyntax,
|
FallibleColorSyntax, FlatShape, TestSyntax,
|
||||||
};
|
};
|
||||||
use crate::parser::hir::tokens_iterator::Peeked;
|
use crate::hir::tokens_iterator::Peeked;
|
||||||
use crate::parser::{
|
use crate::parse::tokens::UnspannedToken;
|
||||||
|
use crate::{
|
||||||
hir,
|
hir,
|
||||||
hir::{RawNumber, TokensIterator},
|
hir::{RawNumber, TokensIterator},
|
||||||
UnspannedToken,
|
|
||||||
};
|
};
|
||||||
use crate::prelude::*;
|
use nu_errors::{ParseError, ShellError};
|
||||||
use nu_source::Spanned;
|
use nu_source::{Spanned, SpannedItem};
|
||||||
|
|
||||||
#[derive(Debug, Copy, Clone)]
|
#[derive(Debug, Copy, Clone)]
|
||||||
pub struct NumberShape;
|
pub struct NumberShape;
|
||||||
@ -26,9 +26,9 @@ impl ExpandExpression for NumberShape {
|
|||||||
) -> Result<hir::Expression, ParseError> {
|
) -> Result<hir::Expression, ParseError> {
|
||||||
parse_single_node(token_nodes, "Number", |token, token_span, err| {
|
parse_single_node(token_nodes, "Number", |token, token_span, err| {
|
||||||
Ok(match token {
|
Ok(match token {
|
||||||
UnspannedToken::GlobPattern | UnspannedToken::Operator(..) => {
|
UnspannedToken::GlobPattern
|
||||||
return Err(err.error())
|
| UnspannedToken::CompareOperator(..)
|
||||||
}
|
| UnspannedToken::EvaluationOperator(..) => return Err(err.error()),
|
||||||
UnspannedToken::Variable(tag) if tag.slice(context.source) == "it" => {
|
UnspannedToken::Variable(tag) if tag.slice(context.source) == "it" => {
|
||||||
hir::Expression::it_variable(tag, token_span)
|
hir::Expression::it_variable(tag, token_span)
|
||||||
}
|
}
|
||||||
@ -52,37 +52,6 @@ impl ExpandExpression for NumberShape {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
#[cfg(not(coloring_in_tokens))]
|
|
||||||
impl FallibleColorSyntax for NumberShape {
|
|
||||||
type Info = ();
|
|
||||||
type Input = ();
|
|
||||||
|
|
||||||
fn color_syntax<'a, 'b>(
|
|
||||||
&self,
|
|
||||||
_input: &(),
|
|
||||||
token_nodes: &'b mut TokensIterator<'a>,
|
|
||||||
context: &ExpandContext,
|
|
||||||
shapes: &mut Vec<Spanned<FlatShape>>,
|
|
||||||
) -> Result<(), ShellError> {
|
|
||||||
let atom = token_nodes.spanned(|token_nodes| {
|
|
||||||
expand_atom(token_nodes, "number", context, ExpansionRule::permissive())
|
|
||||||
});
|
|
||||||
|
|
||||||
let atom = match atom {
|
|
||||||
Spanned { item: Err(_), span } => {
|
|
||||||
shapes.push(FlatShape::Error.spanned(span));
|
|
||||||
return Ok(());
|
|
||||||
}
|
|
||||||
Spanned { item: Ok(atom), .. } => atom,
|
|
||||||
};
|
|
||||||
|
|
||||||
atom.color_tokens(shapes);
|
|
||||||
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[cfg(coloring_in_tokens)]
|
|
||||||
impl FallibleColorSyntax for NumberShape {
|
impl FallibleColorSyntax for NumberShape {
|
||||||
type Info = ();
|
type Info = ();
|
||||||
type Input = ();
|
type Input = ();
|
||||||
@ -131,7 +100,8 @@ impl ExpandExpression for IntShape {
|
|||||||
parse_single_node(token_nodes, "Integer", |token, token_span, err| {
|
parse_single_node(token_nodes, "Integer", |token, token_span, err| {
|
||||||
Ok(match token {
|
Ok(match token {
|
||||||
UnspannedToken::GlobPattern
|
UnspannedToken::GlobPattern
|
||||||
| UnspannedToken::Operator(..)
|
| UnspannedToken::CompareOperator(..)
|
||||||
|
| UnspannedToken::EvaluationOperator(..)
|
||||||
| UnspannedToken::ExternalWord => return Err(err.error()),
|
| UnspannedToken::ExternalWord => return Err(err.error()),
|
||||||
UnspannedToken::Variable(span) if span.slice(context.source) == "it" => {
|
UnspannedToken::Variable(span) if span.slice(context.source) == "it" => {
|
||||||
hir::Expression::it_variable(span, token_span)
|
hir::Expression::it_variable(span, token_span)
|
||||||
@ -151,37 +121,6 @@ impl ExpandExpression for IntShape {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
#[cfg(not(coloring_in_tokens))]
|
|
||||||
impl FallibleColorSyntax for IntShape {
|
|
||||||
type Info = ();
|
|
||||||
type Input = ();
|
|
||||||
|
|
||||||
fn color_syntax<'a, 'b>(
|
|
||||||
&self,
|
|
||||||
_input: &(),
|
|
||||||
token_nodes: &'b mut TokensIterator<'a>,
|
|
||||||
context: &ExpandContext,
|
|
||||||
shapes: &mut Vec<Spanned<FlatShape>>,
|
|
||||||
) -> Result<(), ShellError> {
|
|
||||||
let atom = token_nodes.spanned(|token_nodes| {
|
|
||||||
expand_atom(token_nodes, "integer", context, ExpansionRule::permissive())
|
|
||||||
});
|
|
||||||
|
|
||||||
let atom = match atom {
|
|
||||||
Spanned { item: Err(_), span } => {
|
|
||||||
shapes.push(FlatShape::Error.spanned(span));
|
|
||||||
return Ok(());
|
|
||||||
}
|
|
||||||
Spanned { item: Ok(atom), .. } => atom,
|
|
||||||
};
|
|
||||||
|
|
||||||
atom.color_tokens(shapes);
|
|
||||||
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[cfg(coloring_in_tokens)]
|
|
||||||
impl FallibleColorSyntax for IntShape {
|
impl FallibleColorSyntax for IntShape {
|
||||||
type Info = ();
|
type Info = ();
|
||||||
type Input = ();
|
type Input = ();
|
@ -1,44 +1,18 @@
|
|||||||
use crate::parser::hir::syntax_shape::{
|
use crate::hir::syntax_shape::{
|
||||||
expand_atom, expand_bare, expression::expand_file_path, ExpandContext, ExpandExpression,
|
expand_atom, expand_bare, expression::expand_file_path, ExpandContext, ExpandExpression,
|
||||||
ExpandSyntax, ExpansionRule, FallibleColorSyntax, FlatShape, ParseError, UnspannedAtomicToken,
|
ExpandSyntax, ExpansionRule, FallibleColorSyntax, FlatShape, UnspannedAtomicToken,
|
||||||
};
|
};
|
||||||
use crate::parser::parse::tokens::Token;
|
use crate::parse::operator::EvaluationOperator;
|
||||||
use crate::parser::{hir, hir::TokensIterator, Operator, TokenNode, UnspannedToken};
|
use crate::parse::tokens::{Token, UnspannedToken};
|
||||||
use crate::prelude::*;
|
use crate::{hir, hir::TokensIterator, TokenNode};
|
||||||
#[cfg(not(coloring_in_tokens))]
|
use nu_errors::{ParseError, ShellError};
|
||||||
use nu_source::Spanned;
|
|
||||||
|
use nu_protocol::ShellTypeName;
|
||||||
|
use nu_source::{Span, SpannedItem};
|
||||||
|
|
||||||
#[derive(Debug, Copy, Clone)]
|
#[derive(Debug, Copy, Clone)]
|
||||||
pub struct PatternShape;
|
pub struct PatternShape;
|
||||||
|
|
||||||
#[cfg(not(coloring_in_tokens))]
|
|
||||||
impl FallibleColorSyntax for PatternShape {
|
|
||||||
type Info = ();
|
|
||||||
type Input = ();
|
|
||||||
|
|
||||||
fn color_syntax<'a, 'b>(
|
|
||||||
&self,
|
|
||||||
_input: &(),
|
|
||||||
token_nodes: &'b mut TokensIterator<'a>,
|
|
||||||
context: &ExpandContext,
|
|
||||||
shapes: &mut Vec<Spanned<FlatShape>>,
|
|
||||||
) -> Result<(), ShellError> {
|
|
||||||
token_nodes.atomic(|token_nodes| {
|
|
||||||
let atom = expand_atom(token_nodes, "pattern", context, ExpansionRule::permissive())?;
|
|
||||||
|
|
||||||
match &atom.unspanned {
|
|
||||||
UnspannedAtomicToken::GlobPattern { .. } | UnspannedAtomicToken::Word { .. } => {
|
|
||||||
shapes.push(FlatShape::GlobPattern.spanned(atom.span));
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
|
|
||||||
_ => Err(ShellError::type_error("pattern", atom.spanned_type_name())),
|
|
||||||
}
|
|
||||||
})
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[cfg(coloring_in_tokens)]
|
|
||||||
impl FallibleColorSyntax for PatternShape {
|
impl FallibleColorSyntax for PatternShape {
|
||||||
type Info = ();
|
type Info = ();
|
||||||
type Input = ();
|
type Input = ();
|
||||||
@ -81,11 +55,17 @@ impl ExpandExpression for PatternShape {
|
|||||||
token_nodes: &mut TokensIterator<'_>,
|
token_nodes: &mut TokensIterator<'_>,
|
||||||
context: &ExpandContext,
|
context: &ExpandContext,
|
||||||
) -> Result<hir::Expression, ParseError> {
|
) -> Result<hir::Expression, ParseError> {
|
||||||
let atom = expand_atom(token_nodes, "pattern", context, ExpansionRule::new())?;
|
let atom = expand_atom(
|
||||||
|
token_nodes,
|
||||||
|
"pattern",
|
||||||
|
context,
|
||||||
|
ExpansionRule::new().allow_external_word(),
|
||||||
|
)?;
|
||||||
|
|
||||||
match atom.unspanned {
|
match atom.unspanned {
|
||||||
UnspannedAtomicToken::Word { text: body }
|
UnspannedAtomicToken::Word { text: body }
|
||||||
| UnspannedAtomicToken::String { body }
|
| UnspannedAtomicToken::String { body }
|
||||||
|
| UnspannedAtomicToken::ExternalWord { text: body }
|
||||||
| UnspannedAtomicToken::GlobPattern { pattern: body } => {
|
| UnspannedAtomicToken::GlobPattern { pattern: body } => {
|
||||||
let path = expand_file_path(body.slice(context.source), context);
|
let path = expand_file_path(body.slice(context.source), context);
|
||||||
return Ok(hir::Expression::pattern(path.to_string_lossy(), atom.span));
|
return Ok(hir::Expression::pattern(path.to_string_lossy(), atom.span));
|
||||||
@ -116,7 +96,7 @@ impl ExpandSyntax for BarePatternShape {
|
|||||||
..
|
..
|
||||||
})
|
})
|
||||||
| TokenNode::Token(Token {
|
| TokenNode::Token(Token {
|
||||||
unspanned: UnspannedToken::Operator(Operator::Dot),
|
unspanned: UnspannedToken::EvaluationOperator(EvaluationOperator::Dot),
|
||||||
..
|
..
|
||||||
})
|
})
|
||||||
| TokenNode::Token(Token {
|
| TokenNode::Token(Token {
|
103
crates/nu-parser/src/hir/syntax_shape/expression/range.rs
Normal file
103
crates/nu-parser/src/hir/syntax_shape/expression/range.rs
Normal file
@ -0,0 +1,103 @@
|
|||||||
|
use crate::hir::syntax_shape::expression::UnspannedAtomicToken;
|
||||||
|
use crate::hir::syntax_shape::{
|
||||||
|
color_fallible_syntax, expand_atom, expand_expr, AnyExpressionShape, ExpandContext,
|
||||||
|
ExpandExpression, ExpansionRule, FallibleColorSyntax, FlatShape,
|
||||||
|
};
|
||||||
|
use crate::parse::operator::EvaluationOperator;
|
||||||
|
use crate::parse::token_tree::TokenNode;
|
||||||
|
use crate::parse::tokens::{Token, UnspannedToken};
|
||||||
|
use crate::{hir, hir::TokensIterator};
|
||||||
|
use nu_errors::{ParseError, ShellError};
|
||||||
|
use nu_protocol::SpannedTypeName;
|
||||||
|
use nu_source::SpannedItem;
|
||||||
|
|
||||||
|
#[derive(Debug, Copy, Clone)]
|
||||||
|
pub struct RangeShape;
|
||||||
|
|
||||||
|
impl ExpandExpression for RangeShape {
|
||||||
|
fn name(&self) -> &'static str {
|
||||||
|
"range"
|
||||||
|
}
|
||||||
|
|
||||||
|
fn expand_expr<'a, 'b>(
|
||||||
|
&self,
|
||||||
|
token_nodes: &'b mut TokensIterator<'a>,
|
||||||
|
context: &ExpandContext,
|
||||||
|
) -> Result<hir::Expression, ParseError> {
|
||||||
|
token_nodes.atomic_parse(|token_nodes| {
|
||||||
|
let left = expand_expr(&AnyExpressionShape, token_nodes, context)?;
|
||||||
|
|
||||||
|
let atom = expand_atom(
|
||||||
|
token_nodes,
|
||||||
|
"..",
|
||||||
|
context,
|
||||||
|
ExpansionRule::new().allow_eval_operator(),
|
||||||
|
)?;
|
||||||
|
|
||||||
|
let span = match atom.unspanned {
|
||||||
|
UnspannedAtomicToken::DotDot { text } => text,
|
||||||
|
_ => return Err(ParseError::mismatch("..", atom.spanned_type_name())),
|
||||||
|
};
|
||||||
|
|
||||||
|
let right = expand_expr(&AnyExpressionShape, token_nodes, context)?;
|
||||||
|
|
||||||
|
Ok(hir::Expression::range(left, span, right))
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl FallibleColorSyntax for RangeShape {
|
||||||
|
type Info = ();
|
||||||
|
type Input = ();
|
||||||
|
|
||||||
|
fn name(&self) -> &'static str {
|
||||||
|
"RangeShape"
|
||||||
|
}
|
||||||
|
|
||||||
|
fn color_syntax<'a, 'b>(
|
||||||
|
&self,
|
||||||
|
_input: &(),
|
||||||
|
token_nodes: &'b mut TokensIterator<'a>,
|
||||||
|
context: &ExpandContext,
|
||||||
|
) -> Result<(), ShellError> {
|
||||||
|
token_nodes.atomic_parse(|token_nodes| {
|
||||||
|
color_fallible_syntax(&AnyExpressionShape, token_nodes, context)?;
|
||||||
|
color_fallible_syntax(&DotDotShape, token_nodes, context)?;
|
||||||
|
color_fallible_syntax(&AnyExpressionShape, token_nodes, context)
|
||||||
|
})?;
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Copy, Clone)]
|
||||||
|
struct DotDotShape;
|
||||||
|
|
||||||
|
impl FallibleColorSyntax for DotDotShape {
|
||||||
|
type Info = ();
|
||||||
|
type Input = ();
|
||||||
|
|
||||||
|
fn name(&self) -> &'static str {
|
||||||
|
".."
|
||||||
|
}
|
||||||
|
|
||||||
|
fn color_syntax<'a, 'b>(
|
||||||
|
&self,
|
||||||
|
_input: &Self::Input,
|
||||||
|
token_nodes: &'b mut TokensIterator<'a>,
|
||||||
|
_context: &ExpandContext,
|
||||||
|
) -> Result<Self::Info, ShellError> {
|
||||||
|
let peeked = token_nodes.peek_any().not_eof("..")?;
|
||||||
|
match &peeked.node {
|
||||||
|
TokenNode::Token(Token {
|
||||||
|
unspanned: UnspannedToken::EvaluationOperator(EvaluationOperator::DotDot),
|
||||||
|
span,
|
||||||
|
}) => {
|
||||||
|
peeked.commit();
|
||||||
|
token_nodes.color_shape(FlatShape::DotDot.spanned(span));
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
token => Err(ShellError::type_error("..", token.spanned_type_name())),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
@ -1,48 +1,16 @@
|
|||||||
use crate::parser::hir::syntax_shape::{
|
use crate::hir::syntax_shape::{
|
||||||
expand_atom, expand_variable, parse_single_node, AtomicToken, ExpandContext, ExpandExpression,
|
expand_atom, expand_variable, parse_single_node, AtomicToken, ExpandContext, ExpandExpression,
|
||||||
ExpansionRule, FallibleColorSyntax, FlatShape, ParseError, TestSyntax, UnspannedAtomicToken,
|
ExpansionRule, FallibleColorSyntax, FlatShape, TestSyntax, UnspannedAtomicToken,
|
||||||
};
|
};
|
||||||
use crate::parser::hir::tokens_iterator::Peeked;
|
use crate::hir::tokens_iterator::Peeked;
|
||||||
use crate::parser::{hir, hir::TokensIterator, UnspannedToken};
|
use crate::parse::tokens::UnspannedToken;
|
||||||
use crate::prelude::*;
|
use crate::{hir, hir::TokensIterator};
|
||||||
#[cfg(not(coloring_in_tokens))]
|
use nu_errors::{ParseError, ShellError};
|
||||||
use nu_source::Spanned;
|
use nu_source::SpannedItem;
|
||||||
|
|
||||||
#[derive(Debug, Copy, Clone)]
|
#[derive(Debug, Copy, Clone)]
|
||||||
pub struct StringShape;
|
pub struct StringShape;
|
||||||
|
|
||||||
#[cfg(not(coloring_in_tokens))]
|
|
||||||
impl FallibleColorSyntax for StringShape {
|
|
||||||
type Info = ();
|
|
||||||
type Input = FlatShape;
|
|
||||||
|
|
||||||
fn color_syntax<'a, 'b>(
|
|
||||||
&self,
|
|
||||||
input: &FlatShape,
|
|
||||||
token_nodes: &'b mut TokensIterator<'a>,
|
|
||||||
context: &ExpandContext,
|
|
||||||
shapes: &mut Vec<Spanned<FlatShape>>,
|
|
||||||
) -> Result<(), ShellError> {
|
|
||||||
let atom = expand_atom(token_nodes, "string", context, ExpansionRule::permissive());
|
|
||||||
|
|
||||||
let atom = match atom {
|
|
||||||
Err(_) => return Ok(()),
|
|
||||||
Ok(atom) => atom,
|
|
||||||
};
|
|
||||||
|
|
||||||
match atom {
|
|
||||||
AtomicToken {
|
|
||||||
unspanned: UnspannedAtomicToken::String { .. },
|
|
||||||
span,
|
|
||||||
} => shapes.push((*input).spanned(span)),
|
|
||||||
other => other.color_tokens(shapes),
|
|
||||||
}
|
|
||||||
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[cfg(coloring_in_tokens)]
|
|
||||||
impl FallibleColorSyntax for StringShape {
|
impl FallibleColorSyntax for StringShape {
|
||||||
type Info = ();
|
type Info = ();
|
||||||
type Input = FlatShape;
|
type Input = FlatShape;
|
||||||
@ -89,7 +57,8 @@ impl ExpandExpression for StringShape {
|
|||||||
parse_single_node(token_nodes, "String", |token, token_span, err| {
|
parse_single_node(token_nodes, "String", |token, token_span, err| {
|
||||||
Ok(match token {
|
Ok(match token {
|
||||||
UnspannedToken::GlobPattern
|
UnspannedToken::GlobPattern
|
||||||
| UnspannedToken::Operator(..)
|
| UnspannedToken::CompareOperator(..)
|
||||||
|
| UnspannedToken::EvaluationOperator(..)
|
||||||
| UnspannedToken::ExternalWord => return Err(err.error()),
|
| UnspannedToken::ExternalWord => return Err(err.error()),
|
||||||
UnspannedToken::Variable(span) => {
|
UnspannedToken::Variable(span) => {
|
||||||
expand_variable(span, token_span, &context.source)
|
expand_variable(span, token_span, &context.source)
|
@ -1,15 +1,16 @@
|
|||||||
use crate::parser::hir::syntax_shape::{ExpandContext, ExpandSyntax, ParseError};
|
use crate::hir::syntax_shape::{ExpandContext, ExpandSyntax};
|
||||||
use crate::parser::parse::tokens::RawNumber;
|
use crate::parse::tokens::RawNumber;
|
||||||
use crate::parser::parse::tokens::Token;
|
use crate::parse::tokens::Token;
|
||||||
use crate::parser::parse::unit::Unit;
|
use crate::parse::tokens::UnspannedToken;
|
||||||
use crate::parser::{hir::TokensIterator, TokenNode, UnspannedToken};
|
use crate::parse::unit::Unit;
|
||||||
use crate::prelude::*;
|
use crate::{hir::TokensIterator, TokenNode};
|
||||||
use nom::branch::alt;
|
use nom::branch::alt;
|
||||||
use nom::bytes::complete::tag;
|
use nom::bytes::complete::tag;
|
||||||
use nom::character::complete::digit1;
|
use nom::character::complete::digit1;
|
||||||
use nom::combinator::{all_consuming, opt, value};
|
use nom::combinator::{all_consuming, opt, value};
|
||||||
use nom::IResult;
|
use nom::IResult;
|
||||||
use nu_source::{Span, Spanned};
|
use nu_errors::ParseError;
|
||||||
|
use nu_source::{b, DebugDocBuilder, HasSpan, PrettyDebugWithSource, Span, Spanned, SpannedItem};
|
||||||
|
|
||||||
#[derive(Debug, Clone)]
|
#[derive(Debug, Clone)]
|
||||||
pub struct UnitSyntax {
|
pub struct UnitSyntax {
|
@ -1,16 +1,19 @@
|
|||||||
use crate::parser::hir::path::PathMember;
|
use crate::hir::syntax_shape::{
|
||||||
use crate::parser::hir::syntax_shape::{
|
|
||||||
color_fallible_syntax, color_fallible_syntax_with, expand_atom, expand_expr, expand_syntax,
|
color_fallible_syntax, color_fallible_syntax_with, expand_atom, expand_expr, expand_syntax,
|
||||||
parse_single_node, AnyExpressionShape, BareShape, ExpandContext, ExpandExpression,
|
parse_single_node, AnyExpressionShape, BareShape, ExpandContext, ExpandExpression,
|
||||||
ExpandSyntax, ExpansionRule, FallibleColorSyntax, FlatShape, ParseError, Peeked, SkipSyntax,
|
ExpandSyntax, ExpansionRule, FallibleColorSyntax, FlatShape, ParseError, Peeked, SkipSyntax,
|
||||||
StringShape, TestSyntax, UnspannedAtomicToken, WhitespaceShape,
|
StringShape, TestSyntax, UnspannedAtomicToken, WhitespaceShape,
|
||||||
};
|
};
|
||||||
use crate::parser::{
|
use crate::parse::tokens::{RawNumber, UnspannedToken};
|
||||||
hir, hir::Expression, hir::TokensIterator, Operator, RawNumber, UnspannedToken,
|
use crate::{hir, hir::Expression, hir::TokensIterator, CompareOperator, EvaluationOperator};
|
||||||
|
use nu_errors::ShellError;
|
||||||
|
use nu_protocol::{PathMember, ShellTypeName};
|
||||||
|
use nu_source::{
|
||||||
|
b, DebugDocBuilder, HasSpan, PrettyDebug, PrettyDebugWithSource, Span, Spanned, SpannedItem,
|
||||||
|
Tag, Tagged, TaggedItem, Text,
|
||||||
};
|
};
|
||||||
use crate::prelude::*;
|
use num_bigint::BigInt;
|
||||||
use nu_source::{Spanned, Tagged};
|
use serde::{Deserialize, Serialize};
|
||||||
use serde::Serialize;
|
|
||||||
use std::str::FromStr;
|
use std::str::FromStr;
|
||||||
|
|
||||||
#[derive(Debug, Copy, Clone)]
|
#[derive(Debug, Copy, Clone)]
|
||||||
@ -54,48 +57,6 @@ impl ExpandExpression for VariablePathShape {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
#[cfg(not(coloring_in_tokens))]
|
|
||||||
impl FallibleColorSyntax for VariablePathShape {
|
|
||||||
type Info = ();
|
|
||||||
type Input = ();
|
|
||||||
|
|
||||||
fn color_syntax<'a, 'b>(
|
|
||||||
&self,
|
|
||||||
_input: &(),
|
|
||||||
token_nodes: &'b mut TokensIterator<'a>,
|
|
||||||
context: &ExpandContext,
|
|
||||||
shapes: &mut Vec<Spanned<FlatShape>>,
|
|
||||||
) -> Result<(), ShellError> {
|
|
||||||
token_nodes.atomic(|token_nodes| {
|
|
||||||
// If the head of the token stream is not a variable, fail
|
|
||||||
color_fallible_syntax(&VariableShape, token_nodes, context, shapes)?;
|
|
||||||
|
|
||||||
loop {
|
|
||||||
// look for a dot at the head of a stream
|
|
||||||
let dot = color_fallible_syntax_with(
|
|
||||||
&ColorableDotShape,
|
|
||||||
&FlatShape::Dot,
|
|
||||||
token_nodes,
|
|
||||||
context,
|
|
||||||
shapes,
|
|
||||||
);
|
|
||||||
|
|
||||||
// if there's no dot, we're done
|
|
||||||
match dot {
|
|
||||||
Err(_) => break,
|
|
||||||
Ok(_) => {}
|
|
||||||
}
|
|
||||||
|
|
||||||
// otherwise, look for a member, and if you don't find one, fail
|
|
||||||
color_fallible_syntax(&MemberShape, token_nodes, context, shapes)?;
|
|
||||||
}
|
|
||||||
|
|
||||||
Ok(())
|
|
||||||
})
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[cfg(coloring_in_tokens)]
|
|
||||||
impl FallibleColorSyntax for VariablePathShape {
|
impl FallibleColorSyntax for VariablePathShape {
|
||||||
type Info = ();
|
type Info = ();
|
||||||
type Input = ();
|
type Input = ();
|
||||||
@ -141,40 +102,6 @@ impl FallibleColorSyntax for VariablePathShape {
|
|||||||
#[derive(Debug, Copy, Clone)]
|
#[derive(Debug, Copy, Clone)]
|
||||||
pub struct PathTailShape;
|
pub struct PathTailShape;
|
||||||
|
|
||||||
#[cfg(not(coloring_in_tokens))]
|
|
||||||
/// The failure mode of `PathTailShape` is a dot followed by a non-member
|
|
||||||
impl FallibleColorSyntax for PathTailShape {
|
|
||||||
type Info = ();
|
|
||||||
type Input = ();
|
|
||||||
|
|
||||||
fn color_syntax<'a, 'b>(
|
|
||||||
&self,
|
|
||||||
_input: &(),
|
|
||||||
token_nodes: &'b mut TokensIterator<'a>,
|
|
||||||
context: &ExpandContext,
|
|
||||||
shapes: &mut Vec<Spanned<FlatShape>>,
|
|
||||||
) -> Result<(), ShellError> {
|
|
||||||
token_nodes.atomic(|token_nodes| loop {
|
|
||||||
let result = color_fallible_syntax_with(
|
|
||||||
&ColorableDotShape,
|
|
||||||
&FlatShape::Dot,
|
|
||||||
token_nodes,
|
|
||||||
context,
|
|
||||||
shapes,
|
|
||||||
);
|
|
||||||
|
|
||||||
match result {
|
|
||||||
Err(_) => return Ok(()),
|
|
||||||
Ok(_) => {}
|
|
||||||
}
|
|
||||||
|
|
||||||
// If we've seen a dot but not a member, fail
|
|
||||||
color_fallible_syntax(&MemberShape, token_nodes, context, shapes)?;
|
|
||||||
})
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[cfg(coloring_in_tokens)]
|
|
||||||
/// The failure mode of `PathTailShape` is a dot followed by a non-member
|
/// The failure mode of `PathTailShape` is a dot followed by a non-member
|
||||||
impl FallibleColorSyntax for PathTailShape {
|
impl FallibleColorSyntax for PathTailShape {
|
||||||
type Info = ();
|
type Info = ();
|
||||||
@ -268,7 +195,7 @@ impl ExpandSyntax for PathTailShape {
|
|||||||
#[derive(Debug, Clone)]
|
#[derive(Debug, Clone)]
|
||||||
pub enum ExpressionContinuation {
|
pub enum ExpressionContinuation {
|
||||||
DotSuffix(Span, PathMember),
|
DotSuffix(Span, PathMember),
|
||||||
InfixSuffix(Spanned<Operator>, Expression),
|
InfixSuffix(Spanned<CompareOperator>, Expression),
|
||||||
}
|
}
|
||||||
|
|
||||||
impl PrettyDebugWithSource for ExpressionContinuation {
|
impl PrettyDebugWithSource for ExpressionContinuation {
|
||||||
@ -339,60 +266,6 @@ pub enum ContinuationInfo {
|
|||||||
Infix,
|
Infix,
|
||||||
}
|
}
|
||||||
|
|
||||||
#[cfg(not(coloring_in_tokens))]
|
|
||||||
impl FallibleColorSyntax for ExpressionContinuationShape {
|
|
||||||
type Info = ContinuationInfo;
|
|
||||||
type Input = ();
|
|
||||||
|
|
||||||
fn color_syntax<'a, 'b>(
|
|
||||||
&self,
|
|
||||||
_input: &(),
|
|
||||||
token_nodes: &'b mut TokensIterator<'a>,
|
|
||||||
context: &ExpandContext,
|
|
||||||
shapes: &mut Vec<Spanned<FlatShape>>,
|
|
||||||
) -> Result<ContinuationInfo, ShellError> {
|
|
||||||
token_nodes.atomic(|token_nodes| {
|
|
||||||
// Try to expand a `.`
|
|
||||||
let dot = color_fallible_syntax_with(
|
|
||||||
&ColorableDotShape,
|
|
||||||
&FlatShape::Dot,
|
|
||||||
token_nodes,
|
|
||||||
context,
|
|
||||||
shapes,
|
|
||||||
);
|
|
||||||
|
|
||||||
match dot {
|
|
||||||
Ok(_) => {
|
|
||||||
// we found a dot, so let's keep looking for a member; if no member was found, fail
|
|
||||||
color_fallible_syntax(&MemberShape, token_nodes, context, shapes)?;
|
|
||||||
|
|
||||||
Ok(ContinuationInfo::Dot)
|
|
||||||
}
|
|
||||||
Err(_) => {
|
|
||||||
let mut new_shapes = vec![];
|
|
||||||
let result = token_nodes.atomic(|token_nodes| {
|
|
||||||
// we didn't find a dot, so let's see if we're looking at an infix. If not found, fail
|
|
||||||
color_fallible_syntax(&InfixShape, token_nodes, context, &mut new_shapes)?;
|
|
||||||
|
|
||||||
// now that we've seen an infix shape, look for any expression. If not found, fail
|
|
||||||
color_fallible_syntax(
|
|
||||||
&AnyExpressionShape,
|
|
||||||
token_nodes,
|
|
||||||
context,
|
|
||||||
&mut new_shapes,
|
|
||||||
)?;
|
|
||||||
|
|
||||||
Ok(ContinuationInfo::Infix)
|
|
||||||
})?;
|
|
||||||
shapes.extend(new_shapes);
|
|
||||||
Ok(result)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
})
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[cfg(coloring_in_tokens)]
|
|
||||||
impl FallibleColorSyntax for ExpressionContinuationShape {
|
impl FallibleColorSyntax for ExpressionContinuationShape {
|
||||||
type Info = ContinuationInfo;
|
type Info = ContinuationInfo;
|
||||||
type Input = ();
|
type Input = ();
|
||||||
@ -469,45 +342,6 @@ impl ExpandExpression for VariableShape {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
#[cfg(not(coloring_in_tokens))]
|
|
||||||
impl FallibleColorSyntax for VariableShape {
|
|
||||||
type Info = ();
|
|
||||||
type Input = ();
|
|
||||||
|
|
||||||
fn color_syntax<'a, 'b>(
|
|
||||||
&self,
|
|
||||||
_input: &(),
|
|
||||||
token_nodes: &'b mut TokensIterator<'a>,
|
|
||||||
context: &ExpandContext,
|
|
||||||
shapes: &mut Vec<Spanned<FlatShape>>,
|
|
||||||
) -> Result<(), ShellError> {
|
|
||||||
let atom = expand_atom(
|
|
||||||
token_nodes,
|
|
||||||
"variable",
|
|
||||||
context,
|
|
||||||
ExpansionRule::permissive(),
|
|
||||||
);
|
|
||||||
|
|
||||||
let atom = match atom {
|
|
||||||
Err(err) => return Err(err.into()),
|
|
||||||
Ok(atom) => atom,
|
|
||||||
};
|
|
||||||
|
|
||||||
match &atom.unspanned {
|
|
||||||
UnspannedAtomicToken::Variable { .. } => {
|
|
||||||
shapes.push(FlatShape::Variable.spanned(atom.span));
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
UnspannedAtomicToken::ItVariable { .. } => {
|
|
||||||
shapes.push(FlatShape::ItVariable.spanned(atom.span));
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
_ => Err(ShellError::type_error("variable", atom.spanned_type_name())),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[cfg(coloring_in_tokens)]
|
|
||||||
impl FallibleColorSyntax for VariableShape {
|
impl FallibleColorSyntax for VariableShape {
|
||||||
type Info = ();
|
type Info = ();
|
||||||
type Input = ();
|
type Input = ();
|
||||||
@ -705,57 +539,6 @@ pub fn expand_column_path<'a, 'b>(
|
|||||||
#[derive(Debug, Copy, Clone)]
|
#[derive(Debug, Copy, Clone)]
|
||||||
pub struct ColumnPathShape;
|
pub struct ColumnPathShape;
|
||||||
|
|
||||||
#[cfg(not(coloring_in_tokens))]
|
|
||||||
impl FallibleColorSyntax for ColumnPathShape {
|
|
||||||
type Info = ();
|
|
||||||
type Input = ();
|
|
||||||
|
|
||||||
fn color_syntax<'a, 'b>(
|
|
||||||
&self,
|
|
||||||
_input: &(),
|
|
||||||
token_nodes: &'b mut TokensIterator<'a>,
|
|
||||||
context: &ExpandContext,
|
|
||||||
shapes: &mut Vec<Spanned<FlatShape>>,
|
|
||||||
) -> Result<(), ShellError> {
|
|
||||||
// If there's not even one member shape, fail
|
|
||||||
color_fallible_syntax(&MemberShape, token_nodes, context, shapes)?;
|
|
||||||
|
|
||||||
loop {
|
|
||||||
let checkpoint = token_nodes.checkpoint();
|
|
||||||
|
|
||||||
match color_fallible_syntax_with(
|
|
||||||
&ColorableDotShape,
|
|
||||||
&FlatShape::Dot,
|
|
||||||
checkpoint.iterator,
|
|
||||||
context,
|
|
||||||
shapes,
|
|
||||||
) {
|
|
||||||
Err(_) => {
|
|
||||||
// we already saw at least one member shape, so return successfully
|
|
||||||
return Ok(());
|
|
||||||
}
|
|
||||||
|
|
||||||
Ok(_) => {
|
|
||||||
match color_fallible_syntax(&MemberShape, checkpoint.iterator, context, shapes)
|
|
||||||
{
|
|
||||||
Err(_) => {
|
|
||||||
// we saw a dot but not a member (but we saw at least one member),
|
|
||||||
// so don't commit the dot but return successfully
|
|
||||||
return Ok(());
|
|
||||||
}
|
|
||||||
|
|
||||||
Ok(_) => {
|
|
||||||
// we saw a dot and a member, so commit it and continue on
|
|
||||||
checkpoint.commit();
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[cfg(coloring_in_tokens)]
|
|
||||||
impl FallibleColorSyntax for ColumnPathShape {
|
impl FallibleColorSyntax for ColumnPathShape {
|
||||||
type Info = ();
|
type Info = ();
|
||||||
type Input = ();
|
type Input = ();
|
||||||
@ -849,45 +632,6 @@ impl ExpandSyntax for ColumnPathShape {
|
|||||||
#[derive(Debug, Copy, Clone)]
|
#[derive(Debug, Copy, Clone)]
|
||||||
pub struct MemberShape;
|
pub struct MemberShape;
|
||||||
|
|
||||||
#[cfg(not(coloring_in_tokens))]
|
|
||||||
impl FallibleColorSyntax for MemberShape {
|
|
||||||
type Info = ();
|
|
||||||
type Input = ();
|
|
||||||
|
|
||||||
fn color_syntax<'a, 'b>(
|
|
||||||
&self,
|
|
||||||
_input: &(),
|
|
||||||
token_nodes: &'b mut TokensIterator<'a>,
|
|
||||||
context: &ExpandContext,
|
|
||||||
shapes: &mut Vec<Spanned<FlatShape>>,
|
|
||||||
) -> Result<(), ShellError> {
|
|
||||||
let bare = color_fallible_syntax_with(
|
|
||||||
&BareShape,
|
|
||||||
&FlatShape::BareMember,
|
|
||||||
token_nodes,
|
|
||||||
context,
|
|
||||||
shapes,
|
|
||||||
);
|
|
||||||
|
|
||||||
match bare {
|
|
||||||
Ok(_) => return Ok(()),
|
|
||||||
Err(_) => {
|
|
||||||
// If we don't have a bare word, we'll look for a string
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Look for a string token. If we don't find one, fail
|
|
||||||
color_fallible_syntax_with(
|
|
||||||
&StringShape,
|
|
||||||
&FlatShape::StringMember,
|
|
||||||
token_nodes,
|
|
||||||
context,
|
|
||||||
shapes,
|
|
||||||
)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[cfg(coloring_in_tokens)]
|
|
||||||
impl FallibleColorSyntax for MemberShape {
|
impl FallibleColorSyntax for MemberShape {
|
||||||
type Info = ();
|
type Info = ();
|
||||||
type Input = ();
|
type Input = ();
|
||||||
@ -1017,33 +761,6 @@ pub struct DotShape;
|
|||||||
#[derive(Debug, Copy, Clone)]
|
#[derive(Debug, Copy, Clone)]
|
||||||
pub struct ColorableDotShape;
|
pub struct ColorableDotShape;
|
||||||
|
|
||||||
#[cfg(not(coloring_in_tokens))]
|
|
||||||
impl FallibleColorSyntax for ColorableDotShape {
|
|
||||||
type Info = ();
|
|
||||||
type Input = FlatShape;
|
|
||||||
|
|
||||||
fn color_syntax<'a, 'b>(
|
|
||||||
&self,
|
|
||||||
input: &FlatShape,
|
|
||||||
token_nodes: &'b mut TokensIterator<'a>,
|
|
||||||
_context: &ExpandContext,
|
|
||||||
shapes: &mut Vec<Spanned<FlatShape>>,
|
|
||||||
) -> Result<(), ShellError> {
|
|
||||||
let peeked = token_nodes.peek_any().not_eof("dot")?;
|
|
||||||
|
|
||||||
match peeked.node {
|
|
||||||
node if node.is_dot() => {
|
|
||||||
peeked.commit();
|
|
||||||
shapes.push((*input).spanned(node.span()));
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
|
|
||||||
other => Err(ShellError::type_error("dot", other.spanned_type_name())),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[cfg(coloring_in_tokens)]
|
|
||||||
impl FallibleColorSyntax for ColorableDotShape {
|
impl FallibleColorSyntax for ColorableDotShape {
|
||||||
type Info = ();
|
type Info = ();
|
||||||
type Input = FlatShape;
|
type Input = FlatShape;
|
||||||
@ -1101,7 +818,7 @@ impl ExpandSyntax for DotShape {
|
|||||||
) -> Result<Self::Output, ParseError> {
|
) -> Result<Self::Output, ParseError> {
|
||||||
parse_single_node(token_nodes, "dot", |token, token_span, _| {
|
parse_single_node(token_nodes, "dot", |token, token_span, _| {
|
||||||
Ok(match token {
|
Ok(match token {
|
||||||
UnspannedToken::Operator(Operator::Dot) => token_span,
|
UnspannedToken::EvaluationOperator(EvaluationOperator::Dot) => token_span,
|
||||||
_ => {
|
_ => {
|
||||||
return Err(ParseError::mismatch(
|
return Err(ParseError::mismatch(
|
||||||
"dot",
|
"dot",
|
||||||
@ -1116,52 +833,6 @@ impl ExpandSyntax for DotShape {
|
|||||||
#[derive(Debug, Copy, Clone)]
|
#[derive(Debug, Copy, Clone)]
|
||||||
pub struct InfixShape;
|
pub struct InfixShape;
|
||||||
|
|
||||||
#[cfg(not(coloring_in_tokens))]
|
|
||||||
impl FallibleColorSyntax for InfixShape {
|
|
||||||
type Info = ();
|
|
||||||
type Input = ();
|
|
||||||
|
|
||||||
fn color_syntax<'a, 'b>(
|
|
||||||
&self,
|
|
||||||
_input: &(),
|
|
||||||
token_nodes: &'b mut TokensIterator<'a>,
|
|
||||||
context: &ExpandContext,
|
|
||||||
outer_shapes: &mut Vec<Spanned<FlatShape>>,
|
|
||||||
) -> Result<(), ShellError> {
|
|
||||||
let checkpoint = token_nodes.checkpoint();
|
|
||||||
let mut shapes = vec![];
|
|
||||||
|
|
||||||
// An infix operator must be prefixed by whitespace. If no whitespace was found, fail
|
|
||||||
color_fallible_syntax(&WhitespaceShape, checkpoint.iterator, context, &mut shapes)?;
|
|
||||||
|
|
||||||
// Parse the next TokenNode after the whitespace
|
|
||||||
parse_single_node(
|
|
||||||
checkpoint.iterator,
|
|
||||||
"infix operator",
|
|
||||||
|token, token_span, err| {
|
|
||||||
match token {
|
|
||||||
// If it's an operator (and not `.`), it's a match
|
|
||||||
UnspannedToken::Operator(operator) if operator != Operator::Dot => {
|
|
||||||
shapes.push(FlatShape::Operator.spanned(token_span));
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
|
|
||||||
// Otherwise, it's not a match
|
|
||||||
_ => Err(err.error()),
|
|
||||||
}
|
|
||||||
},
|
|
||||||
)?;
|
|
||||||
|
|
||||||
// An infix operator must be followed by whitespace. If no whitespace was found, fail
|
|
||||||
color_fallible_syntax(&WhitespaceShape, checkpoint.iterator, context, &mut shapes)?;
|
|
||||||
|
|
||||||
outer_shapes.extend(shapes);
|
|
||||||
checkpoint.commit();
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[cfg(coloring_in_tokens)]
|
|
||||||
impl FallibleColorSyntax for InfixShape {
|
impl FallibleColorSyntax for InfixShape {
|
||||||
type Info = ();
|
type Info = ();
|
||||||
type Input = ();
|
type Input = ();
|
||||||
@ -1188,9 +859,7 @@ impl FallibleColorSyntax for InfixShape {
|
|||||||
|token, token_span, _| {
|
|token, token_span, _| {
|
||||||
match token {
|
match token {
|
||||||
// If it's an operator (and not `.`), it's a match
|
// If it's an operator (and not `.`), it's a match
|
||||||
UnspannedToken::Operator(operator) if operator != Operator::Dot => {
|
UnspannedToken::CompareOperator(_operator) => Ok(token_span),
|
||||||
Ok(token_span)
|
|
||||||
}
|
|
||||||
|
|
||||||
// Otherwise, it's not a match
|
// Otherwise, it's not a match
|
||||||
_ => Err(ParseError::mismatch(
|
_ => Err(ParseError::mismatch(
|
||||||
@ -1203,7 +872,7 @@ impl FallibleColorSyntax for InfixShape {
|
|||||||
|
|
||||||
checkpoint
|
checkpoint
|
||||||
.iterator
|
.iterator
|
||||||
.color_shape(FlatShape::Operator.spanned(operator_span));
|
.color_shape(FlatShape::CompareOperator.spanned(operator_span));
|
||||||
|
|
||||||
// An infix operator must be followed by whitespace. If no whitespace was found, fail
|
// An infix operator must be followed by whitespace. If no whitespace was found, fail
|
||||||
color_fallible_syntax(&WhitespaceShape, checkpoint.iterator, context)?;
|
color_fallible_syntax(&WhitespaceShape, checkpoint.iterator, context)?;
|
||||||
@ -1263,7 +932,7 @@ impl ExpandSyntax for InfixShape {
|
|||||||
|
|
||||||
#[derive(Debug, Clone)]
|
#[derive(Debug, Clone)]
|
||||||
pub struct InfixInnerSyntax {
|
pub struct InfixInnerSyntax {
|
||||||
pub operator: Spanned<Operator>,
|
pub operator: Spanned<CompareOperator>,
|
||||||
}
|
}
|
||||||
|
|
||||||
impl HasSpan for InfixInnerSyntax {
|
impl HasSpan for InfixInnerSyntax {
|
||||||
@ -1295,12 +964,10 @@ impl ExpandSyntax for InfixInnerShape {
|
|||||||
) -> Result<Self::Output, ParseError> {
|
) -> Result<Self::Output, ParseError> {
|
||||||
parse_single_node(token_nodes, "infix operator", |token, token_span, err| {
|
parse_single_node(token_nodes, "infix operator", |token, token_span, err| {
|
||||||
Ok(match token {
|
Ok(match token {
|
||||||
// If it's an operator (and not `.`), it's a match
|
// If it's a comparison operator, it's a match
|
||||||
UnspannedToken::Operator(operator) if operator != Operator::Dot => {
|
UnspannedToken::CompareOperator(operator) => InfixInnerSyntax {
|
||||||
InfixInnerSyntax {
|
operator: operator.spanned(token_span),
|
||||||
operator: operator.spanned(token_span),
|
},
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Otherwise, it's not a match
|
// Otherwise, it's not a match
|
||||||
_ => return Err(err.error()),
|
_ => return Err(err.error()),
|
@ -1,4 +1,7 @@
|
|||||||
use crate::parser::{Delimiter, Flag, FlagKind, Operator, RawNumber, TokenNode, UnspannedToken};
|
use crate::parse::flag::{Flag, FlagKind};
|
||||||
|
use crate::parse::operator::EvaluationOperator;
|
||||||
|
use crate::parse::token_tree::{Delimiter, TokenNode};
|
||||||
|
use crate::parse::tokens::{RawNumber, UnspannedToken};
|
||||||
use nu_source::{HasSpan, Span, Spanned, SpannedItem, Text};
|
use nu_source::{HasSpan, Span, Spanned, SpannedItem, Text};
|
||||||
|
|
||||||
#[derive(Debug, Copy, Clone)]
|
#[derive(Debug, Copy, Clone)]
|
||||||
@ -7,8 +10,9 @@ pub enum FlatShape {
|
|||||||
CloseDelimiter(Delimiter),
|
CloseDelimiter(Delimiter),
|
||||||
ItVariable,
|
ItVariable,
|
||||||
Variable,
|
Variable,
|
||||||
Operator,
|
CompareOperator,
|
||||||
Dot,
|
Dot,
|
||||||
|
DotDot,
|
||||||
InternalCommand,
|
InternalCommand,
|
||||||
ExternalCommand,
|
ExternalCommand,
|
||||||
ExternalWord,
|
ExternalWord,
|
||||||
@ -24,7 +28,9 @@ pub enum FlatShape {
|
|||||||
Int,
|
Int,
|
||||||
Decimal,
|
Decimal,
|
||||||
Whitespace,
|
Whitespace,
|
||||||
|
Separator,
|
||||||
Error,
|
Error,
|
||||||
|
Comment,
|
||||||
Size { number: Span, unit: Span },
|
Size { number: Span, unit: Span },
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -38,10 +44,15 @@ impl FlatShape {
|
|||||||
UnspannedToken::Number(RawNumber::Decimal(_)) => {
|
UnspannedToken::Number(RawNumber::Decimal(_)) => {
|
||||||
shapes.push(FlatShape::Decimal.spanned(token.span))
|
shapes.push(FlatShape::Decimal.spanned(token.span))
|
||||||
}
|
}
|
||||||
UnspannedToken::Operator(Operator::Dot) => {
|
UnspannedToken::EvaluationOperator(EvaluationOperator::Dot) => {
|
||||||
shapes.push(FlatShape::Dot.spanned(token.span))
|
shapes.push(FlatShape::Dot.spanned(token.span))
|
||||||
}
|
}
|
||||||
UnspannedToken::Operator(_) => shapes.push(FlatShape::Operator.spanned(token.span)),
|
UnspannedToken::EvaluationOperator(EvaluationOperator::DotDot) => {
|
||||||
|
shapes.push(FlatShape::DotDot.spanned(token.span))
|
||||||
|
}
|
||||||
|
UnspannedToken::CompareOperator(_) => {
|
||||||
|
shapes.push(FlatShape::CompareOperator.spanned(token.span))
|
||||||
|
}
|
||||||
UnspannedToken::String(_) => shapes.push(FlatShape::String.spanned(token.span)),
|
UnspannedToken::String(_) => shapes.push(FlatShape::String.spanned(token.span)),
|
||||||
UnspannedToken::Variable(v) if v.slice(source) == "it" => {
|
UnspannedToken::Variable(v) if v.slice(source) == "it" => {
|
||||||
shapes.push(FlatShape::ItVariable.spanned(token.span))
|
shapes.push(FlatShape::ItVariable.spanned(token.span))
|
||||||
@ -89,6 +100,8 @@ impl FlatShape {
|
|||||||
..
|
..
|
||||||
}) => shapes.push(FlatShape::ShorthandFlag.spanned(*span)),
|
}) => shapes.push(FlatShape::ShorthandFlag.spanned(*span)),
|
||||||
TokenNode::Whitespace(_) => shapes.push(FlatShape::Whitespace.spanned(token.span())),
|
TokenNode::Whitespace(_) => shapes.push(FlatShape::Whitespace.spanned(token.span())),
|
||||||
|
TokenNode::Separator(_) => shapes.push(FlatShape::Separator.spanned(token.span())),
|
||||||
|
TokenNode::Comment(_) => shapes.push(FlatShape::Comment.spanned(token.span())),
|
||||||
TokenNode::Error(v) => shapes.push(FlatShape::Error.spanned(v.span)),
|
TokenNode::Error(v) => shapes.push(FlatShape::Error.spanned(v.span)),
|
||||||
}
|
}
|
||||||
}
|
}
|
@ -1,38 +1,25 @@
|
|||||||
pub(crate) mod debug;
|
pub(crate) mod debug;
|
||||||
|
|
||||||
use self::debug::{ColorTracer, ExpandTracer};
|
use self::debug::{ColorTracer, ExpandTracer};
|
||||||
use crate::errors::ShellError;
|
|
||||||
#[cfg(coloring_in_tokens)]
|
use crate::hir::syntax_shape::FlatShape;
|
||||||
use crate::parser::hir::syntax_shape::FlatShape;
|
use crate::hir::Expression;
|
||||||
use crate::parser::hir::Expression;
|
use crate::TokenNode;
|
||||||
use crate::parser::TokenNode;
|
|
||||||
use crate::prelude::*;
|
|
||||||
#[allow(unused)]
|
#[allow(unused)]
|
||||||
use getset::{Getters, MutGetters};
|
use getset::{Getters, MutGetters};
|
||||||
use nu_source::Spanned;
|
use nu_errors::{ParseError, ShellError};
|
||||||
|
use nu_protocol::SpannedTypeName;
|
||||||
|
use nu_source::{HasFallibleSpan, HasSpan, Span, Spanned, SpannedItem, Tag, Text};
|
||||||
|
|
||||||
cfg_if::cfg_if! {
|
#[derive(Getters, Debug)]
|
||||||
if #[cfg(coloring_in_tokens)] {
|
pub struct TokensIteratorState<'content> {
|
||||||
#[derive(Getters, Debug)]
|
tokens: &'content [TokenNode],
|
||||||
pub struct TokensIteratorState<'content> {
|
span: Span,
|
||||||
tokens: &'content [TokenNode],
|
skip_ws: bool,
|
||||||
span: Span,
|
index: usize,
|
||||||
skip_ws: bool,
|
seen: indexmap::IndexSet<usize>,
|
||||||
index: usize,
|
#[get = "pub"]
|
||||||
seen: indexmap::IndexSet<usize>,
|
shapes: Vec<Spanned<FlatShape>>,
|
||||||
#[get = "pub"]
|
|
||||||
shapes: Vec<Spanned<FlatShape>>,
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
#[derive(Getters, Debug)]
|
|
||||||
pub struct TokensIteratorState<'content> {
|
|
||||||
tokens: &'content [TokenNode],
|
|
||||||
span: Span,
|
|
||||||
skip_ws: bool,
|
|
||||||
index: usize,
|
|
||||||
seen: indexmap::IndexSet<usize>,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Getters, MutGetters, Debug)]
|
#[derive(Getters, MutGetters, Debug)]
|
||||||
@ -53,7 +40,7 @@ pub struct Checkpoint<'content, 'me> {
|
|||||||
pub(crate) iterator: &'me mut TokensIterator<'content>,
|
pub(crate) iterator: &'me mut TokensIterator<'content>,
|
||||||
index: usize,
|
index: usize,
|
||||||
seen: indexmap::IndexSet<usize>,
|
seen: indexmap::IndexSet<usize>,
|
||||||
#[cfg(coloring_in_tokens)]
|
|
||||||
shape_start: usize,
|
shape_start: usize,
|
||||||
committed: bool,
|
committed: bool,
|
||||||
}
|
}
|
||||||
@ -71,7 +58,7 @@ impl<'content, 'me> std::ops::Drop for Checkpoint<'content, 'me> {
|
|||||||
|
|
||||||
state.index = self.index;
|
state.index = self.index;
|
||||||
state.seen = self.seen.clone();
|
state.seen = self.seen.clone();
|
||||||
#[cfg(coloring_in_tokens)]
|
|
||||||
state.shapes.truncate(self.shape_start);
|
state.shapes.truncate(self.shape_start);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@ -150,7 +137,7 @@ impl<'content, 'me> PeekedNode<'content, 'me> {
|
|||||||
pub fn peek_error(node: &Option<&TokenNode>, eof_span: Span, expected: &'static str) -> ParseError {
|
pub fn peek_error(node: &Option<&TokenNode>, eof_span: Span, expected: &'static str) -> ParseError {
|
||||||
match node {
|
match node {
|
||||||
None => ParseError::unexpected_eof(expected, eof_span),
|
None => ParseError::unexpected_eof(expected, eof_span),
|
||||||
Some(node) => ParseError::mismatch(expected, node.type_name().spanned(node.span())),
|
Some(node) => ParseError::mismatch(expected, node.spanned_type_name()),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -161,34 +148,17 @@ impl<'content> TokensIterator<'content> {
|
|||||||
source: Text,
|
source: Text,
|
||||||
skip_ws: bool,
|
skip_ws: bool,
|
||||||
) -> TokensIterator<'content> {
|
) -> TokensIterator<'content> {
|
||||||
cfg_if::cfg_if! {
|
TokensIterator {
|
||||||
if #[cfg(coloring_in_tokens)] {
|
state: TokensIteratorState {
|
||||||
TokensIterator {
|
tokens: items,
|
||||||
state: TokensIteratorState {
|
span,
|
||||||
tokens: items,
|
skip_ws,
|
||||||
span,
|
index: 0,
|
||||||
skip_ws,
|
seen: indexmap::IndexSet::new(),
|
||||||
index: 0,
|
shapes: vec![],
|
||||||
seen: indexmap::IndexSet::new(),
|
},
|
||||||
shapes: vec![],
|
color_tracer: ColorTracer::new(source.clone()),
|
||||||
},
|
expand_tracer: ExpandTracer::new(source.clone()),
|
||||||
color_tracer: ColorTracer::new(source.clone()),
|
|
||||||
expand_tracer: ExpandTracer::new(source.clone()),
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
TokensIterator {
|
|
||||||
state: TokensIteratorState {
|
|
||||||
tokens: items,
|
|
||||||
span,
|
|
||||||
skip_ws,
|
|
||||||
index: 0,
|
|
||||||
seen: indexmap::IndexSet::new(),
|
|
||||||
},
|
|
||||||
color_tracer: ColorTracer::new(source.clone()),
|
|
||||||
expand_tracer: ExpandTracer::new(source.clone()),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -217,13 +187,11 @@ impl<'content> TokensIterator<'content> {
|
|||||||
result.spanned(start.until(end))
|
result.spanned(start.until(end))
|
||||||
}
|
}
|
||||||
|
|
||||||
#[cfg(coloring_in_tokens)]
|
|
||||||
pub fn color_shape(&mut self, shape: Spanned<FlatShape>) {
|
pub fn color_shape(&mut self, shape: Spanned<FlatShape>) {
|
||||||
self.with_color_tracer(|_, tracer| tracer.add_shape(shape));
|
self.with_color_tracer(|_, tracer| tracer.add_shape(shape));
|
||||||
self.state.shapes.push(shape);
|
self.state.shapes.push(shape);
|
||||||
}
|
}
|
||||||
|
|
||||||
#[cfg(coloring_in_tokens)]
|
|
||||||
pub fn mutate_shapes(&mut self, block: impl FnOnce(&mut Vec<Spanned<FlatShape>>)) {
|
pub fn mutate_shapes(&mut self, block: impl FnOnce(&mut Vec<Spanned<FlatShape>>)) {
|
||||||
let new_shapes: Vec<Spanned<FlatShape>> = {
|
let new_shapes: Vec<Spanned<FlatShape>> = {
|
||||||
let shapes = &mut self.state.shapes;
|
let shapes = &mut self.state.shapes;
|
||||||
@ -239,13 +207,11 @@ impl<'content> TokensIterator<'content> {
|
|||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
#[cfg(coloring_in_tokens)]
|
|
||||||
pub fn silently_mutate_shapes(&mut self, block: impl FnOnce(&mut Vec<Spanned<FlatShape>>)) {
|
pub fn silently_mutate_shapes(&mut self, block: impl FnOnce(&mut Vec<Spanned<FlatShape>>)) {
|
||||||
let shapes = &mut self.state.shapes;
|
let shapes = &mut self.state.shapes;
|
||||||
block(shapes);
|
block(shapes);
|
||||||
}
|
}
|
||||||
|
|
||||||
#[cfg(coloring_in_tokens)]
|
|
||||||
pub fn sort_shapes(&mut self) {
|
pub fn sort_shapes(&mut self) {
|
||||||
// This is pretty dubious, but it works. We should look into a better algorithm that doesn't end up requiring
|
// This is pretty dubious, but it works. We should look into a better algorithm that doesn't end up requiring
|
||||||
// this solution.
|
// this solution.
|
||||||
@ -255,7 +221,6 @@ impl<'content> TokensIterator<'content> {
|
|||||||
.sort_by(|a, b| a.span.start().cmp(&b.span.start()));
|
.sort_by(|a, b| a.span.start().cmp(&b.span.start()));
|
||||||
}
|
}
|
||||||
|
|
||||||
#[cfg(coloring_in_tokens)]
|
|
||||||
pub fn child<'me, T>(
|
pub fn child<'me, T>(
|
||||||
&'me mut self,
|
&'me mut self,
|
||||||
tokens: Spanned<&'me [TokenNode]>,
|
tokens: Spanned<&'me [TokenNode]>,
|
||||||
@ -271,57 +236,6 @@ impl<'content> TokensIterator<'content> {
|
|||||||
let mut expand_tracer = ExpandTracer::new(source.clone());
|
let mut expand_tracer = ExpandTracer::new(source.clone());
|
||||||
std::mem::swap(&mut expand_tracer, &mut self.expand_tracer);
|
std::mem::swap(&mut expand_tracer, &mut self.expand_tracer);
|
||||||
|
|
||||||
cfg_if::cfg_if! {
|
|
||||||
if #[cfg(coloring_in_tokens)] {
|
|
||||||
let mut iterator = TokensIterator {
|
|
||||||
state: TokensIteratorState {
|
|
||||||
tokens: tokens.item,
|
|
||||||
span: tokens.span,
|
|
||||||
skip_ws: false,
|
|
||||||
index: 0,
|
|
||||||
seen: indexmap::IndexSet::new(),
|
|
||||||
shapes,
|
|
||||||
},
|
|
||||||
color_tracer,
|
|
||||||
expand_tracer,
|
|
||||||
};
|
|
||||||
} else {
|
|
||||||
let mut iterator = TokensIterator {
|
|
||||||
state: TokensIteratorState {
|
|
||||||
tokens: tokens.item,
|
|
||||||
span: tokens.span,
|
|
||||||
skip_ws: false,
|
|
||||||
index: 0,
|
|
||||||
seen: indexmap::IndexSet::new(),
|
|
||||||
},
|
|
||||||
color_tracer,
|
|
||||||
expand_tracer,
|
|
||||||
};
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
let result = block(&mut iterator);
|
|
||||||
|
|
||||||
std::mem::swap(&mut iterator.state.shapes, &mut self.state.shapes);
|
|
||||||
std::mem::swap(&mut iterator.color_tracer, &mut self.color_tracer);
|
|
||||||
std::mem::swap(&mut iterator.expand_tracer, &mut self.expand_tracer);
|
|
||||||
|
|
||||||
result
|
|
||||||
}
|
|
||||||
|
|
||||||
#[cfg(not(coloring_in_tokens))]
|
|
||||||
pub fn child<'me, T>(
|
|
||||||
&'me mut self,
|
|
||||||
tokens: Spanned<&'me [TokenNode]>,
|
|
||||||
source: Text,
|
|
||||||
block: impl FnOnce(&mut TokensIterator<'me>) -> T,
|
|
||||||
) -> T {
|
|
||||||
let mut color_tracer = ColorTracer::new(source.clone());
|
|
||||||
std::mem::swap(&mut color_tracer, &mut self.color_tracer);
|
|
||||||
|
|
||||||
let mut expand_tracer = ExpandTracer::new(source.clone());
|
|
||||||
std::mem::swap(&mut expand_tracer, &mut self.expand_tracer);
|
|
||||||
|
|
||||||
let mut iterator = TokensIterator {
|
let mut iterator = TokensIterator {
|
||||||
state: TokensIteratorState {
|
state: TokensIteratorState {
|
||||||
tokens: tokens.item,
|
tokens: tokens.item,
|
||||||
@ -329,6 +243,7 @@ impl<'content> TokensIterator<'content> {
|
|||||||
skip_ws: false,
|
skip_ws: false,
|
||||||
index: 0,
|
index: 0,
|
||||||
seen: indexmap::IndexSet::new(),
|
seen: indexmap::IndexSet::new(),
|
||||||
|
shapes,
|
||||||
},
|
},
|
||||||
color_tracer,
|
color_tracer,
|
||||||
expand_tracer,
|
expand_tracer,
|
||||||
@ -336,6 +251,7 @@ impl<'content> TokensIterator<'content> {
|
|||||||
|
|
||||||
let result = block(&mut iterator);
|
let result = block(&mut iterator);
|
||||||
|
|
||||||
|
std::mem::swap(&mut iterator.state.shapes, &mut self.state.shapes);
|
||||||
std::mem::swap(&mut iterator.color_tracer, &mut self.color_tracer);
|
std::mem::swap(&mut iterator.color_tracer, &mut self.color_tracer);
|
||||||
std::mem::swap(&mut iterator.expand_tracer, &mut self.expand_tracer);
|
std::mem::swap(&mut iterator.expand_tracer, &mut self.expand_tracer);
|
||||||
|
|
||||||
@ -362,7 +278,6 @@ impl<'content> TokensIterator<'content> {
|
|||||||
block(state, tracer)
|
block(state, tracer)
|
||||||
}
|
}
|
||||||
|
|
||||||
#[cfg(coloring_in_tokens)]
|
|
||||||
pub fn color_frame<T>(
|
pub fn color_frame<T>(
|
||||||
&mut self,
|
&mut self,
|
||||||
desc: &'static str,
|
desc: &'static str,
|
||||||
@ -455,7 +370,7 @@ impl<'content> TokensIterator<'content> {
|
|||||||
let state = &mut self.state;
|
let state = &mut self.state;
|
||||||
|
|
||||||
let index = state.index;
|
let index = state.index;
|
||||||
#[cfg(coloring_in_tokens)]
|
|
||||||
let shape_start = state.shapes.len();
|
let shape_start = state.shapes.len();
|
||||||
let seen = state.seen.clone();
|
let seen = state.seen.clone();
|
||||||
|
|
||||||
@ -464,7 +379,7 @@ impl<'content> TokensIterator<'content> {
|
|||||||
index,
|
index,
|
||||||
seen,
|
seen,
|
||||||
committed: false,
|
committed: false,
|
||||||
#[cfg(coloring_in_tokens)]
|
|
||||||
shape_start,
|
shape_start,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@ -478,7 +393,7 @@ impl<'content> TokensIterator<'content> {
|
|||||||
let state = &mut self.state;
|
let state = &mut self.state;
|
||||||
|
|
||||||
let index = state.index;
|
let index = state.index;
|
||||||
#[cfg(coloring_in_tokens)]
|
|
||||||
let shape_start = state.shapes.len();
|
let shape_start = state.shapes.len();
|
||||||
let seen = state.seen.clone();
|
let seen = state.seen.clone();
|
||||||
|
|
||||||
@ -487,7 +402,7 @@ impl<'content> TokensIterator<'content> {
|
|||||||
index,
|
index,
|
||||||
seen,
|
seen,
|
||||||
committed: false,
|
committed: false,
|
||||||
#[cfg(coloring_in_tokens)]
|
|
||||||
shape_start,
|
shape_start,
|
||||||
};
|
};
|
||||||
|
|
||||||
@ -499,14 +414,14 @@ impl<'content> TokensIterator<'content> {
|
|||||||
|
|
||||||
/// Use a checkpoint when you need to peek more than one token ahead, but can't be sure
|
/// Use a checkpoint when you need to peek more than one token ahead, but can't be sure
|
||||||
/// that you'll succeed.
|
/// that you'll succeed.
|
||||||
pub fn atomic_parse<'me, T>(
|
pub fn atomic_parse<'me, T, E>(
|
||||||
&'me mut self,
|
&'me mut self,
|
||||||
block: impl FnOnce(&mut TokensIterator<'content>) -> Result<T, ParseError>,
|
block: impl FnOnce(&mut TokensIterator<'content>) -> Result<T, E>,
|
||||||
) -> Result<T, ParseError> {
|
) -> Result<T, E> {
|
||||||
let state = &mut self.state;
|
let state = &mut self.state;
|
||||||
|
|
||||||
let index = state.index;
|
let index = state.index;
|
||||||
#[cfg(coloring_in_tokens)]
|
|
||||||
let shape_start = state.shapes.len();
|
let shape_start = state.shapes.len();
|
||||||
let seen = state.seen.clone();
|
let seen = state.seen.clone();
|
||||||
|
|
||||||
@ -515,7 +430,7 @@ impl<'content> TokensIterator<'content> {
|
|||||||
index,
|
index,
|
||||||
seen,
|
seen,
|
||||||
committed: false,
|
committed: false,
|
||||||
#[cfg(coloring_in_tokens)]
|
|
||||||
shape_start,
|
shape_start,
|
||||||
};
|
};
|
||||||
|
|
||||||
@ -525,7 +440,6 @@ impl<'content> TokensIterator<'content> {
|
|||||||
return Ok(value);
|
return Ok(value);
|
||||||
}
|
}
|
||||||
|
|
||||||
#[cfg(coloring_in_tokens)]
|
|
||||||
/// Use a checkpoint when you need to peek more than one token ahead, but can't be sure
|
/// Use a checkpoint when you need to peek more than one token ahead, but can't be sure
|
||||||
/// that you'll succeed.
|
/// that you'll succeed.
|
||||||
pub fn atomic_returning_shapes<'me, T>(
|
pub fn atomic_returning_shapes<'me, T>(
|
||||||
@ -641,7 +555,7 @@ impl<'content> TokensIterator<'content> {
|
|||||||
// index: state.index,
|
// index: state.index,
|
||||||
// seen: state.seen.clone(),
|
// seen: state.seen.clone(),
|
||||||
// skip_ws: state.skip_ws,
|
// skip_ws: state.skip_ws,
|
||||||
// #[cfg(coloring_in_tokens)]
|
//
|
||||||
// shapes: state.shapes.clone(),
|
// shapes: state.shapes.clone(),
|
||||||
// },
|
// },
|
||||||
// color_tracer: self.color_tracer.clone(),
|
// color_tracer: self.color_tracer.clone(),
|
@ -6,7 +6,7 @@ pub(crate) mod expand_trace;
|
|||||||
pub(crate) use self::color_trace::*;
|
pub(crate) use self::color_trace::*;
|
||||||
pub(crate) use self::expand_trace::*;
|
pub(crate) use self::expand_trace::*;
|
||||||
|
|
||||||
use crate::parser::hir::tokens_iterator::TokensIteratorState;
|
use crate::hir::tokens_iterator::TokensIteratorState;
|
||||||
use nu_source::{PrettyDebug, PrettyDebugWithSource, Text};
|
use nu_source::{PrettyDebug, PrettyDebugWithSource, Text};
|
||||||
|
|
||||||
#[derive(Debug)]
|
#[derive(Debug)]
|
@ -1,9 +1,8 @@
|
|||||||
use crate::errors::ShellError;
|
use crate::hir::syntax_shape::FlatShape;
|
||||||
use crate::parser::hir::syntax_shape::FlatShape;
|
|
||||||
use crate::prelude::*;
|
|
||||||
use ansi_term::Color;
|
use ansi_term::Color;
|
||||||
use log::trace;
|
use log::trace;
|
||||||
use nu_source::Spanned;
|
use nu_errors::ShellError;
|
||||||
|
use nu_source::{Spanned, Text};
|
||||||
use ptree::*;
|
use ptree::*;
|
||||||
use std::borrow::Cow;
|
use std::borrow::Cow;
|
||||||
use std::io;
|
use std::io;
|
@ -1,8 +1,9 @@
|
|||||||
use crate::parser::hir::Expression;
|
use crate::hir::Expression;
|
||||||
use crate::prelude::*;
|
|
||||||
use ansi_term::Color;
|
use ansi_term::Color;
|
||||||
use log::trace;
|
use log::trace;
|
||||||
use nu_source::DebugDoc;
|
use nu_errors::ParseError;
|
||||||
|
use nu_protocol::ShellTypeName;
|
||||||
|
use nu_source::{DebugDoc, PrettyDebug, PrettyDebugWithSource, Text};
|
||||||
use ptree::*;
|
use ptree::*;
|
||||||
use std::borrow::Cow;
|
use std::borrow::Cow;
|
||||||
use std::io;
|
use std::io;
|
@ -1,5 +1,5 @@
|
|||||||
use crate::parser::hir::TokensIterator;
|
use crate::hir::TokensIterator;
|
||||||
use crate::parser::parse::token_tree_builder::TokenTreeBuilder as b;
|
use crate::parse::token_tree_builder::TokenTreeBuilder as b;
|
||||||
use crate::Span;
|
use crate::Span;
|
||||||
|
|
||||||
#[test]
|
#[test]
|
41
crates/nu-parser/src/lib.rs
Normal file
41
crates/nu-parser/src/lib.rs
Normal file
@ -0,0 +1,41 @@
|
|||||||
|
pub mod commands;
|
||||||
|
pub mod hir;
|
||||||
|
pub mod parse;
|
||||||
|
pub mod parse_command;
|
||||||
|
|
||||||
|
pub use crate::commands::classified::{
|
||||||
|
external::ExternalCommand, internal::InternalCommand, ClassifiedCommand, ClassifiedPipeline,
|
||||||
|
};
|
||||||
|
pub use crate::hir::syntax_shape::flat_shape::FlatShape;
|
||||||
|
pub use crate::hir::syntax_shape::{
|
||||||
|
expand_syntax, ExpandContext, ExpandSyntax, PipelineShape, SignatureRegistry,
|
||||||
|
};
|
||||||
|
pub use crate::hir::tokens_iterator::TokensIterator;
|
||||||
|
pub use crate::parse::files::Files;
|
||||||
|
pub use crate::parse::flag::Flag;
|
||||||
|
pub use crate::parse::operator::{CompareOperator, EvaluationOperator};
|
||||||
|
pub use crate::parse::parser::Number;
|
||||||
|
pub use crate::parse::parser::{module, pipeline};
|
||||||
|
pub use crate::parse::token_tree::{Delimiter, TokenNode};
|
||||||
|
pub use crate::parse::token_tree_builder::TokenTreeBuilder;
|
||||||
|
|
||||||
|
use nu_errors::ShellError;
|
||||||
|
use nu_source::nom_input;
|
||||||
|
|
||||||
|
pub fn parse(input: &str) -> Result<TokenNode, ShellError> {
|
||||||
|
let _ = pretty_env_logger::try_init();
|
||||||
|
|
||||||
|
match pipeline(nom_input(input)) {
|
||||||
|
Ok((_rest, val)) => Ok(val),
|
||||||
|
Err(err) => Err(ShellError::parse_error(err)),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn parse_script(input: &str) -> Result<TokenNode, ShellError> {
|
||||||
|
let _ = pretty_env_logger::try_init();
|
||||||
|
|
||||||
|
match module(nom_input(input)) {
|
||||||
|
Ok((_rest, val)) => Ok(val),
|
||||||
|
Err(err) => Err(ShellError::parse_error(err)),
|
||||||
|
}
|
||||||
|
}
|
@ -1,4 +1,5 @@
|
|||||||
pub(crate) mod call_node;
|
pub(crate) mod call_node;
|
||||||
|
pub(crate) mod comment;
|
||||||
pub(crate) mod files;
|
pub(crate) mod files;
|
||||||
pub(crate) mod flag;
|
pub(crate) mod flag;
|
||||||
pub(crate) mod operator;
|
pub(crate) mod operator;
|
@ -1,6 +1,6 @@
|
|||||||
use crate::parser::TokenNode;
|
use crate::TokenNode;
|
||||||
use crate::prelude::*;
|
|
||||||
use getset::Getters;
|
use getset::Getters;
|
||||||
|
use nu_source::{b, DebugDocBuilder, PrettyDebugWithSource};
|
||||||
|
|
||||||
#[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Getters)]
|
#[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Getters)]
|
||||||
pub struct CallNode {
|
pub struct CallNode {
|
42
crates/nu-parser/src/parse/comment.rs
Normal file
42
crates/nu-parser/src/parse/comment.rs
Normal file
@ -0,0 +1,42 @@
|
|||||||
|
use derive_new::new;
|
||||||
|
use getset::Getters;
|
||||||
|
use nu_source::{b, DebugDocBuilder, HasSpan, PrettyDebugWithSource, Span};
|
||||||
|
use serde::{Deserialize, Serialize};
|
||||||
|
|
||||||
|
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Deserialize, Serialize)]
|
||||||
|
pub enum CommentKind {
|
||||||
|
Line,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Getters, new)]
|
||||||
|
pub struct Comment {
|
||||||
|
pub(crate) kind: CommentKind,
|
||||||
|
pub(crate) text: Span,
|
||||||
|
pub(crate) span: Span,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Comment {
|
||||||
|
pub fn line(text: impl Into<Span>, outer: impl Into<Span>) -> Comment {
|
||||||
|
Comment {
|
||||||
|
kind: CommentKind::Line,
|
||||||
|
text: text.into(),
|
||||||
|
span: outer.into(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl PrettyDebugWithSource for Comment {
|
||||||
|
fn pretty_debug(&self, source: &str) -> DebugDocBuilder {
|
||||||
|
let prefix = match self.kind {
|
||||||
|
CommentKind::Line => b::description("#"),
|
||||||
|
};
|
||||||
|
|
||||||
|
prefix + b::description(self.text.slice(source))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl HasSpan for Comment {
|
||||||
|
fn span(&self) -> Span {
|
||||||
|
self.span
|
||||||
|
}
|
||||||
|
}
|
@ -34,13 +34,15 @@ impl language_reporting::ReportingFiles for Files {
|
|||||||
}
|
}
|
||||||
|
|
||||||
fn location(&self, _file: Self::FileId, byte_index: usize) -> Option<Location> {
|
fn location(&self, _file: Self::FileId, byte_index: usize) -> Option<Location> {
|
||||||
|
trace!("finding location for {}", byte_index);
|
||||||
|
|
||||||
let source = &self.snippet;
|
let source = &self.snippet;
|
||||||
let mut seen_lines = 0;
|
let mut seen_lines = 0;
|
||||||
let mut seen_bytes = 0;
|
let mut seen_bytes = 0;
|
||||||
|
|
||||||
for (pos, slice) in source.match_indices('\n') {
|
for (pos, slice) in source.match_indices('\n') {
|
||||||
trace!(
|
trace!(
|
||||||
"SEARCH={} SEEN={} POS={} SLICE={:?} LEN={} ALL={:?}",
|
"searching byte_index={} seen_bytes={} pos={} slice={:?} slice.len={} source={:?}",
|
||||||
byte_index,
|
byte_index,
|
||||||
seen_bytes,
|
seen_bytes,
|
||||||
pos,
|
pos,
|
||||||
@ -50,9 +52,19 @@ impl language_reporting::ReportingFiles for Files {
|
|||||||
);
|
);
|
||||||
|
|
||||||
if pos >= byte_index {
|
if pos >= byte_index {
|
||||||
|
trace!(
|
||||||
|
"returning {}:{} seen_lines={} byte_index={} pos={} seen_bytes={}",
|
||||||
|
seen_lines,
|
||||||
|
byte_index,
|
||||||
|
pos,
|
||||||
|
seen_lines,
|
||||||
|
byte_index,
|
||||||
|
seen_bytes
|
||||||
|
);
|
||||||
|
|
||||||
return Some(language_reporting::Location::new(
|
return Some(language_reporting::Location::new(
|
||||||
seen_lines,
|
seen_lines,
|
||||||
byte_index - seen_bytes,
|
byte_index - pos,
|
||||||
));
|
));
|
||||||
} else {
|
} else {
|
||||||
seen_lines += 1;
|
seen_lines += 1;
|
||||||
@ -61,30 +73,70 @@ impl language_reporting::ReportingFiles for Files {
|
|||||||
}
|
}
|
||||||
|
|
||||||
if seen_lines == 0 {
|
if seen_lines == 0 {
|
||||||
Some(language_reporting::Location::new(0, byte_index))
|
trace!("seen_lines=0 end={}", source.len() - 1);
|
||||||
|
|
||||||
|
// if we got here, there were no newlines in the source
|
||||||
|
Some(language_reporting::Location::new(0, source.len() - 1))
|
||||||
} else {
|
} else {
|
||||||
panic!("byte index {} wasn't valid", byte_index);
|
trace!(
|
||||||
|
"last line seen_lines={} end={}",
|
||||||
|
seen_lines,
|
||||||
|
source.len() - 1 - byte_index
|
||||||
|
);
|
||||||
|
|
||||||
|
// if we got here and we didn't return, it should mean that we're talking about
|
||||||
|
// the last line
|
||||||
|
Some(language_reporting::Location::new(
|
||||||
|
seen_lines,
|
||||||
|
source.len() - 1 - byte_index,
|
||||||
|
))
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fn line_span(&self, _file: Self::FileId, lineno: usize) -> Option<Self::Span> {
|
fn line_span(&self, _file: Self::FileId, lineno: usize) -> Option<Self::Span> {
|
||||||
|
trace!("finding line_span for {}", lineno);
|
||||||
|
|
||||||
let source = &self.snippet;
|
let source = &self.snippet;
|
||||||
let mut seen_lines = 0;
|
let mut seen_lines = 0;
|
||||||
let mut seen_bytes = 0;
|
let mut seen_bytes = 0;
|
||||||
|
|
||||||
for (pos, _) in source.match_indices('\n') {
|
for (pos, _) in source.match_indices('\n') {
|
||||||
|
trace!(
|
||||||
|
"lineno={} seen_lines={} seen_bytes={} pos={}",
|
||||||
|
lineno,
|
||||||
|
seen_lines,
|
||||||
|
seen_bytes,
|
||||||
|
pos
|
||||||
|
);
|
||||||
|
|
||||||
if seen_lines == lineno {
|
if seen_lines == lineno {
|
||||||
return Some(Span::new(seen_bytes, pos + 1));
|
trace!("returning start={} end={}", seen_bytes, pos);
|
||||||
|
// If the number of seen lines is the lineno, seen_bytes is the start of the
|
||||||
|
// line and pos is the end of the line
|
||||||
|
return Some(Span::new(seen_bytes, pos));
|
||||||
} else {
|
} else {
|
||||||
|
// If it's not, increment seen_lines, and move seen_bytes to the beginning of
|
||||||
|
// the next line
|
||||||
seen_lines += 1;
|
seen_lines += 1;
|
||||||
seen_bytes = pos + 1;
|
seen_bytes = pos + 1;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
if seen_lines == 0 {
|
if seen_lines == 0 {
|
||||||
|
trace!("returning start={} end={}", 0, self.snippet.len() - 1);
|
||||||
|
|
||||||
|
// if we got here, there were no newlines in the source
|
||||||
Some(Span::new(0, self.snippet.len() - 1))
|
Some(Span::new(0, self.snippet.len() - 1))
|
||||||
} else {
|
} else {
|
||||||
None
|
trace!(
|
||||||
|
"returning start={} end={}",
|
||||||
|
seen_bytes,
|
||||||
|
self.snippet.len() - 1
|
||||||
|
);
|
||||||
|
|
||||||
|
// if we got here and we didn't return, it should mean that we're talking about
|
||||||
|
// the last line
|
||||||
|
Some(Span::new(seen_bytes, self.snippet.len() - 1))
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
@ -1,8 +1,7 @@
|
|||||||
use crate::parser::hir::syntax_shape::flat_shape::FlatShape;
|
use crate::hir::syntax_shape::flat_shape::FlatShape;
|
||||||
use crate::prelude::*;
|
|
||||||
use derive_new::new;
|
use derive_new::new;
|
||||||
use getset::Getters;
|
use getset::Getters;
|
||||||
use nu_source::{Span, Spanned, SpannedItem};
|
use nu_source::{b, DebugDocBuilder, PrettyDebugWithSource, Span, Spanned, SpannedItem};
|
||||||
use serde::{Deserialize, Serialize};
|
use serde::{Deserialize, Serialize};
|
||||||
|
|
||||||
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Deserialize, Serialize)]
|
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Deserialize, Serialize)]
|
106
crates/nu-parser/src/parse/operator.rs
Normal file
106
crates/nu-parser/src/parse/operator.rs
Normal file
@ -0,0 +1,106 @@
|
|||||||
|
use nu_source::{b, DebugDocBuilder, PrettyDebug};
|
||||||
|
use serde::{Deserialize, Serialize};
|
||||||
|
|
||||||
|
use std::str::FromStr;
|
||||||
|
|
||||||
|
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Deserialize, Serialize)]
|
||||||
|
pub enum CompareOperator {
|
||||||
|
Equal,
|
||||||
|
NotEqual,
|
||||||
|
LessThan,
|
||||||
|
GreaterThan,
|
||||||
|
LessThanOrEqual,
|
||||||
|
GreaterThanOrEqual,
|
||||||
|
Contains,
|
||||||
|
NotContains,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl PrettyDebug for CompareOperator {
|
||||||
|
fn pretty(&self) -> DebugDocBuilder {
|
||||||
|
b::operator(self.as_str())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl CompareOperator {
|
||||||
|
pub fn print(&self) -> String {
|
||||||
|
self.as_str().to_string()
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn as_str(&self) -> &str {
|
||||||
|
match *self {
|
||||||
|
CompareOperator::Equal => "==",
|
||||||
|
CompareOperator::NotEqual => "!=",
|
||||||
|
CompareOperator::LessThan => "<",
|
||||||
|
CompareOperator::GreaterThan => ">",
|
||||||
|
CompareOperator::LessThanOrEqual => "<=",
|
||||||
|
CompareOperator::GreaterThanOrEqual => ">=",
|
||||||
|
CompareOperator::Contains => "=~",
|
||||||
|
CompareOperator::NotContains => "!~",
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl From<&str> for CompareOperator {
|
||||||
|
fn from(input: &str) -> CompareOperator {
|
||||||
|
CompareOperator::from_str(input).unwrap()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl FromStr for CompareOperator {
|
||||||
|
type Err = ();
|
||||||
|
fn from_str(input: &str) -> Result<Self, <Self as std::str::FromStr>::Err> {
|
||||||
|
match input {
|
||||||
|
"==" => Ok(CompareOperator::Equal),
|
||||||
|
"!=" => Ok(CompareOperator::NotEqual),
|
||||||
|
"<" => Ok(CompareOperator::LessThan),
|
||||||
|
">" => Ok(CompareOperator::GreaterThan),
|
||||||
|
"<=" => Ok(CompareOperator::LessThanOrEqual),
|
||||||
|
">=" => Ok(CompareOperator::GreaterThanOrEqual),
|
||||||
|
"=~" => Ok(CompareOperator::Contains),
|
||||||
|
"!~" => Ok(CompareOperator::NotContains),
|
||||||
|
_ => Err(()),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Deserialize, Serialize)]
|
||||||
|
pub enum EvaluationOperator {
|
||||||
|
Dot,
|
||||||
|
DotDot,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl PrettyDebug for EvaluationOperator {
|
||||||
|
fn pretty(&self) -> DebugDocBuilder {
|
||||||
|
b::operator(self.as_str())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl EvaluationOperator {
|
||||||
|
pub fn print(&self) -> String {
|
||||||
|
self.as_str().to_string()
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn as_str(&self) -> &str {
|
||||||
|
match *self {
|
||||||
|
EvaluationOperator::Dot => ".",
|
||||||
|
EvaluationOperator::DotDot => "..",
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl From<&str> for EvaluationOperator {
|
||||||
|
fn from(input: &str) -> EvaluationOperator {
|
||||||
|
EvaluationOperator::from_str(input).unwrap()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl FromStr for EvaluationOperator {
|
||||||
|
type Err = ();
|
||||||
|
fn from_str(input: &str) -> Result<Self, <Self as std::str::FromStr>::Err> {
|
||||||
|
match input {
|
||||||
|
"." => Ok(EvaluationOperator::Dot),
|
||||||
|
".." => Ok(EvaluationOperator::DotDot),
|
||||||
|
_ => Err(()),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
File diff suppressed because it is too large
Load Diff
@ -1,8 +1,7 @@
|
|||||||
use crate::parser::TokenNode;
|
use crate::TokenNode;
|
||||||
use crate::prelude::*;
|
|
||||||
use derive_new::new;
|
use derive_new::new;
|
||||||
use getset::Getters;
|
use getset::Getters;
|
||||||
use nu_source::{DebugDocBuilder, PrettyDebugWithSource, Span, Spanned};
|
use nu_source::{b, DebugDocBuilder, HasSpan, PrettyDebugWithSource, Span, Spanned};
|
||||||
|
|
||||||
#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Getters, new)]
|
#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Getters, new)]
|
||||||
pub struct Pipeline {
|
pub struct Pipeline {
|
@ -1,10 +1,12 @@
|
|||||||
use crate::errors::{ParseError, ShellError};
|
use crate::parse::{call_node::*, comment::*, flag::*, operator::*, pipeline::*, tokens::*};
|
||||||
use crate::parser::parse::{call_node::*, flag::*, operator::*, pipeline::*, tokens::*};
|
|
||||||
use crate::prelude::*;
|
|
||||||
use derive_new::new;
|
use derive_new::new;
|
||||||
use getset::Getters;
|
use getset::Getters;
|
||||||
use nu_source::Spanned;
|
use nu_errors::{ParseError, ShellError};
|
||||||
use nu_source::{Tagged, Text};
|
use nu_protocol::ShellTypeName;
|
||||||
|
use nu_source::{
|
||||||
|
b, DebugDocBuilder, HasSpan, PrettyDebugWithSource, Span, Spanned, SpannedItem, Tagged,
|
||||||
|
TaggedItem, Text,
|
||||||
|
};
|
||||||
use std::fmt;
|
use std::fmt;
|
||||||
|
|
||||||
#[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd)]
|
#[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd)]
|
||||||
@ -16,7 +18,9 @@ pub enum TokenNode {
|
|||||||
Delimited(Spanned<DelimitedNode>),
|
Delimited(Spanned<DelimitedNode>),
|
||||||
Pipeline(Pipeline),
|
Pipeline(Pipeline),
|
||||||
Flag(Flag),
|
Flag(Flag),
|
||||||
|
Comment(Comment),
|
||||||
Whitespace(Span),
|
Whitespace(Span),
|
||||||
|
Separator(Span),
|
||||||
|
|
||||||
Error(Spanned<ShellError>),
|
Error(Spanned<ShellError>),
|
||||||
}
|
}
|
||||||
@ -37,14 +41,32 @@ impl PrettyDebugWithSource for TokenNode {
|
|||||||
"whitespace",
|
"whitespace",
|
||||||
b::description(format!("{:?}", space.slice(source))),
|
b::description(format!("{:?}", space.slice(source))),
|
||||||
),
|
),
|
||||||
|
TokenNode::Separator(span) => b::typed(
|
||||||
|
"separator",
|
||||||
|
b::description(format!("{:?}", span.slice(source))),
|
||||||
|
),
|
||||||
|
TokenNode::Comment(comment) => {
|
||||||
|
b::typed("comment", b::description(comment.text.slice(source)))
|
||||||
|
}
|
||||||
TokenNode::Error(_) => b::error("error"),
|
TokenNode::Error(_) => b::error("error"),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl HasSpan for TokenNode {
|
impl ShellTypeName for TokenNode {
|
||||||
fn span(&self) -> Span {
|
fn type_name(&self) -> &'static str {
|
||||||
self.get_span()
|
match self {
|
||||||
|
TokenNode::Token(t) => t.type_name(),
|
||||||
|
TokenNode::Nodes(_) => "nodes",
|
||||||
|
TokenNode::Call(_) => "command",
|
||||||
|
TokenNode::Delimited(d) => d.type_name(),
|
||||||
|
TokenNode::Pipeline(_) => "pipeline",
|
||||||
|
TokenNode::Flag(_) => "flag",
|
||||||
|
TokenNode::Whitespace(_) => "whitespace",
|
||||||
|
TokenNode::Separator(_) => "separator",
|
||||||
|
TokenNode::Comment(_) => "comment",
|
||||||
|
TokenNode::Error(_) => "error",
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -105,12 +127,12 @@ impl fmt::Debug for DebugTokenNode<'_> {
|
|||||||
|
|
||||||
impl From<&TokenNode> for Span {
|
impl From<&TokenNode> for Span {
|
||||||
fn from(token: &TokenNode) -> Span {
|
fn from(token: &TokenNode) -> Span {
|
||||||
token.get_span()
|
token.span()
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl TokenNode {
|
impl HasSpan for TokenNode {
|
||||||
pub fn get_span(&self) -> Span {
|
fn span(&self) -> Span {
|
||||||
match self {
|
match self {
|
||||||
TokenNode::Token(t) => t.span,
|
TokenNode::Token(t) => t.span,
|
||||||
TokenNode::Nodes(t) => t.span,
|
TokenNode::Nodes(t) => t.span,
|
||||||
@ -119,27 +141,14 @@ impl TokenNode {
|
|||||||
TokenNode::Pipeline(s) => s.span,
|
TokenNode::Pipeline(s) => s.span,
|
||||||
TokenNode::Flag(s) => s.span,
|
TokenNode::Flag(s) => s.span,
|
||||||
TokenNode::Whitespace(s) => *s,
|
TokenNode::Whitespace(s) => *s,
|
||||||
|
TokenNode::Separator(s) => *s,
|
||||||
|
TokenNode::Comment(c) => c.span(),
|
||||||
TokenNode::Error(s) => s.span,
|
TokenNode::Error(s) => s.span,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
}
|
||||||
|
|
||||||
pub fn type_name(&self) -> &'static str {
|
impl TokenNode {
|
||||||
match self {
|
|
||||||
TokenNode::Token(t) => t.type_name(),
|
|
||||||
TokenNode::Nodes(_) => "nodes",
|
|
||||||
TokenNode::Call(_) => "command",
|
|
||||||
TokenNode::Delimited(d) => d.type_name(),
|
|
||||||
TokenNode::Pipeline(_) => "pipeline",
|
|
||||||
TokenNode::Flag(_) => "flag",
|
|
||||||
TokenNode::Whitespace(_) => "whitespace",
|
|
||||||
TokenNode::Error(_) => "error",
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn spanned_type_name(&self) -> Spanned<&'static str> {
|
|
||||||
self.type_name().spanned(self.span())
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn tagged_type_name(&self) -> Tagged<&'static str> {
|
pub fn tagged_type_name(&self) -> Tagged<&'static str> {
|
||||||
self.type_name().tagged(self.span())
|
self.type_name().tagged(self.span())
|
||||||
}
|
}
|
||||||
@ -242,7 +251,7 @@ impl TokenNode {
|
|||||||
pub fn is_dot(&self) -> bool {
|
pub fn is_dot(&self) -> bool {
|
||||||
match self {
|
match self {
|
||||||
TokenNode::Token(Token {
|
TokenNode::Token(Token {
|
||||||
unspanned: UnspannedToken::Operator(Operator::Dot),
|
unspanned: UnspannedToken::EvaluationOperator(EvaluationOperator::Dot),
|
||||||
..
|
..
|
||||||
}) => true,
|
}) => true,
|
||||||
_ => false,
|
_ => false,
|
||||||
@ -419,7 +428,7 @@ impl TokenNode {
|
|||||||
pub fn expect_dot(&self) -> Span {
|
pub fn expect_dot(&self) -> Span {
|
||||||
match self {
|
match self {
|
||||||
TokenNode::Token(Token {
|
TokenNode::Token(Token {
|
||||||
unspanned: UnspannedToken::Operator(Operator::Dot),
|
unspanned: UnspannedToken::EvaluationOperator(EvaluationOperator::Dot),
|
||||||
span,
|
span,
|
||||||
}) => *span,
|
}) => *span,
|
||||||
other => panic!("Expected dot, found {:?}", other),
|
other => panic!("Expected dot, found {:?}", other),
|
@ -1,12 +1,13 @@
|
|||||||
use crate::prelude::*;
|
use crate::parse::call_node::CallNode;
|
||||||
|
use crate::parse::comment::Comment;
|
||||||
use crate::parser::parse::flag::{Flag, FlagKind};
|
use crate::parse::flag::{Flag, FlagKind};
|
||||||
use crate::parser::parse::operator::Operator;
|
use crate::parse::operator::{CompareOperator, EvaluationOperator};
|
||||||
use crate::parser::parse::pipeline::{Pipeline, PipelineElement};
|
use crate::parse::pipeline::{Pipeline, PipelineElement};
|
||||||
use crate::parser::parse::token_tree::{DelimitedNode, Delimiter, TokenNode};
|
use crate::parse::token_tree::{DelimitedNode, Delimiter, TokenNode};
|
||||||
use crate::parser::parse::tokens::{RawNumber, UnspannedToken};
|
use crate::parse::tokens::{RawNumber, UnspannedToken};
|
||||||
use crate::parser::CallNode;
|
use bigdecimal::BigDecimal;
|
||||||
use nu_source::Spanned;
|
use nu_source::{Span, Spanned, SpannedItem};
|
||||||
|
use num_bigint::BigInt;
|
||||||
|
|
||||||
pub struct TokenTreeBuilder {
|
pub struct TokenTreeBuilder {
|
||||||
pos: usize,
|
pos: usize,
|
||||||
@ -96,7 +97,7 @@ impl TokenTreeBuilder {
|
|||||||
TokenNode::Nodes(input.spanned(span.into()))
|
TokenNode::Nodes(input.spanned(span.into()))
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn op(input: impl Into<Operator>) -> CurriedToken {
|
pub fn op(input: impl Into<CompareOperator>) -> CurriedToken {
|
||||||
let input = input.into();
|
let input = input.into();
|
||||||
|
|
||||||
Box::new(move |b| {
|
Box::new(move |b| {
|
||||||
@ -104,12 +105,39 @@ impl TokenTreeBuilder {
|
|||||||
|
|
||||||
b.pos = end;
|
b.pos = end;
|
||||||
|
|
||||||
TokenTreeBuilder::spanned_op(input, Span::new(start, end))
|
TokenTreeBuilder::spanned_cmp_op(input, Span::new(start, end))
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn spanned_op(input: impl Into<Operator>, span: impl Into<Span>) -> TokenNode {
|
pub fn spanned_cmp_op(input: impl Into<CompareOperator>, span: impl Into<Span>) -> TokenNode {
|
||||||
TokenNode::Token(UnspannedToken::Operator(input.into()).into_token(span))
|
TokenNode::Token(UnspannedToken::CompareOperator(input.into()).into_token(span))
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn dot() -> CurriedToken {
|
||||||
|
Box::new(move |b| {
|
||||||
|
let (start, end) = b.consume(".");
|
||||||
|
|
||||||
|
b.pos = end;
|
||||||
|
|
||||||
|
TokenTreeBuilder::spanned_eval_op(".", Span::new(start, end))
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn dotdot() -> CurriedToken {
|
||||||
|
Box::new(move |b| {
|
||||||
|
let (start, end) = b.consume("..");
|
||||||
|
|
||||||
|
b.pos = end;
|
||||||
|
|
||||||
|
TokenTreeBuilder::spanned_eval_op("..", Span::new(start, end))
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn spanned_eval_op(
|
||||||
|
input: impl Into<EvaluationOperator>,
|
||||||
|
span: impl Into<Span>,
|
||||||
|
) -> TokenNode {
|
||||||
|
TokenNode::Token(UnspannedToken::EvaluationOperator(input.into()).into_token(span))
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn string(input: impl Into<String>) -> CurriedToken {
|
pub fn string(input: impl Into<String>) -> CurriedToken {
|
||||||
@ -398,6 +426,36 @@ impl TokenTreeBuilder {
|
|||||||
TokenNode::Whitespace(span.into())
|
TokenNode::Whitespace(span.into())
|
||||||
}
|
}
|
||||||
|
|
||||||
|
pub fn sep(input: impl Into<String>) -> CurriedToken {
|
||||||
|
let input = input.into();
|
||||||
|
|
||||||
|
Box::new(move |b| {
|
||||||
|
let (start, end) = b.consume(&input);
|
||||||
|
TokenTreeBuilder::spanned_sep(Span::new(start, end))
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn spanned_sep(span: impl Into<Span>) -> TokenNode {
|
||||||
|
TokenNode::Separator(span.into())
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn comment(input: impl Into<String>) -> CurriedToken {
|
||||||
|
let input = input.into();
|
||||||
|
|
||||||
|
Box::new(move |b| {
|
||||||
|
let outer_start = b.pos;
|
||||||
|
b.consume("#");
|
||||||
|
let (start, end) = b.consume(&input);
|
||||||
|
let outer_end = b.pos;
|
||||||
|
|
||||||
|
TokenTreeBuilder::spanned_comment((start, end), (outer_start, outer_end))
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn spanned_comment(input: impl Into<Span>, span: impl Into<Span>) -> TokenNode {
|
||||||
|
TokenNode::Comment(Comment::line(input, span))
|
||||||
|
}
|
||||||
|
|
||||||
fn consume(&mut self, input: &str) -> (usize, usize) {
|
fn consume(&mut self, input: &str) -> (usize, usize) {
|
||||||
let start = self.pos;
|
let start = self.pos;
|
||||||
self.pos += input.len();
|
self.pos += input.len();
|
@ -1,13 +1,20 @@
|
|||||||
use crate::parser::Operator;
|
use crate::parse::parser::Number;
|
||||||
use crate::prelude::*;
|
use crate::{CompareOperator, EvaluationOperator};
|
||||||
use nu_source::{Spanned, Text};
|
use bigdecimal::BigDecimal;
|
||||||
|
use nu_protocol::ShellTypeName;
|
||||||
|
use nu_source::{
|
||||||
|
b, DebugDocBuilder, HasSpan, PrettyDebug, PrettyDebugWithSource, Span, Spanned, SpannedItem,
|
||||||
|
Text,
|
||||||
|
};
|
||||||
|
use num_bigint::BigInt;
|
||||||
use std::fmt;
|
use std::fmt;
|
||||||
use std::str::FromStr;
|
use std::str::FromStr;
|
||||||
|
|
||||||
#[derive(Debug, Clone, Copy, Eq, PartialEq, Ord, PartialOrd, Hash)]
|
#[derive(Debug, Clone, Copy, Eq, PartialEq, Ord, PartialOrd, Hash)]
|
||||||
pub enum UnspannedToken {
|
pub enum UnspannedToken {
|
||||||
Number(RawNumber),
|
Number(RawNumber),
|
||||||
Operator(Operator),
|
CompareOperator(CompareOperator),
|
||||||
|
EvaluationOperator(EvaluationOperator),
|
||||||
String(Span),
|
String(Span),
|
||||||
Variable(Span),
|
Variable(Span),
|
||||||
ExternalCommand(Span),
|
ExternalCommand(Span),
|
||||||
@ -29,7 +36,9 @@ impl ShellTypeName for UnspannedToken {
|
|||||||
fn type_name(&self) -> &'static str {
|
fn type_name(&self) -> &'static str {
|
||||||
match self {
|
match self {
|
||||||
UnspannedToken::Number(_) => "number",
|
UnspannedToken::Number(_) => "number",
|
||||||
UnspannedToken::Operator(..) => "operator",
|
UnspannedToken::CompareOperator(..) => "comparison operator",
|
||||||
|
UnspannedToken::EvaluationOperator(EvaluationOperator::Dot) => "dot",
|
||||||
|
UnspannedToken::EvaluationOperator(EvaluationOperator::DotDot) => "dotdot",
|
||||||
UnspannedToken::String(_) => "string",
|
UnspannedToken::String(_) => "string",
|
||||||
UnspannedToken::Variable(_) => "variable",
|
UnspannedToken::Variable(_) => "variable",
|
||||||
UnspannedToken::ExternalCommand(_) => "syntax error",
|
UnspannedToken::ExternalCommand(_) => "syntax error",
|
||||||
@ -105,7 +114,8 @@ impl PrettyDebugWithSource for Token {
|
|||||||
fn pretty_debug(&self, source: &str) -> DebugDocBuilder {
|
fn pretty_debug(&self, source: &str) -> DebugDocBuilder {
|
||||||
match self.unspanned {
|
match self.unspanned {
|
||||||
UnspannedToken::Number(number) => number.pretty_debug(source),
|
UnspannedToken::Number(number) => number.pretty_debug(source),
|
||||||
UnspannedToken::Operator(operator) => operator.pretty(),
|
UnspannedToken::CompareOperator(operator) => operator.pretty(),
|
||||||
|
UnspannedToken::EvaluationOperator(operator) => operator.pretty(),
|
||||||
UnspannedToken::String(_) => b::primitive(self.span.slice(source)),
|
UnspannedToken::String(_) => b::primitive(self.span.slice(source)),
|
||||||
UnspannedToken::Variable(_) => b::var(self.span.slice(source)),
|
UnspannedToken::Variable(_) => b::var(self.span.slice(source)),
|
||||||
UnspannedToken::ExternalCommand(_) => b::primitive(self.span.slice(source)),
|
UnspannedToken::ExternalCommand(_) => b::primitive(self.span.slice(source)),
|
||||||
@ -149,9 +159,9 @@ impl Token {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn extract_operator(&self) -> Option<Spanned<Operator>> {
|
pub fn extract_operator(&self) -> Option<Spanned<CompareOperator>> {
|
||||||
match self.unspanned {
|
match self.unspanned {
|
||||||
UnspannedToken::Operator(operator) => Some(operator.spanned(self.span)),
|
UnspannedToken::CompareOperator(operator) => Some(operator.spanned(self.span)),
|
||||||
_ => None,
|
_ => None,
|
||||||
}
|
}
|
||||||
}
|
}
|
@ -1,4 +1,7 @@
|
|||||||
use crate::prelude::*;
|
use crate::parse::parser::Number;
|
||||||
|
use nu_protocol::{Primitive, UntaggedValue};
|
||||||
|
use nu_source::{b, DebugDocBuilder, PrettyDebug};
|
||||||
|
use num_traits::ToPrimitive;
|
||||||
use serde::{Deserialize, Serialize};
|
use serde::{Deserialize, Serialize};
|
||||||
|
|
||||||
use std::str::FromStr;
|
use std::str::FromStr;
|
||||||
@ -25,7 +28,7 @@ pub enum Unit {
|
|||||||
|
|
||||||
impl PrettyDebug for Unit {
|
impl PrettyDebug for Unit {
|
||||||
fn pretty(&self) -> DebugDocBuilder {
|
fn pretty(&self) -> DebugDocBuilder {
|
||||||
b::keyword(format!("{:?}", self))
|
b::keyword(self.as_str())
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -55,31 +58,40 @@ impl Unit {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub(crate) fn compute(&self, size: &Number) -> UntaggedValue {
|
pub fn compute(&self, size: &Number) -> UntaggedValue {
|
||||||
let size = size.clone();
|
let size = size.clone();
|
||||||
|
|
||||||
match &self {
|
match &self {
|
||||||
Unit::Byte => UntaggedValue::number(size),
|
Unit::Byte => number(size),
|
||||||
Unit::Kilobyte => UntaggedValue::number(size * 1024),
|
Unit::Kilobyte => number(size * 1024),
|
||||||
Unit::Megabyte => UntaggedValue::number(size * 1024 * 1024),
|
Unit::Megabyte => number(size * 1024 * 1024),
|
||||||
Unit::Gigabyte => UntaggedValue::number(size * 1024 * 1024 * 1024),
|
Unit::Gigabyte => number(size * 1024 * 1024 * 1024),
|
||||||
Unit::Terabyte => UntaggedValue::number(size * 1024 * 1024 * 1024 * 1024),
|
Unit::Terabyte => number(size * 1024 * 1024 * 1024 * 1024),
|
||||||
Unit::Petabyte => UntaggedValue::number(size * 1024 * 1024 * 1024 * 1024 * 1024),
|
Unit::Petabyte => number(size * 1024 * 1024 * 1024 * 1024 * 1024),
|
||||||
Unit::Second => UntaggedValue::duration(convert_number_to_u64(&size)),
|
Unit::Second => duration(convert_number_to_u64(&size)),
|
||||||
Unit::Minute => UntaggedValue::duration(60 * convert_number_to_u64(&size)),
|
Unit::Minute => duration(60 * convert_number_to_u64(&size)),
|
||||||
Unit::Hour => UntaggedValue::duration(60 * 60 * convert_number_to_u64(&size)),
|
Unit::Hour => duration(60 * 60 * convert_number_to_u64(&size)),
|
||||||
Unit::Day => UntaggedValue::duration(24 * 60 * 60 * convert_number_to_u64(&size)),
|
Unit::Day => duration(24 * 60 * 60 * convert_number_to_u64(&size)),
|
||||||
Unit::Week => UntaggedValue::duration(7 * 24 * 60 * 60 * convert_number_to_u64(&size)),
|
Unit::Week => duration(7 * 24 * 60 * 60 * convert_number_to_u64(&size)),
|
||||||
Unit::Month => {
|
Unit::Month => duration(30 * 24 * 60 * 60 * convert_number_to_u64(&size)),
|
||||||
UntaggedValue::duration(30 * 24 * 60 * 60 * convert_number_to_u64(&size))
|
Unit::Year => duration(365 * 24 * 60 * 60 * convert_number_to_u64(&size)),
|
||||||
}
|
|
||||||
Unit::Year => {
|
|
||||||
UntaggedValue::duration(365 * 24 * 60 * 60 * convert_number_to_u64(&size))
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
fn number(number: impl Into<Number>) -> UntaggedValue {
|
||||||
|
let number = number.into();
|
||||||
|
|
||||||
|
match number {
|
||||||
|
Number::Int(int) => UntaggedValue::Primitive(Primitive::Int(int)),
|
||||||
|
Number::Decimal(decimal) => UntaggedValue::Primitive(Primitive::Decimal(decimal)),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn duration(secs: u64) -> UntaggedValue {
|
||||||
|
UntaggedValue::Primitive(Primitive::Duration(secs))
|
||||||
|
}
|
||||||
|
|
||||||
impl FromStr for Unit {
|
impl FromStr for Unit {
|
||||||
type Err = ();
|
type Err = ();
|
||||||
fn from_str(input: &str) -> Result<Self, <Self as std::str::FromStr>::Err> {
|
fn from_str(input: &str) -> Result<Self, <Self as std::str::FromStr>::Err> {
|
@ -1,17 +1,17 @@
|
|||||||
use crate::errors::{ArgumentError, ParseError};
|
use crate::hir::syntax_shape::{
|
||||||
use crate::parser::hir::syntax_shape::{
|
|
||||||
color_fallible_syntax, color_syntax, expand_expr, flat_shape::FlatShape, spaced,
|
color_fallible_syntax, color_syntax, expand_expr, flat_shape::FlatShape, spaced,
|
||||||
BackoffColoringMode, ColorSyntax, MaybeSpaceShape,
|
BackoffColoringMode, ColorSyntax, MaybeSpaceShape,
|
||||||
};
|
};
|
||||||
use crate::parser::registry::{NamedType, PositionalType, Signature};
|
use crate::TokensIterator;
|
||||||
use crate::parser::TokensIterator;
|
use crate::{
|
||||||
use crate::parser::{
|
|
||||||
hir::{self, ExpandContext, NamedArguments},
|
hir::{self, ExpandContext, NamedArguments},
|
||||||
Flag,
|
Flag,
|
||||||
};
|
};
|
||||||
use log::trace;
|
use log::trace;
|
||||||
use nu_source::{PrettyDebugWithSource, Text};
|
use nu_source::{PrettyDebugWithSource, Span, Spanned, SpannedItem, Text};
|
||||||
use nu_source::{Span, Spanned, SpannedItem};
|
|
||||||
|
use nu_errors::{ArgumentError, ParseError};
|
||||||
|
use nu_protocol::{NamedType, PositionalType, Signature};
|
||||||
|
|
||||||
pub fn parse_command_tail(
|
pub fn parse_command_tail(
|
||||||
config: &Signature,
|
config: &Signature,
|
||||||
@ -183,198 +183,6 @@ impl ColoringArgs {
|
|||||||
#[derive(Debug, Copy, Clone)]
|
#[derive(Debug, Copy, Clone)]
|
||||||
pub struct CommandTailShape;
|
pub struct CommandTailShape;
|
||||||
|
|
||||||
#[cfg(not(coloring_in_tokens))]
|
|
||||||
impl ColorSyntax for CommandTailShape {
|
|
||||||
type Info = ();
|
|
||||||
type Input = Signature;
|
|
||||||
|
|
||||||
fn color_syntax<'a, 'b>(
|
|
||||||
&self,
|
|
||||||
signature: &Signature,
|
|
||||||
token_nodes: &'b mut TokensIterator<'a>,
|
|
||||||
context: &ExpandContext,
|
|
||||||
shapes: &mut Vec<Spanned<FlatShape>>,
|
|
||||||
) -> Self::Info {
|
|
||||||
let mut args = ColoringArgs::new(token_nodes.len());
|
|
||||||
for (name, kind) in &signature.named {
|
|
||||||
trace!(target: "nu::color_syntax", "looking for {} : {:?}", name, kind);
|
|
||||||
|
|
||||||
match &kind.0 {
|
|
||||||
NamedType::Switch => {
|
|
||||||
match token_nodes.extract(|t| t.as_flag(name, context.source())) {
|
|
||||||
Some((pos, flag)) => args.insert(pos, vec![flag.color()]),
|
|
||||||
None => {}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
NamedType::Mandatory(syntax_type) => {
|
|
||||||
match extract_mandatory(
|
|
||||||
signature,
|
|
||||||
name,
|
|
||||||
token_nodes,
|
|
||||||
context.source(),
|
|
||||||
Span::unknown(),
|
|
||||||
) {
|
|
||||||
Err(_) => {
|
|
||||||
// The mandatory flag didn't exist at all, so there's nothing to color
|
|
||||||
}
|
|
||||||
Ok((pos, flag)) => {
|
|
||||||
let mut shapes = vec![flag.color()];
|
|
||||||
token_nodes.move_to(pos);
|
|
||||||
|
|
||||||
if token_nodes.at_end() {
|
|
||||||
args.insert(pos, shapes);
|
|
||||||
token_nodes.restart();
|
|
||||||
continue;
|
|
||||||
}
|
|
||||||
|
|
||||||
// We can live with unmatched syntax after a mandatory flag
|
|
||||||
let _ = token_nodes.atomic(|token_nodes| {
|
|
||||||
color_syntax(&MaybeSpaceShape, token_nodes, context, &mut shapes);
|
|
||||||
|
|
||||||
// If the part after a mandatory flag isn't present, that's ok, but we
|
|
||||||
// should roll back any whitespace we chomped
|
|
||||||
color_fallible_syntax(
|
|
||||||
syntax_type,
|
|
||||||
token_nodes,
|
|
||||||
context,
|
|
||||||
&mut shapes,
|
|
||||||
)
|
|
||||||
});
|
|
||||||
|
|
||||||
args.insert(pos, shapes);
|
|
||||||
token_nodes.restart();
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
NamedType::Optional(syntax_type) => {
|
|
||||||
match extract_optional(name, token_nodes, context.source()) {
|
|
||||||
Err(_) => {
|
|
||||||
// The optional flag didn't exist at all, so there's nothing to color
|
|
||||||
}
|
|
||||||
Ok(Some((pos, flag))) => {
|
|
||||||
let mut shapes = vec![flag.color()];
|
|
||||||
token_nodes.move_to(pos);
|
|
||||||
|
|
||||||
if token_nodes.at_end() {
|
|
||||||
args.insert(pos, shapes);
|
|
||||||
token_nodes.restart();
|
|
||||||
continue;
|
|
||||||
}
|
|
||||||
|
|
||||||
// We can live with unmatched syntax after an optional flag
|
|
||||||
let _ = token_nodes.atomic(|token_nodes| {
|
|
||||||
color_syntax(&MaybeSpaceShape, token_nodes, context, &mut shapes);
|
|
||||||
|
|
||||||
// If the part after a mandatory flag isn't present, that's ok, but we
|
|
||||||
// should roll back any whitespace we chomped
|
|
||||||
color_fallible_syntax(
|
|
||||||
syntax_type,
|
|
||||||
token_nodes,
|
|
||||||
context,
|
|
||||||
&mut shapes,
|
|
||||||
)
|
|
||||||
});
|
|
||||||
|
|
||||||
args.insert(pos, shapes);
|
|
||||||
token_nodes.restart();
|
|
||||||
}
|
|
||||||
|
|
||||||
Ok(None) => {
|
|
||||||
token_nodes.restart();
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
for arg in &signature.positional {
|
|
||||||
trace!("Processing positional {:?}", arg);
|
|
||||||
|
|
||||||
match arg.0 {
|
|
||||||
PositionalType::Mandatory(..) => {
|
|
||||||
if token_nodes.at_end() {
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
PositionalType::Optional(..) => {
|
|
||||||
if token_nodes.at_end() {
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
let mut shapes = vec![];
|
|
||||||
let pos = token_nodes.pos(false);
|
|
||||||
|
|
||||||
match pos {
|
|
||||||
None => break,
|
|
||||||
Some(pos) => {
|
|
||||||
// We can live with an unmatched positional argument. Hopefully it will be
|
|
||||||
// matched by a future token
|
|
||||||
let _ = token_nodes.atomic(|token_nodes| {
|
|
||||||
color_syntax(&MaybeSpaceShape, token_nodes, context, &mut shapes);
|
|
||||||
|
|
||||||
// If no match, we should roll back any whitespace we chomped
|
|
||||||
color_fallible_syntax(
|
|
||||||
&arg.0.syntax_type(),
|
|
||||||
token_nodes,
|
|
||||||
context,
|
|
||||||
&mut shapes,
|
|
||||||
)?;
|
|
||||||
|
|
||||||
args.insert(pos, shapes);
|
|
||||||
|
|
||||||
Ok(())
|
|
||||||
});
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if let Some((syntax_type, _)) = signature.rest_positional {
|
|
||||||
loop {
|
|
||||||
if token_nodes.at_end_possible_ws() {
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
|
|
||||||
let pos = token_nodes.pos(false);
|
|
||||||
|
|
||||||
match pos {
|
|
||||||
None => break,
|
|
||||||
Some(pos) => {
|
|
||||||
let mut shapes = vec![];
|
|
||||||
|
|
||||||
// If any arguments don't match, we'll fall back to backoff coloring mode
|
|
||||||
let result = token_nodes.atomic(|token_nodes| {
|
|
||||||
color_syntax(&MaybeSpaceShape, token_nodes, context, &mut shapes);
|
|
||||||
|
|
||||||
// If no match, we should roll back any whitespace we chomped
|
|
||||||
color_fallible_syntax(&syntax_type, token_nodes, context, &mut shapes)?;
|
|
||||||
|
|
||||||
args.insert(pos, shapes);
|
|
||||||
|
|
||||||
Ok(())
|
|
||||||
});
|
|
||||||
|
|
||||||
match result {
|
|
||||||
Err(_) => break,
|
|
||||||
Ok(_) => continue,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
args.spread_shapes(shapes);
|
|
||||||
|
|
||||||
// Consume any remaining tokens with backoff coloring mode
|
|
||||||
color_syntax(&BackoffColoringMode, token_nodes, context, shapes);
|
|
||||||
|
|
||||||
shapes.sort_by(|a, b| a.span.start().cmp(&b.span.start()));
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[cfg(coloring_in_tokens)]
|
|
||||||
impl ColorSyntax for CommandTailShape {
|
impl ColorSyntax for CommandTailShape {
|
||||||
type Info = ();
|
type Info = ();
|
||||||
type Input = Signature;
|
type Input = Signature;
|
||||||
@ -389,7 +197,7 @@ impl ColorSyntax for CommandTailShape {
|
|||||||
token_nodes: &'b mut TokensIterator<'a>,
|
token_nodes: &'b mut TokensIterator<'a>,
|
||||||
context: &ExpandContext,
|
context: &ExpandContext,
|
||||||
) -> Self::Info {
|
) -> Self::Info {
|
||||||
use crate::parser::hir::syntax_shape::SyntaxShape;
|
use nu_protocol::SyntaxShape;
|
||||||
|
|
||||||
let mut args = ColoringArgs::new(token_nodes.len());
|
let mut args = ColoringArgs::new(token_nodes.len());
|
||||||
trace_remaining("nodes", &token_nodes, context.source());
|
trace_remaining("nodes", &token_nodes, context.source());
|
42
crates/nu-protocol/Cargo.toml
Normal file
42
crates/nu-protocol/Cargo.toml
Normal file
@ -0,0 +1,42 @@
|
|||||||
|
[package]
|
||||||
|
name = "nu-protocol"
|
||||||
|
version = "0.7.0"
|
||||||
|
authors = ["Yehuda Katz <wycats@gmail.com>", "Jonathan Turner <jonathan.d.turner@gmail.com>", "Andrés N. Robalino <andres@androbtech.com>"]
|
||||||
|
edition = "2018"
|
||||||
|
description = "Core values and protocols for Nushell"
|
||||||
|
license = "MIT"
|
||||||
|
|
||||||
|
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
|
||||||
|
|
||||||
|
[dependencies]
|
||||||
|
nu-source = { path = "../nu-source", version = "0.7.0" }
|
||||||
|
nu-errors = { path = "../nu-errors", version = "0.7.0" }
|
||||||
|
|
||||||
|
serde = { version = "1.0.103", features = ["derive"] }
|
||||||
|
indexmap = { version = "1.3.0", features = ["serde-1"] }
|
||||||
|
num-bigint = { version = "0.2.3", features = ["serde"] }
|
||||||
|
bigdecimal = { version = "0.1.0", features = ["serde"] }
|
||||||
|
chrono = { version = "0.4.10", features = ["serde"] }
|
||||||
|
num-traits = "0.2.8"
|
||||||
|
serde_bytes = "0.11.3"
|
||||||
|
getset = "0.0.9"
|
||||||
|
derive-new = "0.5.8"
|
||||||
|
ansi_term = "0.12.1"
|
||||||
|
language-reporting = "0.4.0"
|
||||||
|
nom = "5.0.1"
|
||||||
|
nom_locate = "1.0.0"
|
||||||
|
nom-tracable = "0.4.1"
|
||||||
|
typetag = "0.1.4"
|
||||||
|
query_interface = "0.3.5"
|
||||||
|
byte-unit = "3.0.3"
|
||||||
|
chrono-humanize = "0.0.11"
|
||||||
|
natural = "0.3.0"
|
||||||
|
|
||||||
|
# implement conversions
|
||||||
|
subprocess = "0.1.18"
|
||||||
|
serde_yaml = "0.8"
|
||||||
|
toml = "0.5.5"
|
||||||
|
serde_json = "1.0.44"
|
||||||
|
|
||||||
|
[build-dependencies]
|
||||||
|
nu-build = { version = "0.7.0", path = "../nu-build" }
|
3
crates/nu-protocol/build.rs
Normal file
3
crates/nu-protocol/build.rs
Normal file
@ -0,0 +1,3 @@
|
|||||||
|
fn main() -> Result<(), Box<dyn std::error::Error>> {
|
||||||
|
nu_build::build()
|
||||||
|
}
|
93
crates/nu-protocol/src/call_info.rs
Normal file
93
crates/nu-protocol/src/call_info.rs
Normal file
@ -0,0 +1,93 @@
|
|||||||
|
use crate::value::Value;
|
||||||
|
use derive_new::new;
|
||||||
|
use indexmap::IndexMap;
|
||||||
|
use nu_errors::ShellError;
|
||||||
|
use nu_source::Tag;
|
||||||
|
use serde::{Deserialize, Serialize};
|
||||||
|
|
||||||
|
#[derive(Deserialize, Serialize, Debug, Clone)]
|
||||||
|
pub struct CallInfo {
|
||||||
|
pub args: EvaluatedArgs,
|
||||||
|
pub name_tag: Tag,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Default, new, Serialize, Deserialize, Clone)]
|
||||||
|
pub struct EvaluatedArgs {
|
||||||
|
pub positional: Option<Vec<Value>>,
|
||||||
|
pub named: Option<IndexMap<String, Value>>,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl EvaluatedArgs {
|
||||||
|
pub fn slice_from(&self, from: usize) -> Vec<Value> {
|
||||||
|
let positional = &self.positional;
|
||||||
|
|
||||||
|
match positional {
|
||||||
|
None => vec![],
|
||||||
|
Some(list) => list[from..].to_vec(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn nth(&self, pos: usize) -> Option<&Value> {
|
||||||
|
match &self.positional {
|
||||||
|
None => None,
|
||||||
|
Some(array) => array.iter().nth(pos),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn expect_nth(&self, pos: usize) -> Result<&Value, ShellError> {
|
||||||
|
match &self.positional {
|
||||||
|
None => Err(ShellError::unimplemented("Better error: expect_nth")),
|
||||||
|
Some(array) => match array.iter().nth(pos) {
|
||||||
|
None => Err(ShellError::unimplemented("Better error: expect_nth")),
|
||||||
|
Some(item) => Ok(item),
|
||||||
|
},
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn len(&self) -> usize {
|
||||||
|
match &self.positional {
|
||||||
|
None => 0,
|
||||||
|
Some(array) => array.len(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn has(&self, name: &str) -> bool {
|
||||||
|
match &self.named {
|
||||||
|
None => false,
|
||||||
|
Some(named) => named.contains_key(name),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn get(&self, name: &str) -> Option<&Value> {
|
||||||
|
match &self.named {
|
||||||
|
None => None,
|
||||||
|
Some(named) => named.get(name),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn positional_iter(&self) -> PositionalIter<'_> {
|
||||||
|
match &self.positional {
|
||||||
|
None => PositionalIter::Empty,
|
||||||
|
Some(v) => {
|
||||||
|
let iter = v.iter();
|
||||||
|
PositionalIter::Array(iter)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub enum PositionalIter<'a> {
|
||||||
|
Empty,
|
||||||
|
Array(std::slice::Iter<'a, Value>),
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<'a> Iterator for PositionalIter<'a> {
|
||||||
|
type Item = &'a Value;
|
||||||
|
|
||||||
|
fn next(&mut self) -> Option<Self::Item> {
|
||||||
|
match self {
|
||||||
|
PositionalIter::Empty => None,
|
||||||
|
PositionalIter::Array(iter) => iter.next(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
28
crates/nu-protocol/src/lib.rs
Normal file
28
crates/nu-protocol/src/lib.rs
Normal file
@ -0,0 +1,28 @@
|
|||||||
|
#[macro_use]
|
||||||
|
mod macros;
|
||||||
|
|
||||||
|
mod call_info;
|
||||||
|
mod maybe_owned;
|
||||||
|
mod plugin;
|
||||||
|
mod return_value;
|
||||||
|
mod signature;
|
||||||
|
mod syntax_shape;
|
||||||
|
mod type_name;
|
||||||
|
mod type_shape;
|
||||||
|
mod value;
|
||||||
|
|
||||||
|
pub use crate::call_info::{CallInfo, EvaluatedArgs};
|
||||||
|
pub use crate::maybe_owned::MaybeOwned;
|
||||||
|
pub use crate::plugin::{serve_plugin, Plugin};
|
||||||
|
pub use crate::return_value::{CommandAction, ReturnSuccess, ReturnValue};
|
||||||
|
pub use crate::signature::{NamedType, PositionalType, Signature};
|
||||||
|
pub use crate::syntax_shape::SyntaxShape;
|
||||||
|
pub use crate::type_name::{PrettyType, ShellTypeName, SpannedTypeName};
|
||||||
|
pub use crate::type_shape::{Row as RowType, Type};
|
||||||
|
pub use crate::value::column_path::{did_you_mean, ColumnPath, PathMember, UnspannedPathMember};
|
||||||
|
pub use crate::value::dict::{Dictionary, TaggedDictBuilder};
|
||||||
|
pub use crate::value::evaluate::{Evaluate, EvaluateTrait, Scope};
|
||||||
|
pub use crate::value::primitive::format_primitive;
|
||||||
|
pub use crate::value::primitive::Primitive;
|
||||||
|
pub use crate::value::range::{Range, RangeInclusion};
|
||||||
|
pub use crate::value::{UntaggedValue, Value};
|
12
crates/nu-protocol/src/macros.rs
Normal file
12
crates/nu-protocol/src/macros.rs
Normal file
@ -0,0 +1,12 @@
|
|||||||
|
// These macros exist to differentiate between intentional writing to stdout
|
||||||
|
// and stray printlns left by accident
|
||||||
|
|
||||||
|
#[macro_export]
|
||||||
|
macro_rules! outln {
|
||||||
|
($($tokens:tt)*) => { println!($($tokens)*) }
|
||||||
|
}
|
||||||
|
|
||||||
|
#[macro_export]
|
||||||
|
macro_rules! errln {
|
||||||
|
($($tokens:tt)*) => { eprintln!($($tokens)*) }
|
||||||
|
}
|
14
crates/nu-protocol/src/maybe_owned.rs
Normal file
14
crates/nu-protocol/src/maybe_owned.rs
Normal file
@ -0,0 +1,14 @@
|
|||||||
|
#[derive(Debug)]
|
||||||
|
pub enum MaybeOwned<'a, T> {
|
||||||
|
Owned(T),
|
||||||
|
Borrowed(&'a T),
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<T> MaybeOwned<'_, T> {
|
||||||
|
pub fn borrow(&self) -> &T {
|
||||||
|
match self {
|
||||||
|
MaybeOwned::Owned(v) => v,
|
||||||
|
MaybeOwned::Borrowed(v) => v,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
@ -1,5 +1,8 @@
|
|||||||
use crate::Signature;
|
use crate::call_info::CallInfo;
|
||||||
use crate::{CallInfo, ReturnValue, ShellError, Value};
|
use crate::return_value::ReturnValue;
|
||||||
|
use crate::signature::Signature;
|
||||||
|
use crate::value::Value;
|
||||||
|
use nu_errors::ShellError;
|
||||||
use serde::{Deserialize, Serialize};
|
use serde::{Deserialize, Serialize};
|
||||||
use std::io;
|
use std::io;
|
||||||
|
|
80
crates/nu-protocol/src/return_value.rs
Normal file
80
crates/nu-protocol/src/return_value.rs
Normal file
@ -0,0 +1,80 @@
|
|||||||
|
use crate::value::Value;
|
||||||
|
use nu_errors::ShellError;
|
||||||
|
use nu_source::{b, DebugDocBuilder, PrettyDebug};
|
||||||
|
use serde::{Deserialize, Serialize};
|
||||||
|
|
||||||
|
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||||
|
pub enum CommandAction {
|
||||||
|
ChangePath(String),
|
||||||
|
Exit,
|
||||||
|
Error(ShellError),
|
||||||
|
EnterShell(String),
|
||||||
|
AutoConvert(Value, String),
|
||||||
|
EnterValueShell(Value),
|
||||||
|
EnterHelpShell(Value),
|
||||||
|
PreviousShell,
|
||||||
|
NextShell,
|
||||||
|
LeaveShell,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl PrettyDebug for CommandAction {
|
||||||
|
fn pretty(&self) -> DebugDocBuilder {
|
||||||
|
match self {
|
||||||
|
CommandAction::ChangePath(path) => b::typed("change path", b::description(path)),
|
||||||
|
CommandAction::Exit => b::description("exit"),
|
||||||
|
CommandAction::Error(_) => b::error("error"),
|
||||||
|
CommandAction::AutoConvert(_, extension) => {
|
||||||
|
b::typed("auto convert", b::description(extension))
|
||||||
|
}
|
||||||
|
CommandAction::EnterShell(s) => b::typed("enter shell", b::description(s)),
|
||||||
|
CommandAction::EnterValueShell(v) => b::typed("enter value shell", v.pretty()),
|
||||||
|
CommandAction::EnterHelpShell(v) => b::typed("enter help shell", v.pretty()),
|
||||||
|
CommandAction::PreviousShell => b::description("previous shell"),
|
||||||
|
CommandAction::NextShell => b::description("next shell"),
|
||||||
|
CommandAction::LeaveShell => b::description("leave shell"),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||||
|
pub enum ReturnSuccess {
|
||||||
|
Value(Value),
|
||||||
|
DebugValue(Value),
|
||||||
|
Action(CommandAction),
|
||||||
|
}
|
||||||
|
|
||||||
|
impl PrettyDebug for ReturnSuccess {
|
||||||
|
fn pretty(&self) -> DebugDocBuilder {
|
||||||
|
match self {
|
||||||
|
ReturnSuccess::Value(value) => b::typed("value", value.pretty()),
|
||||||
|
ReturnSuccess::DebugValue(value) => b::typed("debug value", value.pretty()),
|
||||||
|
ReturnSuccess::Action(action) => b::typed("action", action.pretty()),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub type ReturnValue = Result<ReturnSuccess, ShellError>;
|
||||||
|
|
||||||
|
impl Into<ReturnValue> for Value {
|
||||||
|
fn into(self) -> ReturnValue {
|
||||||
|
Ok(ReturnSuccess::Value(self))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl ReturnSuccess {
|
||||||
|
pub fn change_cwd(path: String) -> ReturnValue {
|
||||||
|
Ok(ReturnSuccess::Action(CommandAction::ChangePath(path)))
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn value(input: impl Into<Value>) -> ReturnValue {
|
||||||
|
Ok(ReturnSuccess::Value(input.into()))
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn debug_value(input: impl Into<Value>) -> ReturnValue {
|
||||||
|
Ok(ReturnSuccess::DebugValue(input.into()))
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn action(input: CommandAction) -> ReturnValue {
|
||||||
|
Ok(ReturnSuccess::Action(input))
|
||||||
|
}
|
||||||
|
}
|
@ -1,11 +1,7 @@
|
|||||||
// TODO: Temporary redirect
|
use crate::syntax_shape::SyntaxShape;
|
||||||
pub(crate) use crate::context::CommandRegistry;
|
use crate::type_shape::Type;
|
||||||
use crate::evaluate::{evaluate_baseline_expr, Scope};
|
|
||||||
use crate::parser::{hir, hir::SyntaxShape};
|
|
||||||
use crate::prelude::*;
|
|
||||||
use derive_new::new;
|
|
||||||
use indexmap::IndexMap;
|
use indexmap::IndexMap;
|
||||||
|
use nu_source::{b, DebugDocBuilder, PrettyDebug, PrettyDebugWithSource};
|
||||||
use serde::{Deserialize, Serialize};
|
use serde::{Deserialize, Serialize};
|
||||||
|
|
||||||
#[derive(Debug, Serialize, Deserialize, Clone)]
|
#[derive(Debug, Serialize, Deserialize, Clone)]
|
||||||
@ -25,12 +21,12 @@ impl PrettyDebug for PositionalType {
|
|||||||
fn pretty(&self) -> DebugDocBuilder {
|
fn pretty(&self) -> DebugDocBuilder {
|
||||||
match self {
|
match self {
|
||||||
PositionalType::Mandatory(string, shape) => {
|
PositionalType::Mandatory(string, shape) => {
|
||||||
b::description(string) + b::delimit("(", shape.pretty(), ")").as_kind().group()
|
b::description(string) + b::delimit("(", shape.pretty(), ")").into_kind().group()
|
||||||
}
|
}
|
||||||
PositionalType::Optional(string, shape) => {
|
PositionalType::Optional(string, shape) => {
|
||||||
b::description(string)
|
b::description(string)
|
||||||
+ b::operator("?")
|
+ b::operator("?")
|
||||||
+ b::delimit("(", shape.pretty(), ")").as_kind().group()
|
+ b::delimit("(", shape.pretty(), ")").into_kind().group()
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@ -57,14 +53,14 @@ impl PositionalType {
|
|||||||
PositionalType::Optional(name.to_string(), SyntaxShape::Any)
|
PositionalType::Optional(name.to_string(), SyntaxShape::Any)
|
||||||
}
|
}
|
||||||
|
|
||||||
pub(crate) fn name(&self) -> &str {
|
pub fn name(&self) -> &str {
|
||||||
match self {
|
match self {
|
||||||
PositionalType::Mandatory(s, _) => s,
|
PositionalType::Mandatory(s, _) => s,
|
||||||
PositionalType::Optional(s, _) => s,
|
PositionalType::Optional(s, _) => s,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub(crate) fn syntax_type(&self) -> SyntaxShape {
|
pub fn syntax_type(&self) -> SyntaxShape {
|
||||||
match *self {
|
match *self {
|
||||||
PositionalType::Mandatory(_, t) => t,
|
PositionalType::Mandatory(_, t) => t,
|
||||||
PositionalType::Optional(_, t) => t,
|
PositionalType::Optional(_, t) => t,
|
||||||
@ -81,6 +77,8 @@ pub struct Signature {
|
|||||||
pub positional: Vec<(PositionalType, Description)>,
|
pub positional: Vec<(PositionalType, Description)>,
|
||||||
pub rest_positional: Option<(SyntaxShape, Description)>,
|
pub rest_positional: Option<(SyntaxShape, Description)>,
|
||||||
pub named: IndexMap<String, (NamedType, Description)>,
|
pub named: IndexMap<String, (NamedType, Description)>,
|
||||||
|
pub yields: Option<Type>,
|
||||||
|
pub input: Option<Type>,
|
||||||
pub is_filter: bool,
|
pub is_filter: bool,
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -103,14 +101,16 @@ impl PrettyDebugWithSource for Signature {
|
|||||||
}
|
}
|
||||||
|
|
||||||
impl Signature {
|
impl Signature {
|
||||||
pub fn new(name: String) -> Signature {
|
pub fn new(name: impl Into<String>) -> Signature {
|
||||||
Signature {
|
Signature {
|
||||||
name,
|
name: name.into(),
|
||||||
usage: String::new(),
|
usage: String::new(),
|
||||||
positional: vec![],
|
positional: vec![],
|
||||||
rest_positional: None,
|
rest_positional: None,
|
||||||
named: IndexMap::new(),
|
named: IndexMap::new(),
|
||||||
is_filter: false,
|
is_filter: false,
|
||||||
|
yields: None,
|
||||||
|
input: None,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -191,136 +191,14 @@ impl Signature {
|
|||||||
self.rest_positional = Some((ty, desc.into()));
|
self.rest_positional = Some((ty, desc.into()));
|
||||||
self
|
self
|
||||||
}
|
}
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug, Default, new, Serialize, Deserialize, Clone)]
|
pub fn yields(mut self, ty: Type) -> Signature {
|
||||||
pub struct EvaluatedArgs {
|
self.yields = Some(ty);
|
||||||
pub positional: Option<Vec<Value>>,
|
self
|
||||||
pub named: Option<IndexMap<String, Value>>,
|
}
|
||||||
}
|
|
||||||
|
|
||||||
impl EvaluatedArgs {
|
pub fn input(mut self, ty: Type) -> Signature {
|
||||||
pub fn slice_from(&self, from: usize) -> Vec<Value> {
|
self.input = Some(ty);
|
||||||
let positional = &self.positional;
|
self
|
||||||
|
|
||||||
match positional {
|
|
||||||
None => vec![],
|
|
||||||
Some(list) => list[from..].to_vec(),
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl EvaluatedArgs {
|
|
||||||
pub fn nth(&self, pos: usize) -> Option<&Value> {
|
|
||||||
match &self.positional {
|
|
||||||
None => None,
|
|
||||||
Some(array) => array.iter().nth(pos),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn expect_nth(&self, pos: usize) -> Result<&Value, ShellError> {
|
|
||||||
match &self.positional {
|
|
||||||
None => Err(ShellError::unimplemented("Better error: expect_nth")),
|
|
||||||
Some(array) => match array.iter().nth(pos) {
|
|
||||||
None => Err(ShellError::unimplemented("Better error: expect_nth")),
|
|
||||||
Some(item) => Ok(item),
|
|
||||||
},
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn len(&self) -> usize {
|
|
||||||
match &self.positional {
|
|
||||||
None => 0,
|
|
||||||
Some(array) => array.len(),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn has(&self, name: &str) -> bool {
|
|
||||||
match &self.named {
|
|
||||||
None => false,
|
|
||||||
Some(named) => named.contains_key(name),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn get(&self, name: &str) -> Option<&Value> {
|
|
||||||
match &self.named {
|
|
||||||
None => None,
|
|
||||||
Some(named) => named.get(name),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn positional_iter(&self) -> PositionalIter<'_> {
|
|
||||||
match &self.positional {
|
|
||||||
None => PositionalIter::Empty,
|
|
||||||
Some(v) => {
|
|
||||||
let iter = v.iter();
|
|
||||||
PositionalIter::Array(iter)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub enum PositionalIter<'a> {
|
|
||||||
Empty,
|
|
||||||
Array(std::slice::Iter<'a, Value>),
|
|
||||||
}
|
|
||||||
|
|
||||||
impl<'a> Iterator for PositionalIter<'a> {
|
|
||||||
type Item = &'a Value;
|
|
||||||
|
|
||||||
fn next(&mut self) -> Option<Self::Item> {
|
|
||||||
match self {
|
|
||||||
PositionalIter::Empty => None,
|
|
||||||
PositionalIter::Array(iter) => iter.next(),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub(crate) fn evaluate_args(
|
|
||||||
call: &hir::Call,
|
|
||||||
registry: &CommandRegistry,
|
|
||||||
scope: &Scope,
|
|
||||||
source: &Text,
|
|
||||||
) -> Result<EvaluatedArgs, ShellError> {
|
|
||||||
let positional: Result<Option<Vec<_>>, _> = call
|
|
||||||
.positional()
|
|
||||||
.as_ref()
|
|
||||||
.map(|p| {
|
|
||||||
p.iter()
|
|
||||||
.map(|e| evaluate_baseline_expr(e, registry, scope, source))
|
|
||||||
.collect()
|
|
||||||
})
|
|
||||||
.transpose();
|
|
||||||
|
|
||||||
let positional = positional?;
|
|
||||||
|
|
||||||
let named: Result<Option<IndexMap<String, Value>>, ShellError> = call
|
|
||||||
.named()
|
|
||||||
.as_ref()
|
|
||||||
.map(|n| {
|
|
||||||
let mut results = IndexMap::new();
|
|
||||||
|
|
||||||
for (name, value) in n.named.iter() {
|
|
||||||
match value {
|
|
||||||
hir::named::NamedValue::PresentSwitch(tag) => {
|
|
||||||
results.insert(name.clone(), UntaggedValue::boolean(true).into_value(tag));
|
|
||||||
}
|
|
||||||
hir::named::NamedValue::Value(expr) => {
|
|
||||||
results.insert(
|
|
||||||
name.clone(),
|
|
||||||
evaluate_baseline_expr(expr, registry, scope, source)?,
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
_ => {}
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
Ok(results)
|
|
||||||
})
|
|
||||||
.transpose();
|
|
||||||
|
|
||||||
let named = named?;
|
|
||||||
|
|
||||||
Ok(EvaluatedArgs::new(positional, named))
|
|
||||||
}
|
|
33
crates/nu-protocol/src/syntax_shape.rs
Normal file
33
crates/nu-protocol/src/syntax_shape.rs
Normal file
@ -0,0 +1,33 @@
|
|||||||
|
use nu_source::{b, DebugDocBuilder, PrettyDebug};
|
||||||
|
use serde::{Deserialize, Serialize};
|
||||||
|
|
||||||
|
#[derive(Debug, Copy, Clone, Serialize, Deserialize)]
|
||||||
|
pub enum SyntaxShape {
|
||||||
|
Any,
|
||||||
|
String,
|
||||||
|
Member,
|
||||||
|
ColumnPath,
|
||||||
|
Number,
|
||||||
|
Range,
|
||||||
|
Int,
|
||||||
|
Path,
|
||||||
|
Pattern,
|
||||||
|
Block,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl PrettyDebug for SyntaxShape {
|
||||||
|
fn pretty(&self) -> DebugDocBuilder {
|
||||||
|
b::kind(match self {
|
||||||
|
SyntaxShape::Any => "any shape",
|
||||||
|
SyntaxShape::String => "string shape",
|
||||||
|
SyntaxShape::Member => "member shape",
|
||||||
|
SyntaxShape::ColumnPath => "column path shape",
|
||||||
|
SyntaxShape::Number => "number shape",
|
||||||
|
SyntaxShape::Range => "range shape",
|
||||||
|
SyntaxShape::Int => "integer shape",
|
||||||
|
SyntaxShape::Path => "file path shape",
|
||||||
|
SyntaxShape::Pattern => "pattern shape",
|
||||||
|
SyntaxShape::Block => "block shape",
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
@ -1,5 +1,4 @@
|
|||||||
use crate::prelude::*;
|
use nu_source::{DebugDocBuilder, HasSpan, Spanned, SpannedItem, Tagged};
|
||||||
use nu_source::{DebugDocBuilder, Spanned, SpannedItem, Tagged};
|
|
||||||
|
|
||||||
pub trait ShellTypeName {
|
pub trait ShellTypeName {
|
||||||
fn type_name(&self) -> &'static str;
|
fn type_name(&self) -> &'static str;
|
382
crates/nu-protocol/src/type_shape.rs
Normal file
382
crates/nu-protocol/src/type_shape.rs
Normal file
@ -0,0 +1,382 @@
|
|||||||
|
use crate::value::dict::Dictionary;
|
||||||
|
use crate::value::primitive::Primitive;
|
||||||
|
use crate::value::range::RangeInclusion;
|
||||||
|
use crate::value::{UntaggedValue, Value};
|
||||||
|
use derive_new::new;
|
||||||
|
use nu_source::{b, DebugDoc, DebugDocBuilder, PrettyDebug};
|
||||||
|
use serde::{Deserialize, Deserializer, Serialize};
|
||||||
|
use std::collections::BTreeMap;
|
||||||
|
use std::fmt::Debug;
|
||||||
|
use std::hash::Hash;
|
||||||
|
|
||||||
|
/**
|
||||||
|
This file describes the structural types of the nushell system.
|
||||||
|
|
||||||
|
Its primary purpose today is to identify "equivalent" values for the purpose
|
||||||
|
of merging rows into a single table or identify rows in a table that have the
|
||||||
|
same shape for reflection.
|
||||||
|
*/
|
||||||
|
|
||||||
|
#[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize, new)]
|
||||||
|
pub struct RangeType {
|
||||||
|
from: (Type, RangeInclusion),
|
||||||
|
to: (Type, RangeInclusion),
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)]
|
||||||
|
pub enum Type {
|
||||||
|
Nothing,
|
||||||
|
Int,
|
||||||
|
Range(Box<RangeType>),
|
||||||
|
Decimal,
|
||||||
|
Bytesize,
|
||||||
|
String,
|
||||||
|
Line,
|
||||||
|
ColumnPath,
|
||||||
|
Pattern,
|
||||||
|
Boolean,
|
||||||
|
Date,
|
||||||
|
Duration,
|
||||||
|
Path,
|
||||||
|
Binary,
|
||||||
|
|
||||||
|
Row(Row),
|
||||||
|
Table(Vec<Type>),
|
||||||
|
|
||||||
|
// TODO: Block arguments
|
||||||
|
Block,
|
||||||
|
// TODO: Error type
|
||||||
|
Error,
|
||||||
|
|
||||||
|
// Stream markers (used as bookend markers rather than actual values)
|
||||||
|
BeginningOfStream,
|
||||||
|
EndOfStream,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Hash, new)]
|
||||||
|
pub struct Row {
|
||||||
|
#[new(default)]
|
||||||
|
map: BTreeMap<Column, Type>,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Serialize for Row {
|
||||||
|
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
|
||||||
|
where
|
||||||
|
S: serde::Serializer,
|
||||||
|
{
|
||||||
|
serializer.collect_map(self.map.iter())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<'de> Deserialize<'de> for Row {
|
||||||
|
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
|
||||||
|
where
|
||||||
|
D: Deserializer<'de>,
|
||||||
|
{
|
||||||
|
struct RowVisitor;
|
||||||
|
|
||||||
|
impl<'de> serde::de::Visitor<'de> for RowVisitor {
|
||||||
|
type Value = Row;
|
||||||
|
fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result {
|
||||||
|
write!(formatter, "a row")
|
||||||
|
}
|
||||||
|
|
||||||
|
fn visit_map<A>(self, mut map: A) -> Result<Self::Value, A::Error>
|
||||||
|
where
|
||||||
|
A: serde::de::MapAccess<'de>,
|
||||||
|
{
|
||||||
|
let mut new_map = BTreeMap::new();
|
||||||
|
|
||||||
|
loop {
|
||||||
|
let entry = map.next_entry()?;
|
||||||
|
|
||||||
|
match entry {
|
||||||
|
None => return Ok(Row { map: new_map }),
|
||||||
|
Some((key, value)) => {
|
||||||
|
new_map.insert(key, value);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
deserializer.deserialize_map(RowVisitor)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Type {
|
||||||
|
pub fn from_primitive(primitive: &Primitive) -> Type {
|
||||||
|
match primitive {
|
||||||
|
Primitive::Nothing => Type::Nothing,
|
||||||
|
Primitive::Int(_) => Type::Int,
|
||||||
|
Primitive::Range(range) => {
|
||||||
|
let (left_value, left_inclusion) = &range.from;
|
||||||
|
let (right_value, right_inclusion) = &range.to;
|
||||||
|
|
||||||
|
let left_type = (Type::from_primitive(left_value), *left_inclusion);
|
||||||
|
let right_type = (Type::from_primitive(right_value), *right_inclusion);
|
||||||
|
|
||||||
|
let range = RangeType::new(left_type, right_type);
|
||||||
|
Type::Range(Box::new(range))
|
||||||
|
}
|
||||||
|
Primitive::Decimal(_) => Type::Decimal,
|
||||||
|
Primitive::Bytes(_) => Type::Bytesize,
|
||||||
|
Primitive::String(_) => Type::String,
|
||||||
|
Primitive::Line(_) => Type::Line,
|
||||||
|
Primitive::ColumnPath(_) => Type::ColumnPath,
|
||||||
|
Primitive::Pattern(_) => Type::Pattern,
|
||||||
|
Primitive::Boolean(_) => Type::Boolean,
|
||||||
|
Primitive::Date(_) => Type::Date,
|
||||||
|
Primitive::Duration(_) => Type::Duration,
|
||||||
|
Primitive::Path(_) => Type::Path,
|
||||||
|
Primitive::Binary(_) => Type::Binary,
|
||||||
|
Primitive::BeginningOfStream => Type::BeginningOfStream,
|
||||||
|
Primitive::EndOfStream => Type::EndOfStream,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn from_dictionary(dictionary: &Dictionary) -> Type {
|
||||||
|
let mut map = BTreeMap::new();
|
||||||
|
|
||||||
|
for (key, value) in dictionary.entries.iter() {
|
||||||
|
let column = Column::String(key.clone());
|
||||||
|
map.insert(column, Type::from_value(value));
|
||||||
|
}
|
||||||
|
|
||||||
|
Type::Row(Row { map })
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn from_table<'a>(table: impl IntoIterator<Item = &'a Value>) -> Type {
|
||||||
|
let mut vec = vec![];
|
||||||
|
|
||||||
|
for item in table.into_iter() {
|
||||||
|
vec.push(Type::from_value(item))
|
||||||
|
}
|
||||||
|
|
||||||
|
Type::Table(vec)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn from_value<'a>(value: impl Into<&'a UntaggedValue>) -> Type {
|
||||||
|
match value.into() {
|
||||||
|
UntaggedValue::Primitive(p) => Type::from_primitive(p),
|
||||||
|
UntaggedValue::Row(row) => Type::from_dictionary(row),
|
||||||
|
UntaggedValue::Table(table) => Type::from_table(table.iter()),
|
||||||
|
UntaggedValue::Error(_) => Type::Error,
|
||||||
|
UntaggedValue::Block(_) => Type::Block,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl PrettyDebug for Type {
|
||||||
|
fn pretty(&self) -> DebugDocBuilder {
|
||||||
|
match self {
|
||||||
|
Type::Nothing => ty("nothing"),
|
||||||
|
Type::Int => ty("integer"),
|
||||||
|
Type::Range(range) => {
|
||||||
|
let (left, left_inclusion) = &range.from;
|
||||||
|
let (right, right_inclusion) = &range.to;
|
||||||
|
|
||||||
|
let left_bracket = b::delimiter(match left_inclusion {
|
||||||
|
RangeInclusion::Exclusive => "(",
|
||||||
|
RangeInclusion::Inclusive => "[",
|
||||||
|
});
|
||||||
|
|
||||||
|
let right_bracket = b::delimiter(match right_inclusion {
|
||||||
|
RangeInclusion::Exclusive => ")",
|
||||||
|
RangeInclusion::Inclusive => "]",
|
||||||
|
});
|
||||||
|
|
||||||
|
b::typed(
|
||||||
|
"range",
|
||||||
|
(left_bracket
|
||||||
|
+ left.pretty()
|
||||||
|
+ b::operator(",")
|
||||||
|
+ b::space()
|
||||||
|
+ right.pretty()
|
||||||
|
+ right_bracket)
|
||||||
|
.group(),
|
||||||
|
)
|
||||||
|
}
|
||||||
|
Type::Decimal => ty("decimal"),
|
||||||
|
Type::Bytesize => ty("bytesize"),
|
||||||
|
Type::String => ty("string"),
|
||||||
|
Type::Line => ty("line"),
|
||||||
|
Type::ColumnPath => ty("column-path"),
|
||||||
|
Type::Pattern => ty("pattern"),
|
||||||
|
Type::Boolean => ty("boolean"),
|
||||||
|
Type::Date => ty("date"),
|
||||||
|
Type::Duration => ty("duration"),
|
||||||
|
Type::Path => ty("path"),
|
||||||
|
Type::Binary => ty("binary"),
|
||||||
|
Type::Error => b::error("error"),
|
||||||
|
Type::BeginningOfStream => b::keyword("beginning-of-stream"),
|
||||||
|
Type::EndOfStream => b::keyword("end-of-stream"),
|
||||||
|
Type::Row(row) => (b::kind("row")
|
||||||
|
+ b::space()
|
||||||
|
+ b::intersperse(
|
||||||
|
row.map.iter().map(|(key, ty)| {
|
||||||
|
(b::key(match key {
|
||||||
|
Column::String(string) => string.clone(),
|
||||||
|
Column::Value => "<value>".to_string(),
|
||||||
|
}) + b::delimit("(", ty.pretty(), ")").into_kind())
|
||||||
|
.nest()
|
||||||
|
}),
|
||||||
|
b::space(),
|
||||||
|
)
|
||||||
|
.nest())
|
||||||
|
.nest(),
|
||||||
|
|
||||||
|
Type::Table(table) => {
|
||||||
|
let mut group: Group<DebugDoc, Vec<(usize, usize)>> = Group::new();
|
||||||
|
|
||||||
|
for (i, item) in table.iter().enumerate() {
|
||||||
|
group.add(item.to_doc(), i);
|
||||||
|
}
|
||||||
|
|
||||||
|
(b::kind("table") + b::space() + b::keyword("of")).group()
|
||||||
|
+ b::space()
|
||||||
|
+ (if group.len() == 1 {
|
||||||
|
let (doc, _) = group.into_iter().nth(0).unwrap();
|
||||||
|
DebugDocBuilder::from_doc(doc)
|
||||||
|
} else {
|
||||||
|
b::intersperse(
|
||||||
|
group.into_iter().map(|(doc, rows)| {
|
||||||
|
(b::intersperse(
|
||||||
|
rows.iter().map(|(from, to)| {
|
||||||
|
if from == to {
|
||||||
|
b::description(from)
|
||||||
|
} else {
|
||||||
|
(b::description(from)
|
||||||
|
+ b::space()
|
||||||
|
+ b::keyword("to")
|
||||||
|
+ b::space()
|
||||||
|
+ b::description(to))
|
||||||
|
.group()
|
||||||
|
}
|
||||||
|
}),
|
||||||
|
b::description(", "),
|
||||||
|
) + b::description(":")
|
||||||
|
+ b::space()
|
||||||
|
+ DebugDocBuilder::from_doc(doc))
|
||||||
|
.nest()
|
||||||
|
}),
|
||||||
|
b::space(),
|
||||||
|
)
|
||||||
|
})
|
||||||
|
}
|
||||||
|
Type::Block => ty("block"),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, new)]
|
||||||
|
struct DebugEntry<'a> {
|
||||||
|
key: &'a Column,
|
||||||
|
value: &'a Type,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<'a> PrettyDebug for DebugEntry<'a> {
|
||||||
|
fn pretty(&self) -> DebugDocBuilder {
|
||||||
|
(b::key(match self.key {
|
||||||
|
Column::String(string) => string.clone(),
|
||||||
|
Column::Value => format!("<value>"),
|
||||||
|
}) + b::delimit("(", self.value.pretty(), ")").into_kind())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn ty(name: impl std::fmt::Display) -> DebugDocBuilder {
|
||||||
|
b::kind(format!("{}", name))
|
||||||
|
}
|
||||||
|
|
||||||
|
pub trait GroupedValue: Debug + Clone {
|
||||||
|
type Item;
|
||||||
|
|
||||||
|
fn new() -> Self;
|
||||||
|
fn merge(&mut self, value: Self::Item);
|
||||||
|
}
|
||||||
|
|
||||||
|
impl GroupedValue for Vec<(usize, usize)> {
|
||||||
|
type Item = usize;
|
||||||
|
|
||||||
|
fn new() -> Vec<(usize, usize)> {
|
||||||
|
vec![]
|
||||||
|
}
|
||||||
|
|
||||||
|
fn merge(&mut self, new_value: usize) {
|
||||||
|
match self.last_mut() {
|
||||||
|
Some(value) if value.1 == new_value - 1 => {
|
||||||
|
value.1 += 1;
|
||||||
|
}
|
||||||
|
|
||||||
|
_ => self.push((new_value, new_value)),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug)]
|
||||||
|
pub struct Group<K: Debug + Eq + Hash, V: GroupedValue> {
|
||||||
|
values: indexmap::IndexMap<K, V>,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<K, G> Group<K, G>
|
||||||
|
where
|
||||||
|
K: Debug + Eq + Hash,
|
||||||
|
G: GroupedValue,
|
||||||
|
{
|
||||||
|
pub fn new() -> Group<K, G> {
|
||||||
|
Group {
|
||||||
|
values: indexmap::IndexMap::default(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn len(&self) -> usize {
|
||||||
|
self.values.len()
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn into_iter(self) -> impl Iterator<Item = (K, G)> {
|
||||||
|
self.values.into_iter()
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn add(&mut self, key: impl Into<K>, value: impl Into<G::Item>) {
|
||||||
|
let key = key.into();
|
||||||
|
let value = value.into();
|
||||||
|
|
||||||
|
let group = self.values.get_mut(&key);
|
||||||
|
|
||||||
|
match group {
|
||||||
|
None => {
|
||||||
|
self.values.insert(key, {
|
||||||
|
let mut group = G::new();
|
||||||
|
group.merge(value.into());
|
||||||
|
group
|
||||||
|
});
|
||||||
|
}
|
||||||
|
Some(group) => {
|
||||||
|
group.merge(value.into());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Serialize, Deserialize, Hash)]
|
||||||
|
pub enum Column {
|
||||||
|
String(String),
|
||||||
|
Value,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Into<Column> for String {
|
||||||
|
fn into(self) -> Column {
|
||||||
|
Column::String(self)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Into<Column> for &String {
|
||||||
|
fn into(self) -> Column {
|
||||||
|
Column::String(self.clone())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Into<Column> for &str {
|
||||||
|
fn into(self) -> Column {
|
||||||
|
Column::String(self.to_string())
|
||||||
|
}
|
||||||
|
}
|
307
crates/nu-protocol/src/value.rs
Normal file
307
crates/nu-protocol/src/value.rs
Normal file
@ -0,0 +1,307 @@
|
|||||||
|
pub mod column_path;
|
||||||
|
mod convert;
|
||||||
|
mod debug;
|
||||||
|
pub mod dict;
|
||||||
|
pub mod evaluate;
|
||||||
|
pub mod primitive;
|
||||||
|
pub mod range;
|
||||||
|
mod serde_bigdecimal;
|
||||||
|
mod serde_bigint;
|
||||||
|
|
||||||
|
use crate::type_name::{ShellTypeName, SpannedTypeName};
|
||||||
|
use crate::value::dict::Dictionary;
|
||||||
|
use crate::value::evaluate::Evaluate;
|
||||||
|
use crate::value::primitive::Primitive;
|
||||||
|
use crate::value::range::{Range, RangeInclusion};
|
||||||
|
use crate::{ColumnPath, PathMember};
|
||||||
|
use bigdecimal::BigDecimal;
|
||||||
|
use indexmap::IndexMap;
|
||||||
|
use nu_errors::ShellError;
|
||||||
|
use nu_source::{AnchorLocation, HasSpan, Span, Spanned, Tag};
|
||||||
|
use num_bigint::BigInt;
|
||||||
|
use serde::{Deserialize, Serialize};
|
||||||
|
use std::path::PathBuf;
|
||||||
|
use std::time::SystemTime;
|
||||||
|
|
||||||
|
#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Serialize, Deserialize)]
|
||||||
|
pub enum UntaggedValue {
|
||||||
|
Primitive(Primitive),
|
||||||
|
Row(Dictionary),
|
||||||
|
Table(Vec<Value>),
|
||||||
|
|
||||||
|
// Errors are a type of value too
|
||||||
|
Error(ShellError),
|
||||||
|
|
||||||
|
Block(Evaluate),
|
||||||
|
}
|
||||||
|
|
||||||
|
impl UntaggedValue {
|
||||||
|
pub fn retag(self, tag: impl Into<Tag>) -> Value {
|
||||||
|
Value {
|
||||||
|
value: self,
|
||||||
|
tag: tag.into(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn data_descriptors(&self) -> Vec<String> {
|
||||||
|
match self {
|
||||||
|
UntaggedValue::Primitive(_) => vec![],
|
||||||
|
UntaggedValue::Row(columns) => columns
|
||||||
|
.entries
|
||||||
|
.keys()
|
||||||
|
.into_iter()
|
||||||
|
.map(|x| x.to_string())
|
||||||
|
.collect(),
|
||||||
|
UntaggedValue::Block(_) => vec![],
|
||||||
|
UntaggedValue::Table(_) => vec![],
|
||||||
|
UntaggedValue::Error(_) => vec![],
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn into_value(self, tag: impl Into<Tag>) -> Value {
|
||||||
|
Value {
|
||||||
|
value: self,
|
||||||
|
tag: tag.into(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn into_untagged_value(self) -> Value {
|
||||||
|
Value {
|
||||||
|
value: self,
|
||||||
|
tag: Tag::unknown(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn is_true(&self) -> bool {
|
||||||
|
match self {
|
||||||
|
UntaggedValue::Primitive(Primitive::Boolean(true)) => true,
|
||||||
|
_ => false,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn is_some(&self) -> bool {
|
||||||
|
!self.is_none()
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn is_none(&self) -> bool {
|
||||||
|
match self {
|
||||||
|
UntaggedValue::Primitive(Primitive::Nothing) => true,
|
||||||
|
_ => false,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn is_error(&self) -> bool {
|
||||||
|
match self {
|
||||||
|
UntaggedValue::Error(_err) => true,
|
||||||
|
_ => false,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn expect_error(&self) -> ShellError {
|
||||||
|
match self {
|
||||||
|
UntaggedValue::Error(err) => err.clone(),
|
||||||
|
_ => panic!("Don't call expect_error without first calling is_error"),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn expect_string(&self) -> &str {
|
||||||
|
match self {
|
||||||
|
UntaggedValue::Primitive(Primitive::String(string)) => &string[..],
|
||||||
|
_ => panic!("expect_string assumes that the value must be a string"),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[allow(unused)]
|
||||||
|
pub fn row(entries: IndexMap<String, Value>) -> UntaggedValue {
|
||||||
|
UntaggedValue::Row(entries.into())
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn table(list: &Vec<Value>) -> UntaggedValue {
|
||||||
|
UntaggedValue::Table(list.to_vec())
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn string(s: impl Into<String>) -> UntaggedValue {
|
||||||
|
UntaggedValue::Primitive(Primitive::String(s.into()))
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn line(s: impl Into<String>) -> UntaggedValue {
|
||||||
|
UntaggedValue::Primitive(Primitive::Line(s.into()))
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn column_path(s: Vec<impl Into<PathMember>>) -> UntaggedValue {
|
||||||
|
UntaggedValue::Primitive(Primitive::ColumnPath(ColumnPath::new(
|
||||||
|
s.into_iter().map(|p| p.into()).collect(),
|
||||||
|
)))
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn int(i: impl Into<BigInt>) -> UntaggedValue {
|
||||||
|
UntaggedValue::Primitive(Primitive::Int(i.into()))
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn pattern(s: impl Into<String>) -> UntaggedValue {
|
||||||
|
UntaggedValue::Primitive(Primitive::String(s.into()))
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn path(s: impl Into<PathBuf>) -> UntaggedValue {
|
||||||
|
UntaggedValue::Primitive(Primitive::Path(s.into()))
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn bytes(s: impl Into<u64>) -> UntaggedValue {
|
||||||
|
UntaggedValue::Primitive(Primitive::Bytes(s.into()))
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn decimal(s: impl Into<BigDecimal>) -> UntaggedValue {
|
||||||
|
UntaggedValue::Primitive(Primitive::Decimal(s.into()))
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn binary(binary: Vec<u8>) -> UntaggedValue {
|
||||||
|
UntaggedValue::Primitive(Primitive::Binary(binary))
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn range(
|
||||||
|
left: (Spanned<Primitive>, RangeInclusion),
|
||||||
|
right: (Spanned<Primitive>, RangeInclusion),
|
||||||
|
) -> UntaggedValue {
|
||||||
|
UntaggedValue::Primitive(Primitive::Range(Box::new(Range::new(left, right))))
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn boolean(s: impl Into<bool>) -> UntaggedValue {
|
||||||
|
UntaggedValue::Primitive(Primitive::Boolean(s.into()))
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn duration(secs: u64) -> UntaggedValue {
|
||||||
|
UntaggedValue::Primitive(Primitive::Duration(secs))
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn system_date(s: SystemTime) -> UntaggedValue {
|
||||||
|
UntaggedValue::Primitive(Primitive::Date(s.into()))
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn nothing() -> UntaggedValue {
|
||||||
|
UntaggedValue::Primitive(Primitive::Nothing)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Clone, PartialOrd, PartialEq, Ord, Eq, Serialize, Deserialize)]
|
||||||
|
pub struct Value {
|
||||||
|
pub value: UntaggedValue,
|
||||||
|
pub tag: Tag,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl std::ops::Deref for Value {
|
||||||
|
type Target = UntaggedValue;
|
||||||
|
|
||||||
|
fn deref(&self) -> &Self::Target {
|
||||||
|
&self.value
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Value {
|
||||||
|
pub fn anchor(&self) -> Option<AnchorLocation> {
|
||||||
|
self.tag.anchor()
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn anchor_name(&self) -> Option<String> {
|
||||||
|
self.tag.anchor_name()
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn tag(&self) -> Tag {
|
||||||
|
self.tag.clone()
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn as_string(&self) -> Result<String, ShellError> {
|
||||||
|
match &self.value {
|
||||||
|
UntaggedValue::Primitive(Primitive::String(string)) => Ok(string.clone()),
|
||||||
|
UntaggedValue::Primitive(Primitive::Line(line)) => Ok(line.clone() + "\n"),
|
||||||
|
_ => Err(ShellError::type_error("string", self.spanned_type_name())),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn as_forgiving_string(&self) -> Result<&str, ShellError> {
|
||||||
|
match &self.value {
|
||||||
|
UntaggedValue::Primitive(Primitive::String(string)) => Ok(&string[..]),
|
||||||
|
_ => Err(ShellError::type_error("string", self.spanned_type_name())),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn as_path(&self) -> Result<PathBuf, ShellError> {
|
||||||
|
match &self.value {
|
||||||
|
UntaggedValue::Primitive(Primitive::Path(path)) => Ok(path.clone()),
|
||||||
|
UntaggedValue::Primitive(Primitive::String(path_str)) => {
|
||||||
|
Ok(PathBuf::from(&path_str).clone())
|
||||||
|
}
|
||||||
|
_ => Err(ShellError::type_error("Path", self.spanned_type_name())),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn as_primitive(&self) -> Result<Primitive, ShellError> {
|
||||||
|
match &self.value {
|
||||||
|
UntaggedValue::Primitive(primitive) => Ok(primitive.clone()),
|
||||||
|
_ => Err(ShellError::type_error(
|
||||||
|
"Primitive",
|
||||||
|
self.spanned_type_name(),
|
||||||
|
)),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn as_u64(&self) -> Result<u64, ShellError> {
|
||||||
|
match &self.value {
|
||||||
|
UntaggedValue::Primitive(primitive) => primitive.as_u64(self.tag.span),
|
||||||
|
_ => Err(ShellError::type_error("integer", self.spanned_type_name())),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Into<UntaggedValue> for &str {
|
||||||
|
fn into(self) -> UntaggedValue {
|
||||||
|
UntaggedValue::Primitive(Primitive::String(self.to_string()))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Into<UntaggedValue> for Value {
|
||||||
|
fn into(self) -> UntaggedValue {
|
||||||
|
self.value
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<'a> Into<&'a UntaggedValue> for &'a Value {
|
||||||
|
fn into(self) -> &'a UntaggedValue {
|
||||||
|
&self.value
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl HasSpan for Value {
|
||||||
|
fn span(&self) -> Span {
|
||||||
|
self.tag.span
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl ShellTypeName for Value {
|
||||||
|
fn type_name(&self) -> &'static str {
|
||||||
|
ShellTypeName::type_name(&self.value)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl ShellTypeName for UntaggedValue {
|
||||||
|
fn type_name(&self) -> &'static str {
|
||||||
|
match &self {
|
||||||
|
UntaggedValue::Primitive(p) => p.type_name(),
|
||||||
|
UntaggedValue::Row(_) => "row",
|
||||||
|
UntaggedValue::Table(_) => "table",
|
||||||
|
UntaggedValue::Error(_) => "error",
|
||||||
|
UntaggedValue::Block(_) => "block",
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl From<Primitive> for UntaggedValue {
|
||||||
|
fn from(input: Primitive) -> UntaggedValue {
|
||||||
|
UntaggedValue::Primitive(input)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl From<String> for UntaggedValue {
|
||||||
|
fn from(input: String) -> UntaggedValue {
|
||||||
|
UntaggedValue::Primitive(Primitive::String(input))
|
||||||
|
}
|
||||||
|
}
|
@ -1,8 +1,8 @@
|
|||||||
use crate::parser::hir::Expression;
|
use crate::Value;
|
||||||
use crate::prelude::*;
|
|
||||||
use derive_new::new;
|
use derive_new::new;
|
||||||
use getset::{Getters, MutGetters};
|
use getset::Getters;
|
||||||
use nu_source::{b, span_for_spanned_list, PrettyDebug};
|
use nu_source::{b, span_for_spanned_list, DebugDocBuilder, HasFallibleSpan, PrettyDebug, Span};
|
||||||
|
use num_bigint::BigInt;
|
||||||
use serde::{Deserialize, Serialize};
|
use serde::{Deserialize, Serialize};
|
||||||
|
|
||||||
#[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)]
|
#[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)]
|
||||||
@ -87,37 +87,28 @@ impl PathMember {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(
|
pub fn did_you_mean(obj_source: &Value, field_tried: &PathMember) -> Option<Vec<(usize, String)>> {
|
||||||
Debug,
|
let field_tried = match &field_tried.unspanned {
|
||||||
Clone,
|
UnspannedPathMember::String(string) => string.clone(),
|
||||||
Eq,
|
UnspannedPathMember::Int(int) => format!("{}", int),
|
||||||
PartialEq,
|
};
|
||||||
Ord,
|
|
||||||
PartialOrd,
|
|
||||||
Hash,
|
|
||||||
Getters,
|
|
||||||
MutGetters,
|
|
||||||
Serialize,
|
|
||||||
Deserialize,
|
|
||||||
new,
|
|
||||||
)]
|
|
||||||
#[get = "pub(crate)"]
|
|
||||||
pub struct Path {
|
|
||||||
head: Expression,
|
|
||||||
#[get_mut = "pub(crate)"]
|
|
||||||
tail: Vec<PathMember>,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl PrettyDebugWithSource for Path {
|
let possibilities = obj_source.data_descriptors();
|
||||||
fn pretty_debug(&self, source: &str) -> DebugDocBuilder {
|
|
||||||
self.head.pretty_debug(source)
|
let mut possible_matches: Vec<_> = possibilities
|
||||||
+ b::operator(".")
|
.into_iter()
|
||||||
+ b::intersperse(self.tail.iter().map(|m| m.pretty()), b::operator("."))
|
.map(|x| {
|
||||||
}
|
let word = x.clone();
|
||||||
}
|
let distance = natural::distance::levenshtein_distance(&word, &field_tried);
|
||||||
|
|
||||||
impl Path {
|
(distance, word)
|
||||||
pub(crate) fn parts(self) -> (Expression, Vec<PathMember>) {
|
})
|
||||||
(self.head, self.tail)
|
.collect();
|
||||||
|
|
||||||
|
if !possible_matches.is_empty() {
|
||||||
|
possible_matches.sort();
|
||||||
|
Some(possible_matches)
|
||||||
|
} else {
|
||||||
|
None
|
||||||
}
|
}
|
||||||
}
|
}
|
55
crates/nu-protocol/src/value/convert.rs
Normal file
55
crates/nu-protocol/src/value/convert.rs
Normal file
@ -0,0 +1,55 @@
|
|||||||
|
use crate::type_name::SpannedTypeName;
|
||||||
|
use crate::value::dict::Dictionary;
|
||||||
|
use crate::value::primitive::Primitive;
|
||||||
|
use crate::value::{UntaggedValue, Value};
|
||||||
|
use nu_errors::{CoerceInto, ShellError};
|
||||||
|
use nu_source::TaggedItem;
|
||||||
|
|
||||||
|
impl std::convert::TryFrom<&Value> for i64 {
|
||||||
|
type Error = ShellError;
|
||||||
|
|
||||||
|
fn try_from(value: &Value) -> Result<i64, ShellError> {
|
||||||
|
match &value.value {
|
||||||
|
UntaggedValue::Primitive(Primitive::Int(int)) => {
|
||||||
|
int.tagged(&value.tag).coerce_into("converting to i64")
|
||||||
|
}
|
||||||
|
_ => Err(ShellError::type_error("Integer", value.spanned_type_name())),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl std::convert::TryFrom<&Value> for String {
|
||||||
|
type Error = ShellError;
|
||||||
|
|
||||||
|
fn try_from(value: &Value) -> Result<String, ShellError> {
|
||||||
|
match &value.value {
|
||||||
|
UntaggedValue::Primitive(Primitive::String(s)) => Ok(s.clone()),
|
||||||
|
_ => Err(ShellError::type_error("String", value.spanned_type_name())),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl std::convert::TryFrom<&Value> for Vec<u8> {
|
||||||
|
type Error = ShellError;
|
||||||
|
|
||||||
|
fn try_from(value: &Value) -> Result<Vec<u8>, ShellError> {
|
||||||
|
match &value.value {
|
||||||
|
UntaggedValue::Primitive(Primitive::Binary(b)) => Ok(b.clone()),
|
||||||
|
_ => Err(ShellError::type_error("Binary", value.spanned_type_name())),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<'a> std::convert::TryFrom<&'a Value> for &'a Dictionary {
|
||||||
|
type Error = ShellError;
|
||||||
|
|
||||||
|
fn try_from(value: &'a Value) -> Result<&'a Dictionary, ShellError> {
|
||||||
|
match &value.value {
|
||||||
|
UntaggedValue::Row(d) => Ok(d),
|
||||||
|
_ => Err(ShellError::type_error(
|
||||||
|
"Dictionary",
|
||||||
|
value.spanned_type_name(),
|
||||||
|
)),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
@ -1,15 +1,38 @@
|
|||||||
use crate::data::base::Primitive;
|
use crate::type_name::PrettyType;
|
||||||
use crate::traits::PrettyType;
|
use crate::value::primitive::Primitive;
|
||||||
|
use crate::value::{UntaggedValue, Value};
|
||||||
use nu_source::{b, DebugDocBuilder, PrettyDebug};
|
use nu_source::{b, DebugDocBuilder, PrettyDebug};
|
||||||
|
|
||||||
|
impl PrettyDebug for &Value {
|
||||||
|
fn pretty(&self) -> DebugDocBuilder {
|
||||||
|
PrettyDebug::pretty(*self)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl PrettyDebug for Value {
|
||||||
|
fn pretty(&self) -> DebugDocBuilder {
|
||||||
|
match &self.value {
|
||||||
|
UntaggedValue::Primitive(p) => p.pretty(),
|
||||||
|
UntaggedValue::Row(row) => row.pretty_builder().nest(1).group().into(),
|
||||||
|
UntaggedValue::Table(table) => {
|
||||||
|
b::delimit("[", b::intersperse(table, b::space()), "]").nest()
|
||||||
|
}
|
||||||
|
UntaggedValue::Error(_) => b::error("error"),
|
||||||
|
UntaggedValue::Block(_) => b::opaque("block"),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
impl PrettyType for Primitive {
|
impl PrettyType for Primitive {
|
||||||
fn pretty_type(&self) -> DebugDocBuilder {
|
fn pretty_type(&self) -> DebugDocBuilder {
|
||||||
match self {
|
match self {
|
||||||
Primitive::Nothing => ty("nothing"),
|
Primitive::Nothing => ty("nothing"),
|
||||||
Primitive::Int(_) => ty("integer"),
|
Primitive::Int(_) => ty("integer"),
|
||||||
|
Primitive::Range(_) => ty("range"),
|
||||||
Primitive::Decimal(_) => ty("decimal"),
|
Primitive::Decimal(_) => ty("decimal"),
|
||||||
Primitive::Bytes(_) => ty("bytesize"),
|
Primitive::Bytes(_) => ty("bytesize"),
|
||||||
Primitive::String(_) => ty("string"),
|
Primitive::String(_) => ty("string"),
|
||||||
|
Primitive::Line(_) => ty("line"),
|
||||||
Primitive::ColumnPath(_) => ty("column-path"),
|
Primitive::ColumnPath(_) => ty("column-path"),
|
||||||
Primitive::Pattern(_) => ty("pattern"),
|
Primitive::Pattern(_) => ty("pattern"),
|
||||||
Primitive::Boolean(_) => ty("boolean"),
|
Primitive::Boolean(_) => ty("boolean"),
|
||||||
@ -29,8 +52,24 @@ impl PrettyDebug for Primitive {
|
|||||||
Primitive::Nothing => b::primitive("nothing"),
|
Primitive::Nothing => b::primitive("nothing"),
|
||||||
Primitive::Int(int) => prim(format_args!("{}", int)),
|
Primitive::Int(int) => prim(format_args!("{}", int)),
|
||||||
Primitive::Decimal(decimal) => prim(format_args!("{}", decimal)),
|
Primitive::Decimal(decimal) => prim(format_args!("{}", decimal)),
|
||||||
|
Primitive::Range(range) => {
|
||||||
|
let (left, left_inclusion) = &range.from;
|
||||||
|
let (right, right_inclusion) = &range.to;
|
||||||
|
|
||||||
|
b::typed(
|
||||||
|
"range",
|
||||||
|
(left_inclusion.debug_left_bracket()
|
||||||
|
+ left.pretty()
|
||||||
|
+ b::operator(",")
|
||||||
|
+ b::space()
|
||||||
|
+ right.pretty()
|
||||||
|
+ right_inclusion.debug_right_bracket())
|
||||||
|
.group(),
|
||||||
|
)
|
||||||
|
}
|
||||||
Primitive::Bytes(bytes) => primitive_doc(bytes, "bytesize"),
|
Primitive::Bytes(bytes) => primitive_doc(bytes, "bytesize"),
|
||||||
Primitive::String(string) => prim(string),
|
Primitive::String(string) => prim(string),
|
||||||
|
Primitive::Line(string) => prim(string),
|
||||||
Primitive::ColumnPath(path) => path.pretty(),
|
Primitive::ColumnPath(path) => path.pretty(),
|
||||||
Primitive::Pattern(pattern) => primitive_doc(pattern, "pattern"),
|
Primitive::Pattern(pattern) => primitive_doc(pattern, "pattern"),
|
||||||
Primitive::Boolean(boolean) => match boolean {
|
Primitive::Boolean(boolean) => match boolean {
|
||||||
@ -51,10 +90,10 @@ fn prim(name: impl std::fmt::Debug) -> DebugDocBuilder {
|
|||||||
b::primitive(format!("{:?}", name))
|
b::primitive(format!("{:?}", name))
|
||||||
}
|
}
|
||||||
|
|
||||||
fn ty(name: impl std::fmt::Debug) -> DebugDocBuilder {
|
|
||||||
b::kind(format!("{:?}", name))
|
|
||||||
}
|
|
||||||
|
|
||||||
fn primitive_doc(name: impl std::fmt::Debug, ty: impl Into<String>) -> DebugDocBuilder {
|
fn primitive_doc(name: impl std::fmt::Debug, ty: impl Into<String>) -> DebugDocBuilder {
|
||||||
b::primitive(format!("{:?}", name)) + b::delimit("(", b::kind(ty.into()), ")")
|
b::primitive(format!("{:?}", name)) + b::delimit("(", b::kind(ty.into()), ")")
|
||||||
}
|
}
|
||||||
|
|
||||||
|
fn ty(name: impl std::fmt::Debug) -> DebugDocBuilder {
|
||||||
|
b::kind(format!("{:?}", name))
|
||||||
|
}
|
196
crates/nu-protocol/src/value/dict.rs
Normal file
196
crates/nu-protocol/src/value/dict.rs
Normal file
@ -0,0 +1,196 @@
|
|||||||
|
use crate::maybe_owned::MaybeOwned;
|
||||||
|
use crate::value::primitive::Primitive;
|
||||||
|
use crate::value::{UntaggedValue, Value};
|
||||||
|
use derive_new::new;
|
||||||
|
use getset::Getters;
|
||||||
|
use indexmap::IndexMap;
|
||||||
|
use nu_source::{b, DebugDocBuilder, PrettyDebug, Spanned, Tag};
|
||||||
|
use serde::{Deserialize, Serialize};
|
||||||
|
use std::cmp::{Ord, Ordering, PartialOrd};
|
||||||
|
|
||||||
|
#[derive(Debug, Default, Serialize, Deserialize, PartialEq, Eq, Clone, Getters, new)]
|
||||||
|
pub struct Dictionary {
|
||||||
|
#[get = "pub"]
|
||||||
|
pub entries: IndexMap<String, Value>,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl PartialOrd for Dictionary {
|
||||||
|
fn partial_cmp(&self, other: &Dictionary) -> Option<Ordering> {
|
||||||
|
let this: Vec<&String> = self.entries.keys().collect();
|
||||||
|
let that: Vec<&String> = other.entries.keys().collect();
|
||||||
|
|
||||||
|
if this != that {
|
||||||
|
return this.partial_cmp(&that);
|
||||||
|
}
|
||||||
|
|
||||||
|
let this: Vec<&Value> = self.entries.values().collect();
|
||||||
|
let that: Vec<&Value> = self.entries.values().collect();
|
||||||
|
|
||||||
|
this.partial_cmp(&that)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Ord for Dictionary {
|
||||||
|
fn cmp(&self, other: &Dictionary) -> Ordering {
|
||||||
|
let this: Vec<&String> = self.entries.keys().collect();
|
||||||
|
let that: Vec<&String> = other.entries.keys().collect();
|
||||||
|
|
||||||
|
if this != that {
|
||||||
|
return this.cmp(&that);
|
||||||
|
}
|
||||||
|
|
||||||
|
let this: Vec<&Value> = self.entries.values().collect();
|
||||||
|
let that: Vec<&Value> = self.entries.values().collect();
|
||||||
|
|
||||||
|
this.cmp(&that)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl PartialEq<Value> for Dictionary {
|
||||||
|
fn eq(&self, other: &Value) -> bool {
|
||||||
|
match &other.value {
|
||||||
|
UntaggedValue::Row(d) => self == d,
|
||||||
|
_ => false,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Helper pairing one column name with its value while pretty-printing a row.
#[derive(Debug, new)]
struct DebugEntry<'a> {
    key: &'a str,
    value: &'a Value,
}

impl<'a> PrettyDebug for DebugEntry<'a> {
    /// Renders `key=value`, grouped so the pair wraps as a single unit.
    fn pretty(&self) -> DebugDocBuilder {
        (b::key(self.key.to_string()) + b::equals() + self.value.pretty().into_value()).group()
    }
}
|
||||||
|
|
||||||
|
impl PrettyDebug for Dictionary {
|
||||||
|
fn pretty(&self) -> DebugDocBuilder {
|
||||||
|
b::delimit(
|
||||||
|
"(",
|
||||||
|
b::intersperse(
|
||||||
|
self.entries()
|
||||||
|
.iter()
|
||||||
|
.map(|(key, value)| DebugEntry::new(key, value)),
|
||||||
|
b::space(),
|
||||||
|
),
|
||||||
|
")",
|
||||||
|
)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl From<IndexMap<String, Value>> for Dictionary {
|
||||||
|
fn from(input: IndexMap<String, Value>) -> Dictionary {
|
||||||
|
let mut out = IndexMap::default();
|
||||||
|
|
||||||
|
for (key, value) in input {
|
||||||
|
out.insert(key, value);
|
||||||
|
}
|
||||||
|
|
||||||
|
Dictionary::new(out)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Dictionary {
|
||||||
|
pub fn get_data(&self, desc: &String) -> MaybeOwned<'_, Value> {
|
||||||
|
match self.entries.get(desc) {
|
||||||
|
Some(v) => MaybeOwned::Borrowed(v),
|
||||||
|
None => MaybeOwned::Owned(
|
||||||
|
UntaggedValue::Primitive(Primitive::Nothing).into_untagged_value(),
|
||||||
|
),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn keys(&self) -> impl Iterator<Item = &String> {
|
||||||
|
self.entries.keys()
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn get_data_by_key(&self, name: Spanned<&str>) -> Option<Value> {
|
||||||
|
let result = self
|
||||||
|
.entries
|
||||||
|
.iter()
|
||||||
|
.find(|(desc_name, _)| *desc_name == name.item)?
|
||||||
|
.1;
|
||||||
|
|
||||||
|
Some(
|
||||||
|
result
|
||||||
|
.value
|
||||||
|
.clone()
|
||||||
|
.into_value(Tag::new(result.tag.anchor(), name.span)),
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn get_mut_data_by_key(&mut self, name: &str) -> Option<&mut Value> {
|
||||||
|
match self
|
||||||
|
.entries
|
||||||
|
.iter_mut()
|
||||||
|
.find(|(desc_name, _)| *desc_name == name)
|
||||||
|
{
|
||||||
|
Some((_, v)) => Some(v),
|
||||||
|
None => None,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn insert_data_at_key(&mut self, name: &str, value: Value) {
|
||||||
|
self.entries.insert(name.to_string(), value);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Incrementally builds a row (`Dictionary`) whose inserted untagged values
/// all share a single `Tag`.
#[derive(Debug)]
pub struct TaggedDictBuilder {
    tag: Tag,                      // tag applied to values inserted via insert_untagged
    dict: IndexMap<String, Value>, // accumulated columns, in insertion order
}
|
||||||
|
|
||||||
|
impl TaggedDictBuilder {
|
||||||
|
pub fn new(tag: impl Into<Tag>) -> TaggedDictBuilder {
|
||||||
|
TaggedDictBuilder {
|
||||||
|
tag: tag.into(),
|
||||||
|
dict: IndexMap::default(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn build(tag: impl Into<Tag>, block: impl FnOnce(&mut TaggedDictBuilder)) -> Value {
|
||||||
|
let mut builder = TaggedDictBuilder::new(tag);
|
||||||
|
block(&mut builder);
|
||||||
|
builder.into_value()
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn with_capacity(tag: impl Into<Tag>, n: usize) -> TaggedDictBuilder {
|
||||||
|
TaggedDictBuilder {
|
||||||
|
tag: tag.into(),
|
||||||
|
dict: IndexMap::with_capacity(n),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn insert_untagged(&mut self, key: impl Into<String>, value: impl Into<UntaggedValue>) {
|
||||||
|
self.dict
|
||||||
|
.insert(key.into(), value.into().into_value(&self.tag));
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn insert_value(&mut self, key: impl Into<String>, value: impl Into<Value>) {
|
||||||
|
self.dict.insert(key.into(), value.into());
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn into_value(self) -> Value {
|
||||||
|
let tag = self.tag.clone();
|
||||||
|
self.into_untagged_value().into_value(tag)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn into_untagged_value(self) -> UntaggedValue {
|
||||||
|
UntaggedValue::Row(Dictionary { entries: self.dict })
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn is_empty(&self) -> bool {
|
||||||
|
self.dict.is_empty()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl From<TaggedDictBuilder> for Value {
    /// Finishing a builder is the natural conversion to `Value`.
    fn from(input: TaggedDictBuilder) -> Value {
        input.into_value()
    }
}
|
102
crates/nu-protocol/src/value/evaluate.rs
Normal file
102
crates/nu-protocol/src/value/evaluate.rs
Normal file
@ -0,0 +1,102 @@
|
|||||||
|
use crate::value::{Primitive, UntaggedValue, Value};
|
||||||
|
use indexmap::IndexMap;
|
||||||
|
use nu_errors::ShellError;
|
||||||
|
use query_interface::{interfaces, vtable_for, Object, ObjectHash};
|
||||||
|
use serde::{Deserialize, Serialize};
|
||||||
|
use std::cmp::{Ord, Ordering, PartialOrd};
|
||||||
|
use std::fmt::Debug;
|
||||||
|
|
||||||
|
/// The evaluation context handed to a deferred computation: the current `$it`
/// value plus named variables.
#[derive(Debug)]
pub struct Scope {
    pub it: Value,                     // the `$it` value in scope
    pub vars: IndexMap<String, Value>, // named variables, in insertion order
}
|
||||||
|
|
||||||
|
impl Scope {
|
||||||
|
pub fn new(it: Value) -> Scope {
|
||||||
|
Scope {
|
||||||
|
it,
|
||||||
|
vars: IndexMap::new(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Scope {
|
||||||
|
pub fn empty() -> Scope {
|
||||||
|
Scope {
|
||||||
|
it: UntaggedValue::Primitive(Primitive::Nothing).into_untagged_value(),
|
||||||
|
vars: IndexMap::new(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn it_value(value: Value) -> Scope {
|
||||||
|
Scope {
|
||||||
|
it: value,
|
||||||
|
vars: IndexMap::new(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Serializable hook for values that must be evaluated lazily against a
/// [`Scope`] (e.g. stored blocks). `typetag` keeps the trait object
/// serde-serializable across plugin boundaries.
#[typetag::serde(tag = "type")]
pub trait EvaluateTrait: Debug + Send + Sync + Object + ObjectHash + 'static {
    /// Evaluates against `scope`, producing a concrete `Value`.
    fn invoke(&self, scope: &Scope) -> Result<Value, ShellError>;
    /// Clones the boxed implementation (object-safe substitute for `Clone`).
    fn clone_box(&self) -> Evaluate;
}

// Registers which interfaces can be queried on `Evaluate` via query_interface.
interfaces!(Evaluate: dyn ObjectHash);

// `Evaluate` implements its own trait by delegating to the boxed inner value,
// so an `Evaluate` can stand in wherever an `EvaluateTrait` is expected.
#[typetag::serde]
impl EvaluateTrait for Evaluate {
    fn invoke(&self, scope: &Scope) -> Result<Value, ShellError> {
        self.expr.invoke(scope)
    }

    fn clone_box(&self) -> Evaluate {
        self.expr.clone_box()
    }
}

/// A type-erased, serializable deferred computation.
#[derive(Debug, Serialize, Deserialize)]
pub struct Evaluate {
    expr: Box<dyn EvaluateTrait>, // the boxed implementation being wrapped
}

impl Evaluate {
    /// Boxes any `EvaluateTrait` implementation.
    pub fn new(evaluate: impl EvaluateTrait) -> Evaluate {
        Evaluate {
            expr: Box::new(evaluate),
        }
    }
}

impl std::hash::Hash for Evaluate {
    // Delegates to the object-safe `obj_hash` supplied by `ObjectHash`.
    fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
        self.expr.obj_hash(state)
    }
}

impl Clone for Evaluate {
    fn clone(&self) -> Evaluate {
        self.expr.clone_box()
    }
}

// Deferred computations are opaque, so all comparisons deliberately treat any
// two `Evaluate`s as equal; this lets containing values remain Ord/Eq.
impl Ord for Evaluate {
    fn cmp(&self, _: &Self) -> Ordering {
        Ordering::Equal
    }
}

impl PartialOrd for Evaluate {
    fn partial_cmp(&self, _: &Evaluate) -> Option<Ordering> {
        Some(Ordering::Equal)
    }
}

impl PartialEq for Evaluate {
    fn eq(&self, _: &Evaluate) -> bool {
        true
    }
}

impl Eq for Evaluate {}
|
169
crates/nu-protocol/src/value/primitive.rs
Normal file
169
crates/nu-protocol/src/value/primitive.rs
Normal file
@ -0,0 +1,169 @@
|
|||||||
|
use crate::type_name::ShellTypeName;
|
||||||
|
use crate::value::column_path::ColumnPath;
|
||||||
|
use crate::value::range::Range;
|
||||||
|
use crate::value::{serde_bigdecimal, serde_bigint};
|
||||||
|
use bigdecimal::BigDecimal;
|
||||||
|
use chrono::{DateTime, Utc};
|
||||||
|
use chrono_humanize::Humanize;
|
||||||
|
use nu_errors::{ExpectedRange, ShellError};
|
||||||
|
use nu_source::{PrettyDebug, Span, SpannedItem};
|
||||||
|
use num_bigint::BigInt;
|
||||||
|
use num_traits::cast::{FromPrimitive, ToPrimitive};
|
||||||
|
use serde::{Deserialize, Serialize};
|
||||||
|
use std::path::PathBuf;
|
||||||
|
|
||||||
|
/// A shell scalar value — the leaf types that rows and tables are built from.
#[derive(Debug, Clone, Ord, PartialOrd, Eq, PartialEq, Deserialize, Serialize)]
pub enum Primitive {
    /// The absence of a value (rendered as an empty cell).
    Nothing,
    /// Arbitrary-precision integer (serialized as i64 via `serde_bigint`).
    #[serde(with = "serde_bigint")]
    Int(BigInt),
    /// Arbitrary-precision decimal (serialized as f64 via `serde_bigdecimal`).
    #[serde(with = "serde_bigdecimal")]
    Decimal(BigDecimal),
    /// A size in bytes.
    Bytes(u64),
    String(String),
    /// A line of text, kept distinct from `String` for rendering purposes.
    Line(String),
    /// A dotted path into nested row/table data.
    ColumnPath(ColumnPath),
    /// A glob/wildcard pattern.
    Pattern(String),
    Boolean(bool),
    Date(DateTime<Utc>),
    Duration(u64), // Duration in seconds
    Range(Box<Range>),
    Path(PathBuf),
    #[serde(with = "serde_bytes")]
    Binary(Vec<u8>),

    // Stream markers (used as bookend markers rather than actual values)
    BeginningOfStream,
    EndOfStream,
}
|
||||||
|
|
||||||
|
impl Primitive {
|
||||||
|
pub fn as_u64(&self, span: Span) -> Result<u64, ShellError> {
|
||||||
|
match self {
|
||||||
|
Primitive::Int(int) => match int.to_u64() {
|
||||||
|
None => Err(ShellError::range_error(
|
||||||
|
ExpectedRange::U64,
|
||||||
|
&format!("{}", int).spanned(span),
|
||||||
|
"converting an integer into a 64-bit integer",
|
||||||
|
)),
|
||||||
|
Some(num) => Ok(num),
|
||||||
|
},
|
||||||
|
other => Err(ShellError::type_error(
|
||||||
|
"integer",
|
||||||
|
other.type_name().spanned(span),
|
||||||
|
)),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl From<BigDecimal> for Primitive {
|
||||||
|
fn from(decimal: BigDecimal) -> Primitive {
|
||||||
|
Primitive::Decimal(decimal)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl From<f64> for Primitive {
|
||||||
|
fn from(float: f64) -> Primitive {
|
||||||
|
Primitive::Decimal(BigDecimal::from_f64(float).unwrap())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl ShellTypeName for Primitive {
    /// The user-facing type name shown in error messages and `describe`
    /// output. Exhaustive so adding a variant forces an update here.
    fn type_name(&self) -> &'static str {
        match self {
            Primitive::Nothing => "nothing",
            Primitive::Int(_) => "integer",
            Primitive::Range(_) => "range",
            Primitive::Decimal(_) => "decimal",
            Primitive::Bytes(_) => "bytes",
            Primitive::String(_) => "string",
            Primitive::Line(_) => "line",
            Primitive::ColumnPath(_) => "column path",
            Primitive::Pattern(_) => "pattern",
            Primitive::Boolean(_) => "boolean",
            Primitive::Date(_) => "date",
            Primitive::Duration(_) => "duration",
            Primitive::Path(_) => "file path",
            Primitive::Binary(_) => "binary",
            Primitive::BeginningOfStream => "marker<beginning of stream>",
            Primitive::EndOfStream => "marker<end of stream>",
        }
    }
}
|
||||||
|
|
||||||
|
/// Renders a primitive as the user-facing string used in table cells.
///
/// `field_name` customizes boolean rendering: with a non-empty column name,
/// `true` shows the name itself and `false` shows an empty string.
pub fn format_primitive(primitive: &Primitive, field_name: Option<&String>) -> String {
    match primitive {
        Primitive::Nothing => String::new(),
        Primitive::BeginningOfStream => String::new(),
        Primitive::EndOfStream => String::new(),
        Primitive::Path(p) => format!("{}", p.display()),
        Primitive::Bytes(b) => {
            let byte = byte_unit::Byte::from_bytes(*b as u128);

            // Zero bytes renders as an em dash rather than "0 B".
            if byte.get_bytes() == 0u128 {
                return "—".to_string();
            }

            let byte = byte.get_appropriate_unit(false);

            match byte.get_unit() {
                byte_unit::ByteUnit::B => format!("{} B ", byte.get_value()),
                _ => byte.format(1).to_string(),
            }
        }
        Primitive::Duration(sec) => format_duration(*sec),
        Primitive::Int(i) => i.to_string(),
        Primitive::Decimal(decimal) => decimal.to_string(),
        Primitive::Range(range) => format!(
            "{}..{}",
            // Endpoints are rendered recursively; inclusivity markers are not
            // shown in this short form.
            format_primitive(&range.from.0.item, None),
            format_primitive(&range.to.0.item, None)
        ),
        Primitive::Pattern(s) => s.to_string(),
        Primitive::String(s) => s.to_owned(),
        Primitive::Line(s) => s.to_owned(),
        Primitive::ColumnPath(p) => {
            let mut members = p.iter();
            let mut f = String::new();

            f.push_str(
                &members
                    .next()
                    .expect("BUG: column path with zero members")
                    .display(),
            );

            // Remaining members are dot-separated.
            for member in members {
                f.push_str(".");
                f.push_str(&member.display())
            }

            f
        }
        Primitive::Boolean(b) => match (b, field_name) {
            (true, None) => "Yes",
            (false, None) => "No",
            // Non-empty field name: show the name for true, blank for false.
            (true, Some(s)) if !s.is_empty() => s,
            (false, Some(s)) if !s.is_empty() => "",
            // Empty field name behaves like no field name at all.
            (true, Some(_)) => "Yes",
            (false, Some(_)) => "No",
        }
        .to_owned(),
        Primitive::Binary(_) => "<binary>".to_owned(),
        Primitive::Date(d) => d.humanize().to_string(),
    }
}
|
||||||
|
|
||||||
|
/// Formats a duration given in whole seconds.
///
/// Output grows with magnitude: "N secs" (with "1 sec" singular), "M:SS",
/// "H:MM:SS", then "D:HH:MM:SS".
pub fn format_duration(sec: u64) -> String {
    const MINUTE: u64 = 60;
    const HOUR: u64 = 60 * MINUTE;
    const DAY: u64 = 24 * HOUR;

    let days = sec / DAY;
    let hours = (sec % DAY) / HOUR;
    let minutes = (sec % HOUR) / MINUTE;
    let seconds = sec % MINUTE;

    if days > 0 {
        format!("{}:{:02}:{:02}:{:02}", days, hours, minutes, seconds)
    } else if hours > 0 {
        format!("{}:{:02}:{:02}", hours, minutes, seconds)
    } else if minutes > 0 {
        format!("{}:{:02}", minutes, seconds)
    } else if seconds == 1 {
        "1 sec".to_owned()
    } else {
        format!("{} secs", seconds)
    }
}
|
32
crates/nu-protocol/src/value/range.rs
Normal file
32
crates/nu-protocol/src/value/range.rs
Normal file
@ -0,0 +1,32 @@
|
|||||||
|
use crate::value::Primitive;
|
||||||
|
use derive_new::new;
|
||||||
|
use nu_source::{b, DebugDocBuilder, Spanned};
|
||||||
|
use serde::{Deserialize, Serialize};
|
||||||
|
|
||||||
|
/// Whether a range endpoint is itself part of the range.
#[derive(Debug, Copy, Clone, PartialEq, PartialOrd, Eq, Ord, Serialize, Deserialize, Hash)]
pub enum RangeInclusion {
    Inclusive,
    Exclusive,
}
|
||||||
|
|
||||||
|
impl RangeInclusion {
|
||||||
|
pub fn debug_left_bracket(&self) -> DebugDocBuilder {
|
||||||
|
b::delimiter(match self {
|
||||||
|
RangeInclusion::Exclusive => "(",
|
||||||
|
RangeInclusion::Inclusive => "[",
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn debug_right_bracket(&self) -> DebugDocBuilder {
|
||||||
|
b::delimiter(match self {
|
||||||
|
RangeInclusion::Exclusive => ")",
|
||||||
|
RangeInclusion::Inclusive => "]",
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// A numeric range: start and end primitives, each carrying its source span
/// and its own inclusivity marker.
#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Serialize, Deserialize, new)]
pub struct Range {
    pub from: (Spanned<Primitive>, RangeInclusion),
    pub to: (Spanned<Primitive>, RangeInclusion),
}
|
24
crates/nu-protocol/src/value/serde_bigdecimal.rs
Normal file
24
crates/nu-protocol/src/value/serde_bigdecimal.rs
Normal file
@ -0,0 +1,24 @@
|
|||||||
|
use bigdecimal::BigDecimal;
|
||||||
|
use num_traits::cast::FromPrimitive;
|
||||||
|
use num_traits::cast::ToPrimitive;
|
||||||
|
|
||||||
|
pub fn serialize<S>(big_decimal: &BigDecimal, serializer: S) -> Result<S::Ok, S::Error>
|
||||||
|
where
|
||||||
|
S: serde::Serializer,
|
||||||
|
{
|
||||||
|
serde::Serialize::serialize(
|
||||||
|
&big_decimal
|
||||||
|
.to_f64()
|
||||||
|
.ok_or(serde::ser::Error::custom("expected a f64-sized bignum"))?,
|
||||||
|
serializer,
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn deserialize<'de, D>(deserializer: D) -> Result<BigDecimal, D::Error>
|
||||||
|
where
|
||||||
|
D: serde::Deserializer<'de>,
|
||||||
|
{
|
||||||
|
let x: f64 = serde::Deserialize::deserialize(deserializer)?;
|
||||||
|
Ok(BigDecimal::from_f64(x)
|
||||||
|
.ok_or(serde::de::Error::custom("expected a f64-sized bigdecimal"))?)
|
||||||
|
}
|
23
crates/nu-protocol/src/value/serde_bigint.rs
Normal file
23
crates/nu-protocol/src/value/serde_bigint.rs
Normal file
@ -0,0 +1,23 @@
|
|||||||
|
use num_bigint::BigInt;
|
||||||
|
use num_traits::cast::FromPrimitive;
|
||||||
|
use num_traits::cast::ToPrimitive;
|
||||||
|
|
||||||
|
pub fn serialize<S>(big_int: &BigInt, serializer: S) -> Result<S::Ok, S::Error>
|
||||||
|
where
|
||||||
|
S: serde::Serializer,
|
||||||
|
{
|
||||||
|
serde::Serialize::serialize(
|
||||||
|
&big_int
|
||||||
|
.to_i64()
|
||||||
|
.ok_or(serde::ser::Error::custom("expected a i64-sized bignum"))?,
|
||||||
|
serializer,
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn deserialize<'de, D>(deserializer: D) -> Result<BigInt, D::Error>
|
||||||
|
where
|
||||||
|
D: serde::Deserializer<'de>,
|
||||||
|
{
|
||||||
|
let x: i64 = serde::Deserialize::deserialize(deserializer)?;
|
||||||
|
Ok(BigInt::from_i64(x).ok_or(serde::de::Error::custom("expected a i64-sized bignum"))?)
|
||||||
|
}
|
@ -1,7 +1,7 @@
|
|||||||
[package]
|
[package]
|
||||||
name = "nu-source"
|
name = "nu-source"
|
||||||
version = "0.1.0"
|
version = "0.7.0"
|
||||||
authors = ["Yehuda Katz <wycats@gmail.com>"]
|
authors = ["Yehuda Katz <wycats@gmail.com>", "Jonathan Turner <jonathan.d.turner@gmail.com>", "Andrés N. Robalino <andres@androbtech.com>"]
|
||||||
edition = "2018"
|
edition = "2018"
|
||||||
description = "A source string characterizer for Nushell"
|
description = "A source string characterizer for Nushell"
|
||||||
license = "MIT"
|
license = "MIT"
|
||||||
@ -9,8 +9,7 @@ license = "MIT"
|
|||||||
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
|
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
|
||||||
|
|
||||||
[dependencies]
|
[dependencies]
|
||||||
|
serde = { version = "1.0.103", features = ["derive"] }
|
||||||
serde = { version = "1.0.102", features = ["derive"] }
|
|
||||||
derive-new = "0.5.8"
|
derive-new = "0.5.8"
|
||||||
getset = "0.0.9"
|
getset = "0.0.9"
|
||||||
nom_locate = "1.0.0"
|
nom_locate = "1.0.0"
|
||||||
@ -18,3 +17,6 @@ nom-tracable = "0.4.1"
|
|||||||
language-reporting = "0.4.0"
|
language-reporting = "0.4.0"
|
||||||
termcolor = "1.0.5"
|
termcolor = "1.0.5"
|
||||||
pretty = "0.5.2"
|
pretty = "0.5.2"
|
||||||
|
|
||||||
|
[build-dependencies]
|
||||||
|
nu-build = { version = "0.7.0", path = "../nu-build" }
|
||||||
|
3
crates/nu-source/build.rs
Normal file
3
crates/nu-source/build.rs
Normal file
@ -0,0 +1,3 @@
|
|||||||
|
/// Build script: delegates to the shared `nu-build` helper used across
/// Nushell crates.
fn main() -> Result<(), Box<dyn std::error::Error>> {
    nu_build::build()
}
|
@ -11,5 +11,6 @@ pub use self::meta::{
|
|||||||
pub use self::pretty::{
|
pub use self::pretty::{
|
||||||
b, DebugDoc, DebugDocBuilder, PrettyDebug, PrettyDebugWithSource, ShellAnnotation,
|
b, DebugDoc, DebugDocBuilder, PrettyDebug, PrettyDebugWithSource, ShellAnnotation,
|
||||||
};
|
};
|
||||||
|
pub use self::term_colored::TermColored;
|
||||||
pub use self::text::Text;
|
pub use self::text::Text;
|
||||||
pub use self::tracable::{nom_input, NomSpan, TracableContext};
|
pub use self::tracable::{nom_input, NomSpan, TracableContext};
|
||||||
|
@ -38,7 +38,7 @@ impl Spanned<String> {
|
|||||||
pub fn items<'a, U>(
|
pub fn items<'a, U>(
|
||||||
items: impl Iterator<Item = &'a Spanned<String>>,
|
items: impl Iterator<Item = &'a Spanned<String>>,
|
||||||
) -> impl Iterator<Item = &'a str> {
|
) -> impl Iterator<Item = &'a str> {
|
||||||
items.into_iter().map(|item| &item.item[..])
|
items.map(|item| &item.item[..])
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -156,7 +156,7 @@ impl<T> Tagged<T> {
|
|||||||
|
|
||||||
Tagged {
|
Tagged {
|
||||||
item: self.item,
|
item: self.item,
|
||||||
tag: tag,
|
tag,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -220,10 +220,7 @@ impl<T>
|
|||||||
nom_locate::LocatedSpanEx<T, u64>,
|
nom_locate::LocatedSpanEx<T, u64>,
|
||||||
),
|
),
|
||||||
) -> Span {
|
) -> Span {
|
||||||
Span {
|
Span::new(input.0.offset, input.1.offset)
|
||||||
start: input.0.offset,
|
|
||||||
end: input.1.offset,
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -235,10 +232,7 @@ impl From<(usize, usize)> for Span {
|
|||||||
|
|
||||||
impl From<&std::ops::Range<usize>> for Span {
|
impl From<&std::ops::Range<usize>> for Span {
|
||||||
fn from(input: &std::ops::Range<usize>) -> Span {
|
fn from(input: &std::ops::Range<usize>) -> Span {
|
||||||
Span {
|
Span::new(input.start, input.end)
|
||||||
start: input.start,
|
|
||||||
end: input.end,
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -321,10 +315,7 @@ impl Tag {
|
|||||||
pub fn for_char(pos: usize, anchor: AnchorLocation) -> Tag {
|
pub fn for_char(pos: usize, anchor: AnchorLocation) -> Tag {
|
||||||
Tag {
|
Tag {
|
||||||
anchor: Some(anchor),
|
anchor: Some(anchor),
|
||||||
span: Span {
|
span: Span::new(pos, pos + 1),
|
||||||
start: pos,
|
|
||||||
end: pos + 1,
|
|
||||||
},
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -528,11 +519,19 @@ impl Span {
|
|||||||
|
|
||||||
impl language_reporting::ReportingSpan for Span {
|
impl language_reporting::ReportingSpan for Span {
|
||||||
fn with_start(&self, start: usize) -> Self {
|
fn with_start(&self, start: usize) -> Self {
|
||||||
Span::new(start, self.end)
|
if self.end < start {
|
||||||
|
Span::new(start, start)
|
||||||
|
} else {
|
||||||
|
Span::new(start, self.end)
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fn with_end(&self, end: usize) -> Self {
|
fn with_end(&self, end: usize) -> Self {
|
||||||
Span::new(self.start, end)
|
if end < self.start {
|
||||||
|
Span::new(end, end)
|
||||||
|
} else {
|
||||||
|
Span::new(self.start, end)
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fn start(&self) -> usize {
|
fn start(&self) -> usize {
|
||||||
|
@ -135,7 +135,7 @@ impl DebugDocBuilder {
|
|||||||
DebugDocBuilder::styled(string, ShellStyle::Value)
|
DebugDocBuilder::styled(string, ShellStyle::Value)
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn as_value(self) -> DebugDocBuilder {
|
pub fn into_value(self) -> DebugDocBuilder {
|
||||||
self.inner
|
self.inner
|
||||||
.annotate(ShellAnnotation::style(ShellStyle::Value))
|
.annotate(ShellAnnotation::style(ShellStyle::Value))
|
||||||
.into()
|
.into()
|
||||||
@ -149,7 +149,7 @@ impl DebugDocBuilder {
|
|||||||
DebugDocBuilder::styled(string, ShellStyle::Kind)
|
DebugDocBuilder::styled(string, ShellStyle::Kind)
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn as_kind(self) -> DebugDocBuilder {
|
pub fn into_kind(self) -> DebugDocBuilder {
|
||||||
self.inner
|
self.inner
|
||||||
.annotate(ShellAnnotation::style(ShellStyle::Kind))
|
.annotate(ShellAnnotation::style(ShellStyle::Kind))
|
||||||
.into()
|
.into()
|
||||||
@ -316,7 +316,7 @@ impl DebugDocBuilder {
|
|||||||
result = result + item;
|
result = result + item;
|
||||||
}
|
}
|
||||||
|
|
||||||
result.into()
|
result
|
||||||
}
|
}
|
||||||
|
|
||||||
fn styled(string: impl std::fmt::Display, style: ShellStyle) -> DebugDocBuilder {
|
fn styled(string: impl std::fmt::Display, style: ShellStyle) -> DebugDocBuilder {
|
||||||
|
16
crates/nu-test-support/Cargo.toml
Normal file
16
crates/nu-test-support/Cargo.toml
Normal file
@ -0,0 +1,16 @@
|
|||||||
|
[package]
|
||||||
|
name = "nu-test-support"
|
||||||
|
version = "0.7.0"
|
||||||
|
authors = ["Yehuda Katz <wycats@gmail.com>", "Jonathan Turner <jonathan.d.turner@gmail.com>", "Andrés N. Robalino <andres@androbtech.com>"]
|
||||||
|
edition = "2018"
|
||||||
|
description = "Support for writing Nushell tests"
|
||||||
|
license = "MIT"
|
||||||
|
|
||||||
|
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
|
||||||
|
|
||||||
|
[dependencies]
|
||||||
|
app_dirs = "1.2.1"
|
||||||
|
dunce = "1.0.0"
|
||||||
|
getset = "0.0.9"
|
||||||
|
glob = "0.3.0"
|
||||||
|
tempfile = "3.1.0"
|
228
crates/nu-test-support/src/fs.rs
Normal file
228
crates/nu-test-support/src/fs.rs
Normal file
@ -0,0 +1,228 @@
|
|||||||
|
use std::io::Read;
|
||||||
|
use std::ops::Div;
|
||||||
|
use std::path::{Path, PathBuf};
|
||||||
|
|
||||||
|
pub struct AbsoluteFile {
|
||||||
|
inner: PathBuf,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl AbsoluteFile {
|
||||||
|
pub fn new(path: impl AsRef<Path>) -> AbsoluteFile {
|
||||||
|
let path = path.as_ref();
|
||||||
|
|
||||||
|
if !path.is_absolute() {
|
||||||
|
panic!(
|
||||||
|
"AbsoluteFile::new must take an absolute path :: {}",
|
||||||
|
path.display()
|
||||||
|
)
|
||||||
|
} else if path.is_dir() {
|
||||||
|
// At the moment, this is not an invariant, but rather a way to catch bugs
|
||||||
|
// in tests.
|
||||||
|
panic!(
|
||||||
|
"AbsoluteFile::new must not take a directory :: {}",
|
||||||
|
path.display()
|
||||||
|
)
|
||||||
|
} else {
|
||||||
|
AbsoluteFile {
|
||||||
|
inner: path.to_path_buf(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn dir(&self) -> AbsolutePath {
|
||||||
|
AbsolutePath::new(self.inner.parent().unwrap())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl From<AbsoluteFile> for PathBuf {
    /// Unwraps the validated path.
    fn from(file: AbsoluteFile) -> Self {
        file.inner
    }
}
|
||||||
|
|
||||||
|
/// A path validated at construction to be absolute.
pub struct AbsolutePath {
    inner: PathBuf,
}

impl AbsolutePath {
    /// Wraps `path`.
    ///
    /// # Panics
    /// Panics when `path` is not absolute.
    pub fn new(path: impl AsRef<Path>) -> AbsolutePath {
        let path = path.as_ref();

        // Guard clause instead of the original if/else nesting.
        if !path.is_absolute() {
            panic!("AbsolutePath::new must take an absolute path")
        }

        AbsolutePath {
            inner: path.to_path_buf(),
        }
    }
}
|
||||||
|
|
||||||
|
impl Div<&str> for &AbsolutePath {
|
||||||
|
type Output = AbsolutePath;
|
||||||
|
|
||||||
|
fn div(self, rhs: &str) -> Self::Output {
|
||||||
|
let parts = rhs.split('/');
|
||||||
|
let mut result = self.inner.clone();
|
||||||
|
|
||||||
|
for part in parts {
|
||||||
|
result = result.join(part);
|
||||||
|
}
|
||||||
|
|
||||||
|
AbsolutePath::new(result)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl AsRef<Path> for AbsolutePath {
    /// Borrows the inner path so an `AbsolutePath` can be used anywhere a
    /// `&Path` is accepted.
    fn as_ref(&self) -> &Path {
        self.inner.as_path()
    }
}
|
||||||
|
|
||||||
|
/// A path validated at construction to be relative.
pub struct RelativePath {
    inner: PathBuf,
}

impl RelativePath {
    /// Wraps `path`.
    ///
    /// # Panics
    /// Panics when `path` is absolute.
    pub fn new(path: impl Into<PathBuf>) -> RelativePath {
        let path = path.into();

        // Guard clause instead of the original if/else nesting.
        if !path.is_relative() {
            panic!("RelativePath::new must take a relative path")
        }

        RelativePath { inner: path }
    }
}
|
||||||
|
|
||||||
|
impl<T: AsRef<str>> Div<T> for &RelativePath {
|
||||||
|
type Output = RelativePath;
|
||||||
|
|
||||||
|
fn div(self, rhs: T) -> Self::Output {
|
||||||
|
let parts = rhs.as_ref().split('/');
|
||||||
|
let mut result = self.inner.clone();
|
||||||
|
|
||||||
|
for part in parts {
|
||||||
|
result = result.join(part);
|
||||||
|
}
|
||||||
|
|
||||||
|
RelativePath::new(result)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
/// Converts path-like values into the display string interpolated into test
/// command scripts by the `nu!`/`nu_error!` macros.
pub trait DisplayPath {
    fn display_path(&self) -> String;
}

impl DisplayPath for AbsolutePath {
    fn display_path(&self) -> String {
        self.inner.display().to_string()
    }
}

impl DisplayPath for PathBuf {
    fn display_path(&self) -> String {
        self.display().to_string()
    }
}

// String-like types pass through unchanged.
impl DisplayPath for str {
    fn display_path(&self) -> String {
        self.to_string()
    }
}

impl DisplayPath for &str {
    fn display_path(&self) -> String {
        self.to_string()
    }
}

impl DisplayPath for String {
    fn display_path(&self) -> String {
        self.clone()
    }
}

impl DisplayPath for &String {
    fn display_path(&self) -> String {
        self.to_string()
    }
}
|
||||||
|
/// Declarative description of a fixture file for test setups.
pub enum Stub<'a> {
    /// (file name, exact contents)
    FileWithContent(&'a str, &'a str),
    /// (file name, contents) — presumably indentation-trimmed by the consumer
    /// before writing; TODO confirm against the playground implementation.
    FileWithContentToBeTrimmed(&'a str, &'a str),
    /// Just the file name; created with no content.
    EmptyFile(&'a str),
}
|
||||||
|
|
||||||
|
/// Reads the file at `full_path` into a `String`.
///
/// # Panics
/// Panics when the file cannot be opened or read (this is a test helper).
pub fn file_contents(full_path: impl AsRef<Path>) -> String {
    // `fs::read_to_string` sizes its buffer from file metadata and replaces
    // the original manual open + read_to_string dance.
    std::fs::read_to_string(full_path.as_ref()).expect("can not read file")
}
|
||||||
|
|
||||||
|
/// Reads the file at `full_path` into raw bytes.
///
/// # Panics
/// Panics when the file cannot be opened or read (this is a test helper).
pub fn file_contents_binary(full_path: impl AsRef<Path>) -> Vec<u8> {
    // `fs::read` sizes its buffer from file metadata and replaces the
    // original manual open + read_to_end dance.
    std::fs::read(full_path.as_ref()).expect("can not read file")
}
|
||||||
|
|
||||||
|
/// The platform's native line ending: CRLF on Windows, LF elsewhere.
pub fn line_ending() -> String {
    // `cfg!` expression instead of two attribute-gated blocks; both branches
    // always compile, the unused one is optimized away.
    if cfg!(windows) {
        String::from("\r\n")
    } else {
        String::from("\n")
    }
}
|
||||||
|
|
||||||
|
/// Removes the file at `full_path` when it exists; silently does nothing
/// otherwise.
///
/// # Panics
/// Panics when an existing file cannot be removed (this is a test helper).
pub fn delete_file_at(full_path: impl AsRef<Path>) {
    let target = full_path.as_ref();

    if target.exists() {
        std::fs::remove_file(target).expect("can not delete file");
    }
}
|
||||||
|
|
||||||
|
/// Writes a small placeholder file ("fake data") at `full_path`.
///
/// # Panics
/// Panics when the parent directory does not exist. The original check was
/// inverted: it panicked whenever the path merely *had* a parent (i.e. almost
/// always), which made the helper unusable and its message ("{} exists")
/// misleading.
pub fn create_file_at(full_path: impl AsRef<Path>) -> Result<(), std::io::Error> {
    let full_path = full_path.as_ref();

    if let Some(parent) = full_path.parent() {
        assert!(parent.exists(), "{} does not exist", parent.display());
    }

    std::fs::write(full_path, "fake data".as_bytes())
}
|
||||||
|
|
||||||
|
/// Copies `source` to `destination`; panics on failure (test helper).
pub fn copy_file_to(source: &str, destination: &str) {
    std::fs::copy(source, destination).expect("can not copy file");
}
|
||||||
|
|
||||||
|
/// True when every name in `files`, resolved against `path`, exists on disk.
/// Vacuously true for an empty list.
pub fn files_exist_at(files: Vec<impl AsRef<Path>>, path: impl AsRef<Path>) -> bool {
    let base = path.as_ref();
    files.iter().all(|name| base.join(name).exists())
}
|
||||||
|
|
||||||
|
/// Recursively removes the directory at `full_path`; panics on failure
/// (test helper).
pub fn delete_directory_at(full_path: &str) {
    std::fs::remove_dir_all(PathBuf::from(full_path)).expect("can not remove directory");
}
|
||||||
|
|
||||||
|
/// Path to the debug-build `nu` binary (`target/debug/nu`), relative to the
/// workspace root.
pub fn executable_path() -> PathBuf {
    // Collecting path segments builds the same PathBuf as the original
    // sequence of pushes.
    ["target", "debug", "nu"].iter().collect()
}
|
||||||
|
|
||||||
|
/// Renders a path as the string handed to `cd` inside test command scripts.
pub fn in_directory(str: impl AsRef<Path>) -> String {
    str.as_ref().display().to_string()
}
|
38
crates/nu-test-support/src/lib.rs
Normal file
38
crates/nu-test-support/src/lib.rs
Normal file
@ -0,0 +1,38 @@
|
|||||||
|
pub mod fs;
|
||||||
|
pub mod macros;
|
||||||
|
pub mod playground;
|
||||||
|
|
||||||
|
/// Collapses a multi-line pipeline literal into a single-line command:
/// the first line is dropped (it is the blank line after the opening quote),
/// each remaining line is trimmed, lines are joined with single spaces, and
/// any trailing whitespace is removed.
pub fn pipeline(commands: &str) -> String {
    let joined = commands
        .lines()
        .skip(1)
        .map(str::trim)
        .collect::<Vec<_>>()
        .join(" ");

    joined.trim_end().to_string()
}
|
||||||
|
|
||||||
|
// BUG FIX: this was `#[cfg(tests)]` (plural), which is not a cfg flag cargo
// ever sets, so the module was silently never compiled and the test never ran.
#[cfg(test)]
mod tests {
    use super::pipeline;

    #[test]
    fn constructs_a_pipeline() {
        let actual = pipeline(
            r#"
            open los_tres_amigos.txt
            | from-csv
            | get rusty_luck
            | str --to-int
            | sum
            | echo "$it"
            "#,
        );

        assert_eq!(
            actual,
            r#"open los_tres_amigos.txt | from-csv | get rusty_luck | str --to-int | sum | echo "$it""#
        );
    }
}
|
105
crates/nu-test-support/src/macros.rs
Normal file
105
crates/nu-test-support/src/macros.rs
Normal file
@ -0,0 +1,105 @@
|
|||||||
|
/// Runs the `nu` binary with the given commands and returns everything the
/// shell wrote to stdout, with all line endings stripped.
///
/// Invocation forms:
/// * `nu!(cwd: $cwd, $path, $($part),*)` - `$path` is a format string whose
///   placeholders are filled with `display_path()` of each `$part`;
/// * `nu!(cwd: $cwd, $path)` - no format arguments;
/// * `nu!($cwd, $path)` - the base form the others expand to.
#[macro_export]
macro_rules! nu {
    (cwd: $cwd:expr, $path:expr, $($part:expr),*) => {{
        use $crate::fs::DisplayPath;

        // Substitute each extra argument (as a display path) into $path.
        let path = format!($path, $(
            $part.display_path()
        ),*);

        nu!($cwd, &path)
    }};

    (cwd: $cwd:expr, $path:expr) => {{
        nu!($cwd, $path)
    }};

    ($cwd:expr, $path:expr) => {{
        pub use std::error::Error;
        pub use std::io::prelude::*;
        pub use std::process::{Command, Stdio};

        // The script piped to nu's stdin: cd into the requested directory,
        // run the payload, then exit the shell.
        let commands = &*format!(
            "
cd {}
{}
exit",
            $crate::fs::in_directory($cwd),
            $crate::fs::DisplayPath::display_path(&$path)
        );

        // Spawn the nu binary with both stdin and stdout piped so we can
        // drive it and capture its output.
        let mut process = match Command::new($crate::fs::executable_path())
            .stdin(Stdio::piped())
            .stdout(Stdio::piped())
            .spawn()
        {
            Ok(child) => child,
            Err(why) => panic!("Can't run test {}", why.description()),
        };

        let stdin = process.stdin.as_mut().expect("couldn't open stdin");
        stdin
            .write_all(commands.as_bytes())
            .expect("couldn't write to stdin");

        let output = process
            .wait_with_output()
            .expect("couldn't read from stdout");

        // Strip every line ending so callers can compare plain strings.
        let out = String::from_utf8_lossy(&output.stdout);
        let out = out.replace("\r\n", "");
        let out = out.replace("\n", "");
        out
    }};
}
|
||||||
|
|
||||||
|
/// Runs the `nu` binary with the given commands and returns everything the
/// shell wrote to stderr (unmodified, as an owned `String`).
///
/// Accepts the same three invocation forms as `nu!`; see that macro.
#[macro_export]
macro_rules! nu_error {
    (cwd: $cwd:expr, $path:expr, $($part:expr),*) => {{
        use $crate::fs::DisplayPath;

        // Substitute each extra argument (as a display path) into $path.
        let path = format!($path, $(
            $part.display_path()
        ),*);

        nu_error!($cwd, &path)
    }};

    (cwd: $cwd:expr, $path:expr) => {{
        nu_error!($cwd, $path)
    }};

    ($cwd:expr, $path:expr) => {{
        pub use std::error::Error;
        pub use std::io::prelude::*;
        pub use std::process::{Command, Stdio};

        // The script piped to nu's stdin: cd into the requested directory,
        // run the payload, then exit the shell.
        let commands = &*format!(
            "
cd {}
{}
exit",
            $crate::fs::in_directory($cwd),
            $crate::fs::DisplayPath::display_path(&$path)
        );

        // Only stderr is piped here; stdout is inherited.
        let mut process = Command::new($crate::fs::executable_path())
            .stdin(Stdio::piped())
            .stderr(Stdio::piped())
            .spawn()
            .expect("couldn't run test");

        let stdin = process.stdin.as_mut().expect("couldn't open stdin");
        stdin
            .write_all(commands.as_bytes())
            .expect("couldn't write to stdin");

        let output = process
            .wait_with_output()
            .expect("couldn't read from stderr");

        let out = String::from_utf8_lossy(&output.stderr);
        out.into_owned()
    }};
}
|
152
crates/nu-test-support/src/playground.rs
Normal file
152
crates/nu-test-support/src/playground.rs
Normal file
@ -0,0 +1,152 @@
|
|||||||
|
use crate::fs::line_ending;
|
||||||
|
use crate::fs::Stub;
|
||||||
|
|
||||||
|
use getset::Getters;
|
||||||
|
use glob::glob;
|
||||||
|
use std::path::{Path, PathBuf};
|
||||||
|
use tempfile::{tempdir, TempDir};
|
||||||
|
|
||||||
|
/// A scratch area on disk for integration tests.
pub struct Playground {
    // Temporary directory owning the playground; removed when dropped.
    root: TempDir,
    // Name of the topic/test subdirectory under `root`.
    tests: String,
    // Current working directory for file operations; mutated by
    // `mkdir`/`within`, reset by `back_to_playground`.
    cwd: PathBuf,
}
|
||||||
|
|
||||||
|
/// Canonicalized paths handed to a test body by `Playground::setup`.
#[derive(Getters)]
#[get = "pub"]
pub struct Dirs {
    // Root of the temporary playground.
    pub root: PathBuf,
    // The per-topic test directory inside `root`.
    pub test: PathBuf,
    // The repository's `tests/fixtures` directory.
    pub fixtures: PathBuf,
}
|
||||||
|
|
||||||
|
impl Dirs {
|
||||||
|
pub fn formats(&self) -> PathBuf {
|
||||||
|
PathBuf::from(self.fixtures.join("formats"))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Playground {
    /// Absolute path of the playground's temporary root.
    pub fn root(&self) -> &Path {
        self.root.path()
    }

    /// Resets `cwd` back to the topic directory (root joined with the test name).
    pub fn back_to_playground(&mut self) -> &mut Self {
        self.cwd = PathBuf::from(self.root()).join(self.tests.clone());
        self
    }

    /// Creates a fresh playground named `topic` and runs `block` inside it.
    ///
    /// The closure receives canonicalized `Dirs` (root/test/fixtures) and a
    /// mutable `Playground` whose `cwd` starts at the topic directory. The
    /// temporary directory is cleaned up when `root` (a `TempDir`) drops.
    pub fn setup(topic: &str, block: impl FnOnce(Dirs, &mut Playground)) {
        let root = tempdir().expect("Couldn't create a tempdir");
        let nuplay_dir = root.path().join(topic);

        // Start from a clean slate if a previous run left the directory behind.
        if PathBuf::from(&nuplay_dir).exists() {
            std::fs::remove_dir_all(PathBuf::from(&nuplay_dir)).expect("can not remove directory");
        }

        std::fs::create_dir(PathBuf::from(&nuplay_dir)).expect("can not create directory");

        let mut playground = Playground {
            root: root,
            tests: topic.to_string(),
            cwd: nuplay_dir,
        };

        let project_root = PathBuf::from(env!("CARGO_MANIFEST_DIR"));
        let playground_root = playground.root.path();

        // Fixtures live two directory levels above this crate, under
        // tests/fixtures -- assumes the standard crates/ workspace layout.
        let fixtures = project_root;
        let fixtures = fixtures
            .parent()
            .expect("Couldn't find the fixtures directory")
            .parent()
            .expect("Couldn't find the fixtures directory")
            .join("tests/fixtures");

        // dunce::canonicalize avoids Windows UNC (`\\?\`) prefixes that plain
        // std canonicalization would introduce.
        let fixtures = dunce::canonicalize(fixtures.clone()).expect(&format!(
            "Couldn't canonicalize fixtures path {}",
            fixtures.display()
        ));

        let test =
            dunce::canonicalize(PathBuf::from(playground_root.join(topic))).expect(&format!(
                "Couldn't canonicalize test path {}",
                playground_root.join(topic).display()
            ));

        let root = dunce::canonicalize(playground_root).expect(&format!(
            "Couldn't canonicalize tests root path {}",
            playground_root.display()
        ));

        let dirs = Dirs {
            root,
            test,
            fixtures,
        };

        block(dirs, &mut playground);
    }

    /// Creates `directory` (and any parents) under `cwd`, then resets `cwd`
    /// back to the topic directory.
    pub fn mkdir(&mut self, directory: &str) -> &mut Self {
        self.cwd.push(directory);
        std::fs::create_dir_all(&self.cwd).expect("can not create directory");
        self.back_to_playground();
        self
    }

    /// Writes the given stub files into `cwd`, then resets `cwd`.
    pub fn with_files(&mut self, files: Vec<Stub>) -> &mut Self {
        let endl = line_ending();

        files
            .iter()
            .map(|f| {
                let mut path = PathBuf::from(&self.cwd);

                let (file_name, contents) = match *f {
                    // "Empty" files still carry placeholder bytes.
                    Stub::EmptyFile(name) => (name, "fake data".to_string()),
                    Stub::FileWithContent(name, content) => (name, content.to_string()),
                    // Trimmed variant: drop the first (blank) line, trim each
                    // remaining line, and join with the platform line ending.
                    Stub::FileWithContentToBeTrimmed(name, content) => (
                        name,
                        content
                            .lines()
                            .skip(1)
                            .map(|line| line.trim())
                            .collect::<Vec<&str>>()
                            .join(&endl),
                    ),
                };

                path.push(file_name);

                std::fs::write(PathBuf::from(path), contents.as_bytes())
                    .expect("can not create file");
            })
            .for_each(drop);
        self.back_to_playground();
        self
    }

    /// Descends into `directory` (creating it) WITHOUT resetting `cwd`, so
    /// subsequent operations happen inside it.
    pub fn within(&mut self, directory: &str) -> &mut Self {
        self.cwd.push(directory);
        std::fs::create_dir(&self.cwd).expect("can not create directory");
        self
    }

    /// Expands a glob pattern to the matching paths; panics on a bad pattern.
    pub fn glob_vec(pattern: &str) -> Vec<PathBuf> {
        let glob = glob(pattern);

        match glob {
            Ok(paths) => paths
                .map(|path| {
                    if let Ok(path) = path {
                        path
                    } else {
                        // Per-entry errors are not expected in these tests.
                        unreachable!()
                    }
                })
                .collect(),
            Err(_) => panic!("Invalid pattern."),
        }
    }
}
|
21
crates/nu-value-ext/Cargo.toml
Normal file
21
crates/nu-value-ext/Cargo.toml
Normal file
@ -0,0 +1,21 @@
|
|||||||
|
[package]
|
||||||
|
name = "nu-value-ext"
|
||||||
|
version = "0.7.0"
|
||||||
|
authors = ["Yehuda Katz <wycats@gmail.com>", "Jonathan Turner <jonathan.d.turner@gmail.com>", "Andrés N. Robalino <andres@androbtech.com>"]
|
||||||
|
edition = "2018"
|
||||||
|
description = "Extension traits for values in Nushell"
|
||||||
|
license = "MIT"
|
||||||
|
|
||||||
|
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
|
||||||
|
|
||||||
|
[dependencies]
|
||||||
|
nu-source = { path = "../nu-source", version = "0.7.0" }
|
||||||
|
nu-errors = { path = "../nu-errors", version = "0.7.0" }
|
||||||
|
nu-parser = { path = "../nu-parser", version = "0.7.0" }
|
||||||
|
nu-protocol = { path = "../nu-protocol", version = "0.7.0" }
|
||||||
|
|
||||||
|
num-traits = "0.2.10"
|
||||||
|
itertools = "0.8.2"
|
||||||
|
|
||||||
|
[build-dependencies]
|
||||||
|
nu-build = { version = "0.7.0", path = "../nu-build" }
|
3
crates/nu-value-ext/build.rs
Normal file
3
crates/nu-value-ext/build.rs
Normal file
@ -0,0 +1,3 @@
|
|||||||
|
/// Build script: delegates to the workspace-shared `nu-build` helper.
fn main() -> Result<(), Box<dyn std::error::Error>> {
    nu_build::build()
}
|
515
crates/nu-value-ext/src/lib.rs
Normal file
515
crates/nu-value-ext/src/lib.rs
Normal file
@ -0,0 +1,515 @@
|
|||||||
|
use itertools::Itertools;
|
||||||
|
use nu_errors::{ExpectedRange, ShellError};
|
||||||
|
use nu_protocol::{
|
||||||
|
ColumnPath, MaybeOwned, PathMember, Primitive, ShellTypeName, SpannedTypeName,
|
||||||
|
UnspannedPathMember, UntaggedValue, Value,
|
||||||
|
};
|
||||||
|
use nu_source::{HasSpan, PrettyDebug, Spanned, SpannedItem, Tag, Tagged, TaggedItem};
|
||||||
|
use num_traits::cast::ToPrimitive;
|
||||||
|
|
||||||
|
/// Extension operations on `Value` for reading, inserting and replacing
/// nested data addressed by keys, indices and column paths.
pub trait ValueExt {
    /// Decomposes the value into its untagged payload and its tag.
    fn into_parts(self) -> (UntaggedValue, Tag);
    /// Row column lookup; primitives are returned as-is (see free `get_data`).
    fn get_data(&self, desc: &String) -> MaybeOwned<'_, Value>;
    /// Looks up a column by name, returning `None` when absent.
    fn get_data_by_key(&self, name: Spanned<&str>) -> Option<Value>;
    /// Resolves a single path member (column name or integer index).
    fn get_data_by_member(&self, name: &PathMember) -> Result<Value, ShellError>;
    /// Walks a full column path; `callback` can enrich the failure error.
    fn get_data_by_column_path(
        &self,
        path: &ColumnPath,
        callback: Box<dyn FnOnce((&Value, &PathMember, ShellError)) -> ShellError>,
    ) -> Result<Value, ShellError>;
    /// Inserts at a dot-separated string path, returning a modified clone.
    fn insert_data_at_path(&self, path: &str, new_value: Value) -> Option<Value>;
    /// Inserts at a single member, mutating in place.
    fn insert_data_at_member(
        &mut self,
        member: &PathMember,
        new_value: Value,
    ) -> Result<(), ShellError>;
    /// Inserts at a full column path, returning a modified clone.
    fn insert_data_at_column_path(
        &self,
        split_path: &ColumnPath,
        new_value: Value,
    ) -> Result<Value, ShellError>;
    /// Replaces the value at a full column path, returning a modified clone.
    fn replace_data_at_column_path(
        &self,
        split_path: &ColumnPath,
        replaced_value: Value,
    ) -> Option<Value>;
    /// Interprets this value as a column path.
    fn as_column_path(&self) -> Result<Tagged<ColumnPath>, ShellError>;
    /// Interprets this value as a single path member.
    fn as_path_member(&self) -> Result<PathMember, ShellError>;
    /// Renders a primitive value as a plain string.
    fn as_string(&self) -> Result<String, ShellError>;
}
|
||||||
|
|
||||||
|
// Every method simply delegates to the free function of the same name; the
// trait exists so callers can use method syntax on `Value`.
impl ValueExt for Value {
    fn into_parts(self) -> (UntaggedValue, Tag) {
        (self.value, self.tag)
    }

    fn get_data(&self, desc: &String) -> MaybeOwned<'_, Value> {
        get_data(self, desc)
    }

    fn get_data_by_key(&self, name: Spanned<&str>) -> Option<Value> {
        get_data_by_key(self, name)
    }

    fn get_data_by_member(&self, name: &PathMember) -> Result<Value, ShellError> {
        get_data_by_member(self, name)
    }

    fn get_data_by_column_path(
        &self,
        path: &ColumnPath,
        callback: Box<dyn FnOnce((&Value, &PathMember, ShellError)) -> ShellError>,
    ) -> Result<Value, ShellError> {
        get_data_by_column_path(self, path, callback)
    }

    fn insert_data_at_path(&self, path: &str, new_value: Value) -> Option<Value> {
        insert_data_at_path(self, path, new_value)
    }

    fn insert_data_at_member(
        &mut self,
        member: &PathMember,
        new_value: Value,
    ) -> Result<(), ShellError> {
        insert_data_at_member(self, member, new_value)
    }

    fn insert_data_at_column_path(
        &self,
        split_path: &ColumnPath,
        new_value: Value,
    ) -> Result<Value, ShellError> {
        insert_data_at_column_path(self, split_path, new_value)
    }

    fn replace_data_at_column_path(
        &self,
        split_path: &ColumnPath,
        replaced_value: Value,
    ) -> Option<Value> {
        replace_data_at_column_path(self, split_path, replaced_value)
    }

    fn as_column_path(&self) -> Result<Tagged<ColumnPath>, ShellError> {
        as_column_path(self)
    }

    fn as_path_member(&self) -> Result<PathMember, ShellError> {
        as_path_member(self)
    }

    fn as_string(&self) -> Result<String, ShellError> {
        as_string(self)
    }
}
|
||||||
|
|
||||||
|
/// Resolves a single path member against `value`.
///
/// Rows accept string members (column lookup); tables accept either a string
/// (mapped over every row, collecting the hits into a new table) or an
/// integer index. Anything else is a type error.
pub fn get_data_by_member(value: &Value, name: &PathMember) -> Result<Value, ShellError> {
    match &value.value {
        // If the value is a row, the member is a column name
        UntaggedValue::Row(o) => match &name.unspanned {
            // If the member is a string, get the data
            UnspannedPathMember::String(string) => o
                .get_data_by_key(string[..].spanned(name.span))
                .ok_or_else(|| {
                    ShellError::missing_property(
                        "row".spanned(value.tag.span),
                        string.spanned(name.span),
                    )
                }),

            // If the member is a number, it's an error
            UnspannedPathMember::Int(_) => Err(ShellError::invalid_integer_index(
                "row".spanned(value.tag.span),
                name.span,
            )),
        },

        // If the value is a table
        UntaggedValue::Table(l) => {
            match &name.unspanned {
                // If the member is a string, map over the member
                UnspannedPathMember::String(string) => {
                    let mut out = vec![];

                    // Collect the named column from every row that has it;
                    // rows without it are silently skipped.
                    for item in l {
                        if let Value {
                            value: UntaggedValue::Row(o),
                            ..
                        } = item
                        {
                            if let Some(v) = o.get_data_by_key(string[..].spanned(name.span)) {
                                out.push(v)
                            }
                        }
                    }

                    // No row had the column at all: missing property.
                    if out.is_empty() {
                        Err(ShellError::missing_property(
                            "table".spanned(value.tag.span),
                            string.spanned(name.span),
                        ))
                    } else {
                        Ok(UntaggedValue::Table(out)
                            .into_value(Tag::new(value.anchor(), name.span)))
                    }
                }
                UnspannedPathMember::Int(int) => {
                    // Reject integers too large to be a usize index.
                    let index = int.to_usize().ok_or_else(|| {
                        ShellError::range_error(
                            ExpectedRange::Usize,
                            &"massive integer".spanned(name.span),
                            "indexing",
                        )
                    })?;

                    match get_data_by_index(value, index.spanned(value.tag.span)) {
                        Some(v) => Ok(v.clone()),
                        None => Err(ShellError::range_error(
                            0..(l.len()),
                            &int.spanned(name.span),
                            "indexing",
                        )),
                    }
                }
            }
        }
        other => Err(ShellError::type_error(
            "row or table",
            other.type_name().spanned(value.tag.span),
        )),
    }
}
|
||||||
|
|
||||||
|
pub fn get_data_by_column_path(
|
||||||
|
value: &Value,
|
||||||
|
path: &ColumnPath,
|
||||||
|
callback: Box<dyn FnOnce((&Value, &PathMember, ShellError)) -> ShellError>,
|
||||||
|
) -> Result<Value, ShellError> {
|
||||||
|
let mut current = value.clone();
|
||||||
|
|
||||||
|
for p in path.iter() {
|
||||||
|
let value = get_data_by_member(¤t, p);
|
||||||
|
|
||||||
|
match value {
|
||||||
|
Ok(v) => current = v.clone(),
|
||||||
|
Err(e) => return Err(callback((¤t.clone(), &p.clone(), e))),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(current)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Inserts `new_value` into a row at a dot-separated string `path`.
///
/// Returns a modified clone of `value` on success, or `None` when `value` is
/// not a row or an intermediate segment is missing / not a row.
pub fn insert_data_at_path(value: &Value, path: &str, new_value: Value) -> Option<Value> {
    let mut new_obj = value.clone();

    let split_path: Vec<_> = path.split('.').collect();

    if let UntaggedValue::Row(ref mut o) = new_obj.value {
        let mut current = o;

        if split_path.len() == 1 {
            // Special case for inserting at the top level
            current.entries.insert(
                path.to_string(),
                new_value.value.clone().into_value(&value.tag),
            );
            return Some(new_obj);
        }

        for idx in 0..split_path.len() {
            match current.entries.get_mut(split_path[idx]) {
                Some(next) => {
                    // At the second-to-last segment: insert the final key
                    // into the nested row and return.
                    if idx == (split_path.len() - 2) {
                        if let UntaggedValue::Row(o) = &mut next.value {
                            o.entries.insert(
                                split_path[idx + 1].to_string(),
                                new_value.value.clone().into_value(&value.tag),
                            );
                        }
                        return Some(new_obj.clone());
                    } else {
                        // Otherwise descend; a non-row intermediate aborts.
                        match next.value {
                            UntaggedValue::Row(ref mut o) => {
                                current = o;
                            }
                            _ => return None,
                        }
                    }
                }
                _ => return None,
            }
        }
    }

    None
}
|
||||||
|
|
||||||
|
/// Inserts `new_value` into `value` at a single `member`, mutating in place.
///
/// # Errors
/// Rows take string members and tables take integer indices; any other
/// combination (or a non-row/table target) is a type error.
pub fn insert_data_at_member(
    value: &mut Value,
    member: &PathMember,
    new_value: Value,
) -> Result<(), ShellError> {
    match &mut value.value {
        UntaggedValue::Row(dict) => match &member.unspanned {
            UnspannedPathMember::String(key) => {
                dict.insert_data_at_key(key, new_value);
                Ok(())
            }
            // A row cannot be indexed by an integer.
            UnspannedPathMember::Int(_) => Err(ShellError::type_error(
                "column name",
                "integer".spanned(member.span),
            )),
        },
        UntaggedValue::Table(array) => match &member.unspanned {
            // A table cannot be indexed by a column name.
            UnspannedPathMember::String(_) => Err(ShellError::type_error(
                "list index",
                "string".spanned(member.span),
            )),
            UnspannedPathMember::Int(int) => {
                // Reject integers that do not fit in a usize.
                let int = int.to_usize().ok_or_else(|| {
                    ShellError::range_error(
                        ExpectedRange::Usize,
                        &"bigger number".spanned(member.span),
                        "inserting into a list",
                    )
                })?;

                insert_data_at_index(array, int.tagged(member.span), new_value.clone())?;
                Ok(())
            }
        },
        other => match &member.unspanned {
            UnspannedPathMember::String(_) => Err(ShellError::type_error(
                "row",
                other.type_name().spanned(value.span()),
            )),
            UnspannedPathMember::Int(_) => Err(ShellError::type_error(
                "table",
                other.type_name().spanned(value.span()),
            )),
        },
    }
}
|
||||||
|
|
||||||
|
/// Inserts `new_value` at the location addressed by `split_path`.
///
/// Navigates mutably through every member except the last, then inserts at
/// the last; returns a modified clone of `value`.
///
/// # Errors
/// Missing intermediate members yield a missing-property error; the final
/// insertion can fail with the errors of `insert_data_at_member`.
pub fn insert_data_at_column_path(
    value: &Value,
    split_path: &ColumnPath,
    new_value: Value,
) -> Result<Value, ShellError> {
    let (last, front) = split_path.split_last();
    let mut original = value.clone();

    let mut current: &mut Value = &mut original;

    for member in front {
        // Captured before the mutable borrow below, for the error message.
        let type_name = current.spanned_type_name();

        current = get_mut_data_by_member(current, &member).ok_or_else(|| {
            ShellError::missing_property(
                member.plain_string(std::usize::MAX).spanned(member.span),
                type_name,
            )
        })?
    }

    insert_data_at_member(current, &last, new_value)?;

    Ok(original)
}
|
||||||
|
|
||||||
|
/// Replaces the value at `split_path` with `replaced_value`.
///
/// Returns a modified clone of `value`, or `None` when the path cannot be
/// fully resolved (including the empty-path case, which skips the loop).
pub fn replace_data_at_column_path(
    value: &Value,
    split_path: &ColumnPath,
    replaced_value: Value,
) -> Option<Value> {
    let mut new_obj: Value = value.clone();
    let mut current = &mut new_obj;
    let split_path = split_path.members();

    for idx in 0..split_path.len() {
        match get_mut_data_by_member(current, &split_path[idx]) {
            Some(next) => {
                if idx == (split_path.len() - 1) {
                    // Final member: overwrite in place, retagged with the
                    // original value's tag, and return the modified clone.
                    *next = replaced_value.value.into_value(&value.tag);
                    return Some(new_obj);
                } else {
                    current = next;
                }
            }
            None => {
                return None;
            }
        }
    }

    None
}
|
||||||
|
|
||||||
|
/// Interprets `value` as a column path.
///
/// Accepts a table of members, a plain string (single-member path) or an
/// existing column-path primitive; anything else is a type error.
pub fn as_column_path(value: &Value) -> Result<Tagged<ColumnPath>, ShellError> {
    match &value.value {
        UntaggedValue::Table(table) => {
            let mut out: Vec<PathMember> = vec![];

            // Each table element must itself convert to a path member.
            for item in table {
                out.push(as_path_member(item)?);
            }

            Ok(ColumnPath::new(out).tagged(&value.tag))
        }

        UntaggedValue::Primitive(Primitive::String(s)) => {
            Ok(ColumnPath::new(vec![PathMember::string(s, &value.tag.span)]).tagged(&value.tag))
        }

        UntaggedValue::Primitive(Primitive::ColumnPath(path)) => {
            Ok(path.clone().tagged(value.tag.clone()))
        }

        other => Err(ShellError::type_error(
            "column path",
            other.type_name().spanned(value.span()),
        )),
    }
}
|
||||||
|
|
||||||
|
/// Interprets `value` as a single path member: an integer primitive becomes
/// an index member, a string primitive a key member; anything else errors.
pub fn as_path_member(value: &Value) -> Result<PathMember, ShellError> {
    match &value.value {
        UntaggedValue::Primitive(primitive) => match primitive {
            Primitive::Int(int) => Ok(PathMember::int(int.clone(), value.tag.span)),
            Primitive::String(string) => Ok(PathMember::string(string, value.tag.span)),
            other => Err(ShellError::type_error(
                "path member",
                other.type_name().spanned(value.span()),
            )),
        },
        other => Err(ShellError::type_error(
            "path member",
            other.type_name().spanned(value.span()),
        )),
    }
}
|
||||||
|
|
||||||
|
pub fn as_string(value: &Value) -> Result<String, ShellError> {
|
||||||
|
match &value.value {
|
||||||
|
UntaggedValue::Primitive(Primitive::String(s)) => Ok(s.clone()),
|
||||||
|
UntaggedValue::Primitive(Primitive::Boolean(x)) => Ok(format!("{}", x)),
|
||||||
|
UntaggedValue::Primitive(Primitive::Decimal(x)) => Ok(format!("{}", x)),
|
||||||
|
UntaggedValue::Primitive(Primitive::Int(x)) => Ok(format!("{}", x)),
|
||||||
|
UntaggedValue::Primitive(Primitive::Bytes(x)) => Ok(format!("{}", x)),
|
||||||
|
UntaggedValue::Primitive(Primitive::Path(x)) => Ok(format!("{}", x.display())),
|
||||||
|
UntaggedValue::Primitive(Primitive::ColumnPath(path)) => {
|
||||||
|
Ok(path.iter().map(|member| member.display()).join("."))
|
||||||
|
}
|
||||||
|
|
||||||
|
// TODO: this should definitely be more general with better errors
|
||||||
|
other => Err(ShellError::labeled_error(
|
||||||
|
"Expected string",
|
||||||
|
other.type_name(),
|
||||||
|
&value.tag,
|
||||||
|
)),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn insert_data_at_index(
|
||||||
|
list: &mut Vec<Value>,
|
||||||
|
index: Tagged<usize>,
|
||||||
|
new_value: Value,
|
||||||
|
) -> Result<(), ShellError> {
|
||||||
|
if list.len() >= index.item {
|
||||||
|
Err(ShellError::range_error(
|
||||||
|
0..(list.len()),
|
||||||
|
&format_args!("{}", index.item).spanned(index.tag.span),
|
||||||
|
"insert at index",
|
||||||
|
))
|
||||||
|
} else {
|
||||||
|
list[index.item] = new_value;
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Looks up column `desc` when `value` is a row; primitives are returned
/// borrowed as-is, and blocks/tables/errors yield an owned `nothing`.
pub fn get_data<'value>(value: &'value Value, desc: &String) -> MaybeOwned<'value, Value> {
    match &value.value {
        UntaggedValue::Primitive(_) => MaybeOwned::Borrowed(value),
        UntaggedValue::Row(o) => o.get_data(desc),
        UntaggedValue::Block(_) | UntaggedValue::Table(_) | UntaggedValue::Error(_) => {
            MaybeOwned::Owned(UntaggedValue::nothing().into_untagged_value())
        }
    }
}
|
||||||
|
|
||||||
|
/// Returns a clone of the table element at `idx`, retagged with the index's
/// span (keeping the element's anchor); `None` for non-tables or bad indices.
pub(crate) fn get_data_by_index(value: &Value, idx: Spanned<usize>) -> Option<Value> {
    match &value.value {
        UntaggedValue::Table(value_set) => {
            let value = value_set.get(idx.item)?;
            Some(
                value
                    .value
                    .clone()
                    .into_value(Tag::new(value.anchor(), idx.span)),
            )
        }
        _ => None,
    }
}
|
||||||
|
|
||||||
|
/// Looks up column `name` in a row, or maps the lookup over a table.
///
/// For tables, rows lacking the column contribute `nothing`, so the result
/// (when the table is non-empty) has exactly one element per row.
pub fn get_data_by_key(value: &Value, name: Spanned<&str>) -> Option<Value> {
    match &value.value {
        UntaggedValue::Row(o) => o.get_data_by_key(name),
        UntaggedValue::Table(l) => {
            let mut out = vec![];
            for item in l {
                match item {
                    Value {
                        value: UntaggedValue::Row(o),
                        ..
                    } => match o.get_data_by_key(name) {
                        Some(v) => out.push(v),
                        // Missing column in this row: placeholder `nothing`.
                        None => out.push(UntaggedValue::nothing().into_untagged_value()),
                    },
                    // Non-row element: placeholder `nothing` as well.
                    _ => out.push(UntaggedValue::nothing().into_untagged_value()),
                }
            }

            // An empty table produces `None` rather than an empty table.
            if !out.is_empty() {
                Some(UntaggedValue::Table(out).into_value(name.span))
            } else {
                None
            }
        }
        _ => None,
    }
}
|
||||||
|
|
||||||
|
/// Mutable counterpart of `get_data_by_member`: resolves one member to a
/// mutable reference into `value`, or `None` when it cannot be resolved.
pub(crate) fn get_mut_data_by_member<'value>(
    value: &'value mut Value,
    name: &PathMember,
) -> Option<&'value mut Value> {
    match &mut value.value {
        UntaggedValue::Row(o) => match &name.unspanned {
            UnspannedPathMember::String(string) => o.get_mut_data_by_key(&string),
            // Rows are not integer-indexable.
            UnspannedPathMember::Int(_) => None,
        },
        UntaggedValue::Table(l) => match &name.unspanned {
            UnspannedPathMember::String(string) => {
                // First row that has the column wins (unlike the immutable
                // variant, which collects from every row).
                for item in l {
                    if let Value {
                        value: UntaggedValue::Row(o),
                        ..
                    } = item
                    {
                        if let Some(v) = o.get_mut_data_by_key(&string) {
                            return Some(v);
                        }
                    }
                }
                None
            }
            UnspannedPathMember::Int(int) => {
                // Integers that overflow usize resolve to None.
                let index = int.to_usize()?;
                l.get_mut(index)
            }
        },
        _ => None,
    }
}
|
17
crates/nu_plugin_average/Cargo.toml
Normal file
17
crates/nu_plugin_average/Cargo.toml
Normal file
@ -0,0 +1,17 @@
|
|||||||
|
[package]
|
||||||
|
name = "nu_plugin_average"
|
||||||
|
version = "0.7.0"
|
||||||
|
authors = ["Yehuda Katz <wycats@gmail.com>", "Jonathan Turner <jonathan.d.turner@gmail.com>", "Andrés N. Robalino <andres@androbtech.com>"]
|
||||||
|
edition = "2018"
|
||||||
|
description = "An average value plugin for Nushell"
|
||||||
|
license = "MIT"
|
||||||
|
|
||||||
|
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
|
||||||
|
|
||||||
|
[dependencies]
|
||||||
|
nu-protocol = { path = "../nu-protocol", version = "0.7.0" }
|
||||||
|
nu-source = { path = "../nu-source", version = "0.7.0" }
|
||||||
|
nu-errors = { path = "../nu-errors", version = "0.7.0" }
|
||||||
|
|
||||||
|
[build-dependencies]
|
||||||
|
nu-build = { version = "0.7.0", path = "../nu-build" }
|
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user