Mirror of https://github.com/nushell/nushell.git (synced 2025-07-01 07:00:37 +02:00)

Compare commits (267 commits)
Author | SHA1 | Date
---|---|---
407f36af29 | |||
763fcbc137 | |||
7061af712e | |||
9b4ba09c95 | |||
9ec6d0c90e | |||
f20a4a42e8 | |||
caa6830184 | |||
f8be1becf2 | |||
af51a0e6f0 | |||
23d11d5e84 | |||
6da9e2aced | |||
32dfb32741 | |||
d48f99cb0e | |||
35359cbc22 | |||
b52dbcc8ef | |||
4429a75e17 | |||
583f27dc41 | |||
83db5c34c3 | |||
cdbfdf282f | |||
a5e1372bc2 | |||
798a24eda5 | |||
a2bb23d78c | |||
d38a63473b | |||
2b37ae3e81 | |||
bc5a969562 | |||
fe4ad5f77e | |||
07191754bf | |||
66bd331ba9 | |||
762c798670 | |||
3c01526869 | |||
7efb31a4e4 | |||
c8dd7838a8 | |||
3b57ee5dda | |||
fb977ab941 | |||
e059c74a06 | |||
47d987d37f | |||
3abfefc025 | |||
a5c5b4e711 | |||
ba9cb753d5 | |||
ba7a1752db | |||
29431e73c2 | |||
d29fe6f6de | |||
e2e9abab0a | |||
2956b0b087 | |||
b32eceffb3 | |||
3adf52b1c4 | |||
78a644da2b | |||
98028433ad | |||
2ab5803f00 | |||
65980c7beb | |||
29fd8b55fb | |||
2f039b3abc | |||
d3dae05714 | |||
5fd3191d91 | |||
0dcd90cb8f | |||
02d0a4107e | |||
63885c4ee6 | |||
147bfefd7e | |||
60043df917 | |||
6d3a30772d | |||
347f91ab53 | |||
5692a08e7f | |||
515a3b33f8 | |||
c3e466e464 | |||
00c0327031 | |||
7451414b9e | |||
41ebc6b42d | |||
b574dc6365 | |||
4af9e1de41 | |||
77d856fd53 | |||
6dceabf389 | |||
5919c6c433 | |||
339a2de0eb | |||
3e3cb15f3d | |||
5e31851070 | |||
0f626dd076 | |||
aa577bf9bf | |||
25298d35e4 | |||
78016446dc | |||
b304de8199 | |||
72838cc083 | |||
8093612cac | |||
f37f29b441 | |||
dba82ac530 | |||
0615adac94 | |||
21e508009f | |||
a9317d939f | |||
65d843c2a1 | |||
f6c62bf121 | |||
b4bc5fe9af | |||
10368d7060 | |||
68a314b5cb | |||
3c7633ae9f | |||
dba347ad00 | |||
bfba2c57f8 | |||
c69bf9f46f | |||
7ce1ddc6fd | |||
e7ce6f2fcd | |||
0c786bb890 | |||
8d31c32bda | |||
e7fb15be59 | |||
be7550822c | |||
0ce216eec4 | |||
1fe85cb91e | |||
8cadc5a4ac | |||
f9da7f7d58 | |||
367f11a62e | |||
8a45ca9cc3 | |||
e336930fd8 | |||
172ccc910e | |||
a8425daf14 | |||
b629136528 | |||
91ebb7f718 | |||
96484161c0 | |||
d21ddeeae6 | |||
4322d373e6 | |||
08571392e6 | |||
f52235b1c1 | |||
a66147da47 | |||
df778afd1f | |||
d7ddaa376b | |||
2ce892c6f0 | |||
28179ef450 | |||
2c6336c806 | |||
761fc9ae73 | |||
314c3c4a97 | |||
f7f1fba94f | |||
14817ef229 | |||
98233dcec1 | |||
6540509911 | |||
594eae1cbc | |||
5e961815fc | |||
fa9329c8e3 | |||
6c577e18ca | |||
4034129dba | |||
52cf65c19e | |||
cbbb246a6d | |||
87cc6d6f01 | |||
4b9ef5a9d0 | |||
31c703891a | |||
550bda477b | |||
219b7e64cd | |||
98c59f77b2 | |||
e8800fdd0c | |||
09f903c37a | |||
57af9b5040 | |||
16272b1b20 | |||
1dcbd89a89 | |||
eb6ef02ad1 | |||
17586bdfbd | |||
0e98cf3f1e | |||
e2a95c3e1d | |||
5cb7df57fc | |||
88f899d341 | |||
7d70b5feda | |||
fd6ee03391 | |||
9f702fe01a | |||
c9d9eec7f8 | |||
38cbfdb8a9 | |||
f9b7376949 | |||
e98ed1b43d | |||
251c3e103d | |||
d26e938436 | |||
dbadf9499e | |||
28df1559ea | |||
91784218c0 | |||
eeec5e10c3 | |||
0515ed976c | |||
f653992b4a | |||
b5f8c1cc50 | |||
f9a46ce1e7 | |||
b6ba7f97fd | |||
7a47905f11 | |||
683f4c35d9 | |||
dfa5173cf4 | |||
04b214bef6 | |||
37cb7fec77 | |||
8833969e4a | |||
bda238267c | |||
d07dc57537 | |||
d0a2888e88 | |||
cec2eff933 | |||
38b7a3e32b | |||
9dfb6c023f | |||
cde92a9fb9 | |||
5622bbdd48 | |||
3d79a9c37a | |||
a2a5b30568 | |||
768adb84a4 | |||
26b0250e22 | |||
6893850fce | |||
8834e6905e | |||
1d5f13ddca | |||
d12c16a331 | |||
ecf47bb3ab | |||
a4bb5d4ff5 | |||
e9ee7bda46 | |||
1d196394f6 | |||
cfda67ff82 | |||
59510a85d1 | |||
35edf22ac3 | |||
871fc72892 | |||
1fcf671ca4 | |||
ecebe1314a | |||
bda5db59c8 | |||
4526d757b6 | |||
e5405d7f5c | |||
201506a5ad | |||
49f9253ca2 | |||
efc879b955 | |||
3fa03eb7a4 | |||
24bad78607 | |||
8de4c9dbb7 | |||
f858e854bf | |||
87dbd3d5ac | |||
fe66b4c8ea | |||
8390cc97e1 | |||
c0a7d4e2a7 | |||
ce23a672d9 | |||
9851317aeb | |||
3fb4a5d6e6 | |||
340e701124 | |||
36938a4407 | |||
6a6589a357 | |||
b94a32e523 | |||
7db3c69984 | |||
5406450c42 | |||
d6a6e16d21 | |||
ea1b65916d | |||
cd9d9ad50b | |||
552272b37e | |||
388ce738e3 | |||
ef7fbcbe9f | |||
80941ace37 | |||
f317500873 | |||
911414a190 | |||
cca6360bcc | |||
f68503fa21 | |||
911b69dff0 | |||
4115634bfc | |||
8a0bdde17a | |||
a1e21828d6 | |||
0f193c2337 | |||
526d94d862 | |||
2fdafa52b1 | |||
f52c0655c7 | |||
97331c7b25 | |||
1fb5a419a7 | |||
4e9afd6698 | |||
8f9dd6516e | |||
e4226def16 | |||
c199a84dbb | |||
5a4ca11362 | |||
f2968c8385 | |||
8d01b019f4 | |||
bf87330d6e | |||
2bb85bdbd4 | |||
8f34c6eeda | |||
ac5543bad9 | |||
e4c56a25c6 | |||
11ff8190b1 | |||
9bd25d7427 | |||
6bfb4207c4 | |||
c63ad610f5 | |||
e38a4323b4 | |||
d40aea5d0a | |||
fbb65cde44 |
@@ -4,25 +4,25 @@ trigger:
strategy:
matrix:
linux-stable:
image: ubuntu-16.04
image: ubuntu-18.04
style: 'unflagged'
macos-stable:
image: macos-10.14
style: 'unflagged'
windows-stable:
image: vs2017-win2016
image: windows-2019
style: 'unflagged'
linux-nightly-canary:
image: ubuntu-16.04
image: ubuntu-18.04
style: 'canary'
macos-nightly-canary:
image: macos-10.14
style: 'canary'
windows-nightly-canary:
image: vs2017-win2016
image: windows-2019
style: 'canary'
fmt:
image: ubuntu-16.04
image: ubuntu-18.04
style: 'fmt'

pool:
@@ -35,19 +35,28 @@ steps:
then
sudo apt-get -y install libxcb-composite0-dev libx11-dev
fi
if [ "$(uname)" == "Darwin" ]; then
curl https://sh.rustup.rs -sSf | sh -s -- -y --no-modify-path --default-toolchain "stable"
export PATH=$HOME/.cargo/bin:$PATH
rustup update
fi
rustc -Vv
echo "##vso[task.prependpath]$HOME/.cargo/bin"
rustup component add rustfmt --toolchain "stable"
rustup component add rustfmt
displayName: Install Rust
- bash: RUSTFLAGS="-D warnings" cargo test --all-features
- bash: RUSTFLAGS="-D warnings" cargo test --all --features stable,test-bins
condition: eq(variables['style'], 'unflagged')
displayName: Run tests
- bash: NUSHELL_ENABLE_ALL_FLAGS=1 RUSTFLAGS="-D warnings" cargo test --all-features
- bash: RUSTFLAGS="-D warnings" cargo clippy --all --features=stable -- -D clippy::result_unwrap_used -D clippy::option_unwrap_used
condition: eq(variables['style'], 'unflagged')
displayName: Check clippy lints
- bash: NUSHELL_ENABLE_ALL_FLAGS=1 RUSTFLAGS="-D warnings" cargo test --all --features stable,test-bins
condition: eq(variables['style'], 'canary')
displayName: Run tests
- bash: NUSHELL_ENABLE_ALL_FLAGS=1 RUSTFLAGS="-D warnings" cargo clippy --all --features=stable -- -D clippy::result_unwrap_used -D clippy::option_unwrap_used
condition: eq(variables['style'], 'canary')
displayName: Check clippy lints
- bash: cargo fmt --all -- --check
condition: eq(variables['style'], 'fmt')
displayName: Lint
2 .gitignore (vendored)
@@ -3,7 +3,7 @@
**/*.rs.bk
history.txt
tests/fixtures/nuplayground

crates/*/target
# Debian/Ubuntu
debian/.debhelper/
debian/debhelper-build-stamp
.gitpod.yml
@@ -1,7 +1,7 @@
image:
file: .gitpod.Dockerfile
tasks:
- init: cargo install nu
- init: cargo install --path . --force --features=stable
command: nu
github:
prebuilds:
@@ -19,3 +19,10 @@ github:
addBadge: false
# add a label once the prebuild is ready to pull requests (defaults to false)
addLabel: prebuilt-in-gitpod
vscode:
extensions:
- hbenl.vscode-test-explorer@2.15.0:koqDUMWDPJzELp/hdS/lWw==
- Swellaby.vscode-rust-test-adapter@0.11.0:Xg+YeZZQiVpVUsIkH+uiiw==
- serayuzgur.crates@0.4.7:HMkoguLcXp9M3ud7ac3eIw==
- belfz.search-crates-io@1.2.1:kSLnyrOhXtYPjQpKnMr4eQ==
- vadimcn.vscode-lldb@1.4.5:lwHCNwtm0kmOBXeQUIPGMQ==
1756 Cargo.lock (generated)
File diff suppressed because it is too large.
244 Cargo.toml
@ -1,6 +1,6 @@
|
||||
[package]
|
||||
name = "nu"
|
||||
version = "0.6.1"
|
||||
version = "0.9.0"
|
||||
authors = ["Yehuda Katz <wycats@gmail.com>", "Jonathan Turner <jonathan.d.turner@gmail.com>", "Andrés N. Robalino <andres@androbtech.com>"]
|
||||
description = "A shell for the GitHub era"
|
||||
license = "MIT"
|
||||
@ -9,108 +9,159 @@ readme = "README.md"
|
||||
default-run = "nu"
|
||||
repository = "https://github.com/nushell/nushell"
|
||||
homepage = "https://www.nushell.sh"
|
||||
documentation = "https://book.nushell.sh"
|
||||
documentation = "https://www.nushell.sh/book/"
|
||||
|
||||
[workspace]
|
||||
|
||||
members = ["crates/nu-source"]
|
||||
members = [
|
||||
"crates/nu-macros",
|
||||
"crates/nu-errors",
|
||||
"crates/nu-source",
|
||||
"crates/nu_plugin_average",
|
||||
"crates/nu_plugin_binaryview",
|
||||
"crates/nu_plugin_fetch",
|
||||
"crates/nu_plugin_inc",
|
||||
"crates/nu_plugin_match",
|
||||
"crates/nu_plugin_post",
|
||||
"crates/nu_plugin_ps",
|
||||
"crates/nu_plugin_str",
|
||||
"crates/nu_plugin_sum",
|
||||
"crates/nu_plugin_sys",
|
||||
"crates/nu_plugin_textview",
|
||||
"crates/nu_plugin_tree",
|
||||
"crates/nu-protocol",
|
||||
"crates/nu-plugin",
|
||||
"crates/nu-parser",
|
||||
"crates/nu-value-ext",
|
||||
"crates/nu-build"
|
||||
]
|
||||
|
||||
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
|
||||
|
||||
[dependencies]
|
||||
nu-source = { version = "0.1.0", path = "./crates/nu-source" }
|
||||
nu-source = {version = "0.9.0", path = "./crates/nu-source"}
|
||||
nu-plugin = {version = "0.9.0", path = "./crates/nu-plugin"}
|
||||
nu-protocol = {version = "0.9.0", path = "./crates/nu-protocol"}
|
||||
nu-errors = {version = "0.9.0", path = "./crates/nu-errors"}
|
||||
nu-parser = {version = "0.9.0", path = "./crates/nu-parser"}
|
||||
nu-value-ext = {version = "0.9.0", path = "./crates/nu-value-ext"}
|
||||
nu_plugin_average = {version = "0.9.0", path = "./crates/nu_plugin_average", optional=true}
|
||||
nu_plugin_binaryview = {version = "0.9.0", path = "./crates/nu_plugin_binaryview", optional=true}
|
||||
nu_plugin_fetch = {version = "0.9.0", path = "./crates/nu_plugin_fetch", optional=true}
|
||||
nu_plugin_inc = {version = "0.9.0", path = "./crates/nu_plugin_inc", optional=true}
|
||||
nu_plugin_match = {version = "0.9.0", path = "./crates/nu_plugin_match", optional=true}
|
||||
nu_plugin_post = {version = "0.9.0", path = "./crates/nu_plugin_post", optional=true}
|
||||
nu_plugin_ps = {version = "0.9.0", path = "./crates/nu_plugin_ps", optional=true}
|
||||
nu_plugin_str = {version = "0.9.0", path = "./crates/nu_plugin_str", optional=true}
|
||||
nu_plugin_sum = {version = "0.9.0", path = "./crates/nu_plugin_sum", optional=true}
|
||||
nu_plugin_sys = {version = "0.9.0", path = "./crates/nu_plugin_sys", optional=true}
|
||||
nu_plugin_textview = {version = "0.9.0", path = "./crates/nu_plugin_textview", optional=true}
|
||||
nu_plugin_tree = {version = "0.9.0", path = "./crates/nu_plugin_tree", optional=true}
|
||||
nu-macros = { version = "0.9.0", path = "./crates/nu-macros" }
|
||||
|
||||
rustyline = "5.0.4"
|
||||
chrono = { version = "0.4.9", features = ["serde"] }
|
||||
query_interface = "0.3.5"
|
||||
typetag = "0.1.4"
|
||||
rustyline = "6.0.0"
|
||||
chrono = { version = "0.4.10", features = ["serde"] }
|
||||
derive-new = "0.5.8"
|
||||
prettytable-rs = "0.8.0"
|
||||
itertools = "0.8.1"
|
||||
itertools = "0.8.2"
|
||||
ansi_term = "0.12.1"
|
||||
nom = "5.0.1"
|
||||
dunce = "1.0.0"
|
||||
indexmap = { version = "1.3.0", features = ["serde-1"] }
|
||||
chrono-humanize = "0.0.11"
|
||||
indexmap = { version = "1.3.1", features = ["serde-1"] }
|
||||
byte-unit = "3.0.3"
|
||||
base64 = "0.11"
|
||||
futures-preview = { version = "=0.3.0-alpha.19", features = ["compat", "io-compat"] }
|
||||
async-stream = "0.1.2"
|
||||
futures_codec = "0.2.5"
|
||||
num-traits = "0.2.8"
|
||||
num-traits = "0.2.11"
|
||||
term = "0.5.2"
|
||||
bytes = "0.4.12"
|
||||
log = "0.4.8"
|
||||
pretty_env_logger = "0.3.1"
|
||||
serde = { version = "1.0.102", features = ["derive"] }
|
||||
serde = { version = "1.0.104", features = ["derive"] }
|
||||
bson = { version = "0.14.0", features = ["decimal128"] }
|
||||
serde_json = "1.0.41"
|
||||
serde_json = "1.0.44"
|
||||
serde-hjson = "0.9.1"
|
||||
serde_yaml = "0.8"
|
||||
serde_bytes = "0.11.2"
|
||||
serde_bytes = "0.11.3"
|
||||
getset = "0.0.9"
|
||||
language-reporting = "0.4.0"
|
||||
app_dirs = "1.2.1"
|
||||
csv = "1.1"
|
||||
toml = "0.5.5"
|
||||
toml = "0.5.6"
|
||||
clap = "2.33.0"
|
||||
git2 = { version = "0.10.1", default_features = false }
|
||||
git2 = { version = "0.11.0", default_features = false }
|
||||
dirs = "2.0.2"
|
||||
glob = "0.3.0"
|
||||
ctrlc = "3.1.3"
|
||||
surf = "1.0.3"
|
||||
url = "2.1.0"
|
||||
roxmltree = "0.7.2"
|
||||
roxmltree = "0.9.0"
|
||||
nom_locate = "1.0.0"
|
||||
nom-tracable = "0.4.1"
|
||||
unicode-xid = "0.2.0"
|
||||
serde_ini = "0.2.0"
|
||||
subprocess = "0.1.18"
|
||||
mime = "0.3.14"
|
||||
pretty-hex = "0.1.1"
|
||||
hex = "0.4"
|
||||
tempfile = "3.1.0"
|
||||
semver = "0.9.0"
|
||||
which = "3.1"
|
||||
which = "3.1.0"
|
||||
ichwh = "0.3"
|
||||
textwrap = {version = "0.11.0", features = ["term_size"]}
|
||||
shellexpand = "1.0.0"
|
||||
futures-timer = "2.0.0"
|
||||
shellexpand = "1.1.1"
|
||||
pin-utils = "0.1.0-alpha.4"
|
||||
num-bigint = { version = "0.2.3", features = ["serde"] }
|
||||
num-bigint = { version = "0.2.5", features = ["serde"] }
|
||||
bigdecimal = { version = "0.1.0", features = ["serde"] }
|
||||
natural = "0.3.0"
|
||||
serde_urlencoded = "0.6.1"
|
||||
sublime_fuzzy = "0.6"
|
||||
trash = "1.0.0"
|
||||
regex = "1"
|
||||
cfg-if = "0.1"
|
||||
strip-ansi-escapes = "0.1.0"
|
||||
calamine = "0.16"
|
||||
umask = "0.1"
|
||||
futures-util = "0.3.0"
|
||||
pretty = "0.5.2"
|
||||
termcolor = "1.0.5"
|
||||
console = "0.9.1"
|
||||
futures-util = "0.3.1"
|
||||
termcolor = "1.1.0"
|
||||
natural = "0.3.0"
|
||||
parking_lot = "0.10.0"
|
||||
futures-timer = "1.0.2"
|
||||
|
||||
neso = { version = "0.5.0", optional = true }
|
||||
crossterm = { version = "0.10.2", optional = true }
|
||||
syntect = {version = "3.2.0", optional = true }
|
||||
onig_sys = {version = "=69.1.0", optional = true }
|
||||
heim = {version = "0.0.8", optional = true }
|
||||
battery = {version = "0.7.4", optional = true }
|
||||
rawkey = {version = "0.1.2", optional = true }
|
||||
clipboard = {version = "0.5", optional = true }
|
||||
ptree = {version = "0.2" }
|
||||
image = { version = "0.22.2", default_features = false, features = ["png_codec", "jpeg"], optional = true }
|
||||
starship = { version = "0.26.4", optional = true}
|
||||
starship = { version = "0.33.1", optional = true}
|
||||
heim = {version = "0.0.9", optional = true}
|
||||
battery = {version = "0.7.5", optional = true}
|
||||
syntect = {version = "3.2.0", optional = true }
|
||||
onig_sys = {version = "=69.1.0", optional = true }
|
||||
crossterm = {version = "0.14.2", optional = true}
|
||||
url = {version = "2.1.1", optional = true}
|
||||
semver = {version = "0.9.0", optional = true}
|
||||
|
||||
[target.'cfg(unix)'.dependencies]
|
||||
users = "0.9"
|
||||
|
||||
[features]
|
||||
default = ["textview", "sys", "ps"]
|
||||
raw-key = ["rawkey", "neso"]
|
||||
textview = ["syntect", "onig_sys", "crossterm"]
|
||||
binaryview = ["image", "crossterm"]
|
||||
# Test executables
|
||||
test-bins = []
|
||||
|
||||
default = ["sys", "ps", "textview", "inc", "str"]
|
||||
stable = ["default", "starship-prompt", "binaryview", "match", "tree", "average", "sum", "post", "fetch", "clipboard"]
|
||||
|
||||
# Default
|
||||
sys = ["heim", "battery"]
|
||||
ps = ["heim"]
|
||||
textview = ["crossterm", "syntect", "onig_sys", "url"]
|
||||
inc = ["nu_plugin_inc"]
|
||||
str = ["nu_plugin_str"]
|
||||
|
||||
# Stable
|
||||
average = ["nu_plugin_average"]
|
||||
binaryview = ["nu_plugin_binaryview"]
|
||||
fetch = ["nu_plugin_fetch"]
|
||||
match = ["nu_plugin_match"]
|
||||
post = ["nu_plugin_post"]
|
||||
starship-prompt = ["starship"]
|
||||
# trace = ["nom-tracable/trace"]
|
||||
sum = ["nu_plugin_sum"]
|
||||
trace = ["nu-parser/trace"]
|
||||
tree = ["nu_plugin_tree"]
|
||||
|
||||
[dependencies.rusqlite]
|
||||
version = "0.20.0"
|
||||
@ -118,89 +169,62 @@ features = ["bundled", "blob"]
|
||||
|
||||
[dev-dependencies]
|
||||
pretty_assertions = "0.6.1"
|
||||
nu-test-support = { version = "0.9.0", path = "./crates/nu-test-support" }
|
||||
|
||||
[build-dependencies]
|
||||
toml = "0.5.5"
|
||||
serde = { version = "1.0.102", features = ["derive"] }
|
||||
toml = "0.5.6"
|
||||
serde = { version = "1.0.104", features = ["derive"] }
|
||||
nu-build = { version = "0.9.0", path = "./crates/nu-build" }
|
||||
|
||||
[lib]
|
||||
name = "nu"
|
||||
doctest = false
|
||||
path = "src/lib.rs"
|
||||
|
||||
[[bin]]
|
||||
name = "nu_plugin_inc"
|
||||
path = "src/plugins/inc.rs"
|
||||
name = "fail"
|
||||
path = "crates/nu-test-support/src/bins/fail.rs"
|
||||
required-features = ["test-bins"]
|
||||
|
||||
[[bin]]
|
||||
name = "nu_plugin_sum"
|
||||
path = "src/plugins/sum.rs"
|
||||
name = "chop"
|
||||
path = "crates/nu-test-support/src/bins/chop.rs"
|
||||
required-features = ["test-bins"]
|
||||
|
||||
[[bin]]
|
||||
name = "nu_plugin_average"
|
||||
path = "src/plugins/average.rs"
|
||||
name = "cococo"
|
||||
path = "crates/nu-test-support/src/bins/cococo.rs"
|
||||
required-features = ["test-bins"]
|
||||
|
||||
# Core plugins that ship with `cargo install nu` by default
|
||||
# Currently, Cargo limits us to installing only one binary
|
||||
# unless we use [[bin]], so we use this as a workaround
|
||||
[[bin]]
|
||||
name = "nu_plugin_embed"
|
||||
path = "src/plugins/embed.rs"
|
||||
|
||||
[[bin]]
|
||||
name = "nu_plugin_insert"
|
||||
path = "src/plugins/insert.rs"
|
||||
|
||||
[[bin]]
|
||||
name = "nu_plugin_edit"
|
||||
path = "src/plugins/edit.rs"
|
||||
|
||||
[[bin]]
|
||||
name = "nu_plugin_format"
|
||||
path = "src/plugins/format.rs"
|
||||
|
||||
[[bin]]
|
||||
name = "nu_plugin_parse"
|
||||
path = "src/plugins/parse.rs"
|
||||
|
||||
[[bin]]
|
||||
name = "nu_plugin_str"
|
||||
path = "src/plugins/str.rs"
|
||||
|
||||
[[bin]]
|
||||
name = "nu_plugin_skip"
|
||||
path = "src/plugins/skip.rs"
|
||||
|
||||
[[bin]]
|
||||
name = "nu_plugin_match"
|
||||
path = "src/plugins/match.rs"
|
||||
|
||||
[[bin]]
|
||||
name = "nu_plugin_sys"
|
||||
path = "src/plugins/sys.rs"
|
||||
required-features = ["sys"]
|
||||
|
||||
[[bin]]
|
||||
name = "nu_plugin_ps"
|
||||
path = "src/plugins/ps.rs"
|
||||
required-features = ["ps"]
|
||||
|
||||
[[bin]]
|
||||
name = "nu_plugin_tree"
|
||||
path = "src/plugins/tree.rs"
|
||||
required-features = ["tree"]
|
||||
|
||||
[[bin]]
|
||||
name = "nu_plugin_binaryview"
|
||||
path = "src/plugins/binaryview.rs"
|
||||
required-features = ["binaryview"]
|
||||
|
||||
[[bin]]
|
||||
name = "nu_plugin_textview"
|
||||
path = "src/plugins/textview.rs"
|
||||
name = "nu_plugin_core_textview"
|
||||
path = "src/plugins/nu_plugin_core_textview.rs"
|
||||
required-features = ["textview"]
|
||||
|
||||
[[bin]]
|
||||
name = "nu_plugin_docker"
|
||||
path = "src/plugins/docker.rs"
|
||||
required-features = ["docker"]
|
||||
name = "nu_plugin_core_inc"
|
||||
path = "src/plugins/nu_plugin_core_inc.rs"
|
||||
required-features = ["inc"]
|
||||
|
||||
[[bin]]
|
||||
name = "nu_plugin_core_ps"
|
||||
path = "src/plugins/nu_plugin_core_ps.rs"
|
||||
required-features = ["ps"]
|
||||
|
||||
[[bin]]
|
||||
name = "nu_plugin_core_str"
|
||||
path = "src/plugins/nu_plugin_core_str.rs"
|
||||
required-features = ["str"]
|
||||
|
||||
[[bin]]
|
||||
name = "nu_plugin_core_sys"
|
||||
path = "src/plugins/nu_plugin_core_sys.rs"
|
||||
required-features = ["sys"]
|
||||
|
||||
# Main nu binary
|
||||
[[bin]]
|
||||
name = "nu"
|
||||
path = "src/main.rs"
|
||||
|
141 README.md
@ -1,3 +1,4 @@
|
||||
[](https://gitpod.io/#https://github.com/nushell/nushell)
|
||||
[](https://crates.io/crates/nu)
|
||||
[](https://dev.azure.com/nushell/nushell/_build/latest?definitionId=2&branchName=master)
|
||||
[](https://discord.gg/NtAbbGn)
|
||||
@ -6,7 +7,7 @@
|
||||
|
||||
# Nu Shell
|
||||
|
||||
A modern shell for the GitHub era.
|
||||
A new type of shell.
|
||||
|
||||

|
||||
|
||||
@ -18,12 +19,14 @@ Nu comes with a set of built-in commands (listed below). If a command is unknown
|
||||
|
||||
# Learning more
|
||||
|
||||
There are a few good resources to learn about Nu. There is a [book](https://book.nushell.sh) about Nu that is currently in progress. The book focuses on using Nu and its core concepts.
|
||||
There are a few good resources to learn about Nu. There is a [book](https://www.nushell.sh/book/) about Nu that is currently in progress. The book focuses on using Nu and its core concepts.
|
||||
|
||||
If you're a developer who would like to contribute to Nu, we're also working on a [book for developers](https://github.com/nushell/contributor-book/tree/master/en) to help you get started. There are also [good first issues](https://github.com/nushell/nushell/issues?q=is%3Aopen+is%3Aissue+label%3A%22good+first+issue%22) to help you dive in.
|
||||
If you're a developer who would like to contribute to Nu, we're also working on a [book for developers](https://www.nushell.sh/contributor-book/) to help you get started. There are also [good first issues](https://github.com/nushell/nushell/issues?q=is%3Aopen+is%3Aissue+label%3A%22good+first+issue%22) to help you dive in.
|
||||
|
||||
We also have an active [Discord](https://discord.gg/NtAbbGn) and [Twitter](https://twitter.com/nu_shell) if you'd like to come and chat with us.
|
||||
|
||||
You can also find more learning resources in our [documentation](https://www.nushell.sh/documentation.html) site.
|
||||
|
||||
Try it in Gitpod.
|
||||
|
||||
[](https://gitpod.io/#https://github.com/nushell/nushell)
|
||||
@ -32,7 +35,7 @@ Try it in Gitpod.
|
||||
|
||||
## Local
|
||||
|
||||
Up-to-date installation instructions can be found in the [installation chapter of the book](https://book.nushell.sh/en/installation). **Windows users**: please note that Nu works on Windows 10 and does not currently have Windows 7/8.1 support.
|
||||
Up-to-date installation instructions can be found in the [installation chapter of the book](https://www.nushell.sh/book/en/installation.html). **Windows users**: please note that Nu works on Windows 10 and does not currently have Windows 7/8.1 support.
|
||||
|
||||
To build Nu, you will need to use the **latest stable (1.39 or later)** version of the compiler.
|
||||
|
||||
@ -52,10 +55,10 @@ To install Nu via cargo (make sure you have installed [rustup](https://rustup.rs
|
||||
cargo install nu
|
||||
```
|
||||
|
||||
You can also install Nu with all the bells and whistles (be sure to have installed the [dependencies](https://book.nushell.sh/en/installation#dependencies) for your platform):
|
||||
You can also build Nu yourself with all the bells and whistles (be sure to have installed the [dependencies](https://www.nushell.sh/book/en/installation.html#dependencies) for your platform), once you have checked out this repo with git:
|
||||
|
||||
```
|
||||
cargo install nu --all-features
|
||||
cargo build --all --features=stable
|
||||
```
|
||||
|
||||
## Docker
|
||||
@ -118,7 +121,7 @@ Commands are separated by the pipe symbol (`|`) to denote a pipeline flowing lef
|
||||
|
||||
```
|
||||
/home/jonathan/Source/nushell(master)> ls | where type == "Directory" | autoview
|
||||
━━━━┯━━━━━━━━━━━┯━━━━━━━━━━━┯━━━━━━━━━━┯━━━━━━━━┯━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━
|
||||
────┬───────────┬───────────┬──────────┬────────┬──────────────┬────────────────
|
||||
# │ name │ type │ readonly │ size │ accessed │ modified
|
||||
────┼───────────┼───────────┼──────────┼────────┼──────────────┼────────────────
|
||||
0 │ .azure │ Directory │ │ 4.1 KB │ 2 months ago │ a day ago
|
||||
@ -129,7 +132,7 @@ Commands are separated by the pipe symbol (`|`) to denote a pipeline flowing lef
|
||||
5 │ src │ Directory │ │ 4.1 KB │ 2 months ago │ 37 minutes ago
|
||||
6 │ assets │ Directory │ │ 4.1 KB │ a month ago │ a month ago
|
||||
7 │ docs │ Directory │ │ 4.1 KB │ 2 months ago │ 2 months ago
|
||||
━━━━┷━━━━━━━━━━━┷━━━━━━━━━━━┷━━━━━━━━━━┷━━━━━━━━┷━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━
|
||||
────┴───────────┴───────────┴──────────┴────────┴──────────────┴────────────────
|
||||
```
|
||||
|
||||
Because most of the time you'll want to see the output of a pipeline, `autoview` is assumed. We could have also written the above:
|
||||
@ -142,15 +145,14 @@ Being able to use the same commands and compose them differently is an important
|
||||
|
||||
```text
|
||||
/home/jonathan/Source/nushell(master)> ps | where cpu > 0
|
||||
━━━┯━━━━━━━┯━━━━━━━━━━━━━━━━━┯━━━━━━━━━━┯━━━━━━━━━━
|
||||
───┬───────┬─────────────────┬──────────┬──────────
|
||||
# │ pid │ name │ status │ cpu
|
||||
───┼───────┼─────────────────┼──────────┼──────────
|
||||
0 │ 992 │ chrome │ Sleeping │ 6.988768
|
||||
1 │ 4240 │ chrome │ Sleeping │ 5.645982
|
||||
2 │ 13973 │ qemu-system-x86 │ Sleeping │ 4.996551
|
||||
3 │ 15746 │ nu │ Sleeping │ 84.59905
|
||||
━━━┷━━━━━━━┷━━━━━━━━━━━━━━━━━┷━━━━━━━━━━┷━━━━━━━━━━
|
||||
|
||||
───┴───────┴─────────────────┴──────────┴──────────
|
||||
```
|
||||
|
||||
## Opening files
|
||||
@ -159,29 +161,29 @@ Nu can load file and URL contents as raw text or as structured data (if it recog
|
||||
|
||||
```
|
||||
/home/jonathan/Source/nushell(master)> open Cargo.toml
|
||||
━━━━━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━━━
|
||||
──────────────────┬────────────────┬──────────────────
|
||||
bin │ dependencies │ dev-dependencies
|
||||
──────────────────┼────────────────┼──────────────────
|
||||
[table: 12 rows] │ [table: 1 row] │ [table: 1 row]
|
||||
━━━━━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━━━
|
||||
──────────────────┴────────────────┴──────────────────
|
||||
```
|
||||
|
||||
We can pipeline this into a command that gets the contents of one of the columns:
|
||||
|
||||
```
|
||||
/home/jonathan/Source/nushell(master)> open Cargo.toml | get package
|
||||
━━━━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━━━━━━━━━━━━━┯━━━━━━━━━┯━━━━━━━━━┯━━━━━━┯━━━━━━━━━
|
||||
─────────────────┬────────────────────────────┬─────────┬─────────┬──────┬─────────
|
||||
authors │ description │ edition │ license │ name │ version
|
||||
─────────────────┼────────────────────────────┼─────────┼─────────┼──────┼─────────
|
||||
[table: 3 rows] │ A shell for the GitHub era │ 2018 │ MIT │ nu │ 0.6.1
|
||||
━━━━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━━━━━━━━━━━━━┷━━━━━━━━━┷━━━━━━━━━┷━━━━━━┷━━━━━━━━━
|
||||
[table: 3 rows] │ A shell for the GitHub era │ 2018 │ MIT │ nu │ 0.9.0
|
||||
─────────────────┴────────────────────────────┴─────────┴─────────┴──────┴─────────
|
||||
```
|
||||
|
||||
Finally, we can use commands outside of Nu once we have the data we want:
|
||||
|
||||
```
|
||||
/home/jonathan/Source/nushell(master)> open Cargo.toml | get package.version | echo $it
|
||||
0.6.1
|
||||
0.9.0
|
||||
```
|
||||
|
||||
Here we use the variable `$it` to refer to the value being piped to the external command.
|
||||
@ -191,18 +193,19 @@ Here we use the variable `$it` to refer to the value being piped to the external
|
||||
Nu has early support for configuring the shell. It currently supports the following settings:
|
||||
|
||||
| Variable | Type | Description |
|
||||
| ------------- | ------------- | ----- |
|
||||
| --------------- | -------------------- | -------------------------------------------------------------- |
|
||||
| path | table of strings | PATH to use to find binaries |
|
||||
| env | row | the environment variables to pass to external commands |
|
||||
| ctrlc_exit | boolean | whether or not to exit Nu after multiple ctrl-c presses |
|
||||
| table_mode | "light" or other | enable lightweight or normal tables |
|
||||
| edit_mode | "vi" or "emacs" | changes line editing to "vi" or "emacs" mode |
|
||||
| completion_mode | "circular" or "list" | changes completion type to "circular" (default) or "list" mode |
|
||||
|
||||
To set one of these variables, you can use `config --set`. For example:
|
||||
|
||||
```
|
||||
> config --set [edit_mode "vi"]
|
||||
> config --set [path $nu:path]
|
||||
> config --set [path $nu.path]
|
||||
```
|
||||
|
||||
## Shells
|
||||
@ -236,106 +239,8 @@ Nu adheres closely to a set of goals that make up its design philosophy. As feat
|
||||
* Finally, Nu views data functionally. Rather than using mutation, pipelines act as a means to load, change, and save data without mutable state.
|
||||
|
||||
# Commands
|
||||
## Initial commands
|
||||
| command | description |
|
||||
| ------------- | ------------- |
|
||||
| cd path | Change to a new path |
|
||||
| cp source path | Copy files |
|
||||
| date (--utc) | Get the current datetime |
|
||||
| fetch url | Fetch contents from a url and retrieve data as a table if possible |
|
||||
| help | Display help information about commands |
|
||||
| ls (path) | View the contents of the current or given path |
|
||||
| mkdir path | Make directories, creates intermediary directories as required. |
|
||||
| mv source target | Move files or directories. |
|
||||
| open filename | Load a file into a cell, convert to table if possible (avoid by appending '--raw') |
|
||||
| post url body (--user <user>) (--password <password>) | Post content to a url and retrieve data as a table if possible |
|
||||
| ps | View current processes |
|
||||
| sys | View information about the current system |
|
||||
| which filename | Finds a program file. |
|
||||
| rm {file or directory} | Remove a file or directory (append '--recursive' to remove a directory) |
|
||||
| version | Display Nu version |
|
||||
|
||||
## Shell commands
|
||||
| command | description |
|
||||
| ------- | ----------- |
|
||||
| exit (--now) | Exit the current shell (or all shells) |
|
||||
| enter (path) | Create a new shell and begin at this path |
|
||||
| p | Go to previous shell |
|
||||
| n | Go to next shell |
|
||||
| shells | Display the list of current shells |
|
||||
|
||||
## Filters on tables (structured data)
|
||||
| command | description |
|
||||
| ------------- | ------------- |
|
||||
| append row-data | Append a row to the end of the table |
|
||||
| compact ...columns | Remove rows where given columns are empty |
|
||||
| count | Show the total number of rows |
|
||||
| default column row-data | Sets a default row's column if missing |
|
||||
| edit column-or-column-path value | Edit an existing column to have a new value |
|
||||
| embed column | Creates a new table of one column with the given name, and places the current table inside of it |
|
||||
| first amount | Show only the first number of rows |
|
||||
| format pattern | Format table row data as a string following the given pattern |
|
||||
| get column-or-column-path | Open column and get data from the corresponding cells |
|
||||
| group-by column | Creates a new table with the data from the table rows grouped by the column given |
|
||||
| histogram column ...column-names | Creates a new table with a histogram based on the given column, optionally naming the frequency column |
|
||||
| inc (column-or-column-path) | Increment a value or version. Optionally use the column of a table |
|
||||
| insert column-or-column-path value | Insert a new column to the table |
|
||||
| last amount | Show only the last number of rows |
|
||||
| nth ...row-numbers | Return only the selected rows |
|
||||
| pick ...columns | Down-select table to only these columns |
|
||||
| pivot --header-row <headers> | Pivot the tables, making columns into rows and vice versa |
|
||||
| prepend row-data | Prepend a row to the beginning of the table |
|
||||
| reject ...columns | Remove the given columns from the table |
|
||||
| reverse | Reverses the table. |
|
||||
| skip amount | Skip a number of rows |
|
||||
| skip-while condition | Skips rows while the condition matches |
|
||||
| split-by column | Creates a new table with the data from the inner tables split by the given column |
|
||||
| sort-by ...columns | Sort by the given columns |
|
||||
| str (column) | Apply string function. Optionally use the column of a table |
|
||||
| sum | Sum a column of values |
|
||||
| tags | Read the tags (metadata) for values |
|
||||
| to-bson | Convert table into .bson binary data |
|
||||
| to-csv | Convert table into .csv text |
|
||||
| to-json | Convert table into .json text |
|
||||
| to-sqlite | Convert table to sqlite .db binary data |
|
||||
| to-toml | Convert table into .toml text |
|
||||
| to-tsv | Convert table into .tsv text |
|
||||
| to-url | Convert table to a urlencoded string |
|
||||
| to-yaml | Convert table into .yaml text |
|
||||
| where condition | Filter table to match the condition |
|
||||
|
||||
## Filters on text (unstructured data)
|
||||
| command | description |
|
||||
| ------------- | ------------- |
|
||||
| from-bson | Parse binary data as .bson and create table |
|
||||
| from-csv | Parse text as .csv and create table |
|
||||
| from-ini | Parse text as .ini and create table |
|
||||
| from-json | Parse text as .json and create table |
|
||||
| from-sqlite | Parse binary data as sqlite .db and create table |
|
||||
| from-ssv --minimum-spaces <minimum number of spaces to count as a separator> | Parse text as space-separated values and create table |
|
||||
| from-toml | Parse text as .toml and create table |
|
||||
| from-tsv | Parse text as .tsv and create table |
|
||||
| from-url | Parse urlencoded string and create a table |
|
||||
| from-xml | Parse text as .xml and create a table |
|
||||
| from-yaml | Parse text as a .yaml/.yml and create a table |
|
||||
| lines | Split single string into rows, one per line |
|
||||
| parse pattern | Convert text to a table by matching the given pattern |
|
||||
| size | Gather word count statistics on the text |
|
||||
| split-column sep ...column-names | Split row contents across multiple columns via the separator, optionally give the columns names |
|
||||
| split-row sep | Split row contents over multiple rows via the separator |
|
||||
| trim | Trim leading and trailing whitespace from text data |
|
||||
| {external-command} $it | Run external command with given arguments, replacing $it with each row text |
|
||||
|
||||
## Consuming commands
|
||||
| command | description |
|
||||
| ------------- | ------------- |
|
||||
| autoview | View the contents of the pipeline as a table or list |
|
||||
| binaryview | Autoview of binary data (optional feature) |
|
||||
| clip | Copy the contents of the pipeline to the copy/paste buffer (optional feature) |
|
||||
| save filename | Save the contents of the pipeline to a file |
|
||||
| table | View the contents of the pipeline as a table |
|
||||
| textview | Autoview of text data |
|
||||
| tree | View the contents of the pipeline as a tree (optional feature) |
|
||||
You can find a list of Nu commands, complete with documentation, in [quick command references](https://www.nushell.sh/documentation.html#quick-command-references).
|
||||
|
||||
# License
|
||||
|
||||
|
12 TODO.md
@@ -46,3 +46,15 @@ Unify dictionary building, probably around a macro
sys plugin in own crate

textview in own crate

Combine atomic and atomic_parse in parser

at_end_possible_ws needs to be comment and separator sensitive

Eliminate unnecessary `nodes` parser

#[derive(HasSpan)]

Figure out a solution for the duplication in stuff like NumberShape vs. NumberExpressionShape

use `struct Expander` from signature.rs
38 build.rs
@@ -1,39 +1,3 @@
use serde::Deserialize;
use std::collections::HashMap;
use std::collections::HashSet;
use std::env;
use std::path::Path;

#[derive(Deserialize)]
struct Feature {
    #[allow(unused)]
    description: String,
    enabled: bool,
}

fn main() -> Result<(), Box<dyn std::error::Error>> {
    let input = env::var("CARGO_MANIFEST_DIR").unwrap();
    let all_on = env::var("NUSHELL_ENABLE_ALL_FLAGS").is_ok();
    let flags: HashSet<String> = env::var("NUSHELL_ENABLE_FLAGS")
        .map(|s| s.split(",").map(|s| s.to_string()).collect())
        .unwrap_or_else(|_| HashSet::new());

    if all_on && !flags.is_empty() {
        println!(
            "cargo:warning={}",
            "Both NUSHELL_ENABLE_ALL_FLAGS and NUSHELL_ENABLE_FLAGS were set. You don't need both."
        );
    }

    let path = Path::new(&input).join("features.toml");

    let toml: HashMap<String, Feature> = toml::from_str(&std::fs::read_to_string(path)?)?;

    for (key, value) in toml.iter() {
        if value.enabled == true || all_on || flags.contains(key) {
            println!("cargo:rustc-cfg={}", key);
        }
    }

    Ok(())
    nu_build::build()
}
16 crates/nu-build/Cargo.toml (new file)
@@ -0,0 +1,16 @@
[package]
name = "nu-build"
version = "0.9.0"
authors = ["Yehuda Katz <wycats@gmail.com>", "Jonathan Turner <jonathan.d.turner@gmail.com>", "Andrés N. Robalino <andres@androbtech.com>"]
edition = "2018"
description = "Core build system for nushell"
license = "MIT"

[lib]
doctest = false

[dependencies]
serde = { version = "1.0.103", features = ["derive"] }
lazy_static = "1.4.0"
serde_json = "1.0.44"
toml = "0.5.5"
80 crates/nu-build/src/lib.rs (new file)
@ -0,0 +1,80 @@
|
||||
use lazy_static::lazy_static;
|
||||
use serde::Deserialize;
|
||||
use std::collections::BTreeMap;
|
||||
use std::collections::HashMap;
|
||||
use std::collections::HashSet;
|
||||
use std::env;
|
||||
use std::path::{Path, PathBuf};
|
||||
use std::sync::Mutex;
|
||||
|
||||
lazy_static! {
|
||||
static ref WORKSPACES: Mutex<BTreeMap<String, &'static Path>> = Mutex::new(BTreeMap::new());
|
||||
}
|
||||
|
||||
// got from https://github.com/mitsuhiko/insta/blob/b113499249584cb650150d2d01ed96ee66db6b30/src/runtime.rs#L67-L88
|
||||
|
||||
fn get_cargo_workspace(manifest_dir: &str) -> Result<Option<&Path>, Box<dyn std::error::Error>> {
|
||||
let mut workspaces = WORKSPACES.lock()?;
|
||||
if let Some(rv) = workspaces.get(manifest_dir) {
|
||||
Ok(Some(rv))
|
||||
} else {
|
||||
#[derive(Deserialize)]
|
||||
struct Manifest {
|
||||
workspace_root: String,
|
||||
}
|
||||
let output = std::process::Command::new(env!("CARGO"))
|
||||
.arg("metadata")
|
||||
.arg("--format-version=1")
|
||||
.current_dir(manifest_dir)
|
||||
.output()?;
|
||||
let manifest: Manifest = serde_json::from_slice(&output.stdout)?;
|
||||
let path = Box::leak(Box::new(PathBuf::from(manifest.workspace_root)));
|
||||
workspaces.insert(manifest_dir.to_string(), path.as_path());
|
||||
Ok(workspaces.get(manifest_dir).cloned())
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Deserialize)]
|
||||
struct Feature {
|
||||
#[allow(unused)]
|
||||
description: String,
|
||||
enabled: bool,
|
||||
}
|
||||
|
||||
pub fn build() -> Result<(), Box<dyn std::error::Error>> {
|
||||
let input = env::var("CARGO_MANIFEST_DIR")?;
|
||||
|
||||
let all_on = env::var("NUSHELL_ENABLE_ALL_FLAGS").is_ok();
|
||||
let flags: HashSet<String> = env::var("NUSHELL_ENABLE_FLAGS")
|
||||
.map(|s| s.split(',').map(|s| s.to_string()).collect())
|
||||
.unwrap_or_else(|_| HashSet::new());
|
||||
|
||||
if all_on && !flags.is_empty() {
|
||||
println!(
|
||||
"cargo:warning=Both NUSHELL_ENABLE_ALL_FLAGS and NUSHELL_ENABLE_FLAGS were set. You don't need both."
|
||||
);
|
||||
}
|
||||
|
||||
let workspace = match get_cargo_workspace(&input)? {
|
||||
// If the crate is being downloaded from crates.io, it won't have a workspace root, and that's ok
|
||||
None => return Ok(()),
|
||||
Some(workspace) => workspace,
|
||||
};
|
||||
|
||||
let path = Path::new(&workspace).join("features.toml");
|
||||
|
||||
// If the crate is being downloaded from crates.io, it won't have a features.toml, and that's ok
|
||||
if !path.exists() {
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
let toml: HashMap<String, Feature> = toml::from_str(&std::fs::read_to_string(path)?)?;
|
||||
|
||||
for (key, value) in toml.iter() {
|
||||
if value.enabled || all_on || flags.contains(key) {
|
||||
println!("cargo:rustc-cfg={}", key);
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
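The build-flag machinery that used to live in the root `build.rs` now sits in `nu-build`: `build()` locates the workspace root via `cargo metadata`, reads `features.toml` if one exists, and emits `cargo:rustc-cfg` lines for enabled entries (or for flags forced on through `NUSHELL_ENABLE_ALL_FLAGS`/`NUSHELL_ENABLE_FLAGS`). As a minimal sketch of the file shape this expects (not part of the diff; the `[hintsv1]` flag name is made up for illustration), each entry is a TOML table keyed by the cfg name, deserialized into the `Feature` struct shown above:

```rust
// Standalone sketch of the features.toml shape consumed by nu_build::build().
// Requires the `toml` crate and `serde` with the "derive" feature.
// The [hintsv1] entry below is a made-up example flag, not one from the repo.
use serde::Deserialize;
use std::collections::HashMap;

#[derive(Deserialize)]
struct Feature {
    #[allow(unused)]
    description: String,
    enabled: bool,
}

fn main() -> Result<(), Box<dyn std::error::Error>> {
    let text = r#"
        [hintsv1]
        description = "Show inline hints in the line editor"
        enabled = false
    "#;

    // Same parsing step as the build script: one Feature table per cfg flag.
    let features: HashMap<String, Feature> = toml::from_str(text)?;

    for (name, feature) in &features {
        // The real build script also honors NUSHELL_ENABLE_ALL_FLAGS /
        // NUSHELL_ENABLE_FLAGS before emitting the cfg.
        if feature.enabled {
            println!("cargo:rustc-cfg={}", name);
        }
    }

    Ok(())
}
```

When `nu` is built from a crates.io download rather than a repository checkout, there is no workspace `features.toml`, which is why `build()` simply returns `Ok(())` in that case.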
32 crates/nu-errors/Cargo.toml (new file)
@@ -0,0 +1,32 @@
[package]
name = "nu-errors"
version = "0.9.0"
authors = ["Yehuda Katz <wycats@gmail.com>", "Jonathan Turner <jonathan.d.turner@gmail.com>", "Andrés N. Robalino <andres@androbtech.com>"]
edition = "2018"
description = "Core error subsystem for Nushell"
license = "MIT"

[lib]
doctest = false

[dependencies]
nu-source = { path = "../nu-source", version = "0.9.0" }

ansi_term = "0.12.1"
bigdecimal = { version = "0.1.0", features = ["serde"] }
derive-new = "0.5.8"
language-reporting = "0.4.0"
num-bigint = { version = "0.2.3", features = ["serde"] }
num-traits = "0.2.10"
serde = { version = "1.0.103", features = ["derive"] }
nom = "5.0.1"
nom_locate = "1.0.0"
getset = "0.0.9"

# implement conversions
serde_yaml = "0.8"
toml = "0.5.5"
serde_json = "1.0.44"

[build-dependencies]
nu-build = { version = "0.9.0", path = "../nu-build" }
3 crates/nu-errors/build.rs (new file)
@@ -0,0 +1,3 @@
fn main() -> Result<(), Box<dyn std::error::Error>> {
    nu_build::build()
}
@ -1,82 +1,95 @@
|
||||
use crate::prelude::*;
|
||||
|
||||
use ansi_term::Color;
|
||||
use bigdecimal::BigDecimal;
|
||||
use derive_new::new;
|
||||
use getset::Getters;
|
||||
use language_reporting::{Diagnostic, Label, Severity};
|
||||
use nu_source::{Spanned, TracableContext};
|
||||
use nu_source::{
|
||||
b, DebugDocBuilder, HasFallibleSpan, PrettyDebug, Span, Spanned, SpannedItem, TracableContext,
|
||||
};
|
||||
use num_bigint::BigInt;
|
||||
use num_traits::ToPrimitive;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::fmt;
|
||||
use std::ops::Range;
|
||||
|
||||
// TODO: Spanned<T> -> HasSpanAndItem<T> ?
|
||||
|
||||
#[derive(Debug, Eq, PartialEq, Clone, Ord, PartialOrd, Hash, Serialize, Deserialize)]
|
||||
pub enum Description {
|
||||
Source(Spanned<String>),
|
||||
Synthetic(String),
|
||||
}
|
||||
|
||||
impl Description {
|
||||
fn from_spanned(item: Spanned<impl Into<String>>) -> Description {
|
||||
Description::Source(item.map(|s| s.into()))
|
||||
}
|
||||
|
||||
fn into_label(self) -> Result<Label<Span>, String> {
|
||||
match self {
|
||||
Description::Source(s) => Ok(Label::new_primary(s.span).with_message(s.item)),
|
||||
Description::Synthetic(s) => Err(s),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl PrettyDebug for Description {
|
||||
fn pretty(&self) -> DebugDocBuilder {
|
||||
match self {
|
||||
Description::Source(s) => b::description(&s.item),
|
||||
Description::Synthetic(s) => b::description(s),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
/// A structured reason for a ParseError. Note that parsing in nu is more like macro expansion in
|
||||
/// other languages, so the kinds of errors that can occur during parsing are more contextual than
|
||||
/// you might expect.
|
||||
#[derive(Debug, Clone, Eq, PartialEq)]
|
||||
pub enum ParseErrorReason {
|
||||
Eof {
|
||||
expected: &'static str,
|
||||
span: Span,
|
||||
},
|
||||
/// The parser encountered an EOF rather than what it was expecting
|
||||
Eof { expected: String, span: Span },
|
||||
/// The parser expected to see the end of a token stream (possibly the token
|
||||
/// stream from inside a delimited token node), but found something else.
|
||||
ExtraTokens { actual: Spanned<String> },
|
||||
/// The parser encountered something other than what it was expecting
|
||||
Mismatch {
|
||||
expected: &'static str,
|
||||
expected: String,
|
||||
actual: Spanned<String>,
|
||||
},
|
||||
|
||||
/// An unexpected internal error has occurred
|
||||
InternalError { message: Spanned<String> },
|
||||
|
||||
/// The parser tried to parse an argument for a command, but it failed for
|
||||
/// some reason
|
||||
ArgumentError {
|
||||
command: Spanned<String>,
|
||||
error: ArgumentError,
|
||||
},
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
/// A newtype for `ParseErrorReason`
|
||||
#[derive(Debug, Clone, Eq, PartialEq, Getters)]
|
||||
pub struct ParseError {
|
||||
#[get = "pub"]
|
||||
reason: ParseErrorReason,
|
||||
}
|
||||
|
||||
impl ParseError {
|
||||
pub fn unexpected_eof(expected: &'static str, span: Span) -> ParseError {
|
||||
/// Construct a [ParseErrorReason::Eof](ParseErrorReason::Eof)
|
||||
pub fn unexpected_eof(expected: impl Into<String>, span: Span) -> ParseError {
|
||||
ParseError {
|
||||
reason: ParseErrorReason::Eof { expected, span },
|
||||
reason: ParseErrorReason::Eof {
|
||||
expected: expected.into(),
|
||||
span,
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
pub fn mismatch(expected: &'static str, actual: Spanned<impl Into<String>>) -> ParseError {
|
||||
/// Construct a [ParseErrorReason::ExtraTokens](ParseErrorReason::ExtraTokens)
|
||||
pub fn extra_tokens(actual: Spanned<impl Into<String>>) -> ParseError {
|
||||
let Spanned { span, item } = actual;
|
||||
|
||||
ParseError {
|
||||
reason: ParseErrorReason::Mismatch {
|
||||
expected,
|
||||
reason: ParseErrorReason::ExtraTokens {
|
||||
actual: item.into().spanned(span),
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
/// Construct a [ParseErrorReason::Mismatch](ParseErrorReason::Mismatch)
|
||||
pub fn mismatch(expected: impl Into<String>, actual: Spanned<impl Into<String>>) -> ParseError {
|
||||
let Spanned { span, item } = actual;
|
||||
|
||||
ParseError {
|
||||
reason: ParseErrorReason::Mismatch {
|
||||
expected: expected.into(),
|
||||
actual: item.into().spanned(span),
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
/// Construct a [ParseErrorReason::InternalError](ParseErrorReason::InternalError)
|
||||
pub fn internal_error(message: Spanned<impl Into<String>>) -> ParseError {
|
||||
ParseError {
|
||||
reason: ParseErrorReason::InternalError {
|
||||
message: message.item.into().spanned(message.span),
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
/// Construct a [ParseErrorReason::ArgumentError](ParseErrorReason::ArgumentError)
|
||||
pub fn argument_error(command: Spanned<impl Into<String>>, kind: ArgumentError) -> ParseError {
|
||||
ParseError {
|
||||
reason: ParseErrorReason::ArgumentError {
|
||||
@ -87,13 +100,20 @@ impl ParseError {
|
||||
}
|
||||
}
|
||||
|
||||
/// Convert a [ParseError](ParseError) into a [ShellError](ShellError)
|
||||
impl From<ParseError> for ShellError {
|
||||
fn from(error: ParseError) -> ShellError {
|
||||
match error.reason {
|
||||
ParseErrorReason::Eof { expected, span } => ShellError::unexpected_eof(expected, span),
|
||||
ParseErrorReason::ExtraTokens { actual } => ShellError::type_error("nothing", actual),
|
||||
ParseErrorReason::Mismatch { actual, expected } => {
|
||||
ShellError::type_error(expected, actual.clone())
|
||||
ShellError::type_error(expected, actual)
|
||||
}
|
||||
ParseErrorReason::InternalError { message } => ShellError::labeled_error(
|
||||
format!("Internal error: {}", message.item),
|
||||
&message.item,
|
||||
&message.span,
|
||||
),
|
||||
ParseErrorReason::ArgumentError { command, error } => {
|
||||
ShellError::argument_error(command, error)
|
||||
}
|
||||
@ -101,11 +121,20 @@ impl From<ParseError> for ShellError {
|
||||
}
|
||||
}
|
||||
|
||||
/// ArgumentError describes various ways that the parser could fail because of unexpected arguments.
|
||||
/// Nu commands are like a combination of functions and macros, and these errors correspond to
|
||||
/// problems that could be identified during expansion based on the syntactic signature of a
|
||||
/// command.
|
||||
#[derive(Debug, Eq, PartialEq, Clone, Ord, Hash, PartialOrd, Serialize, Deserialize)]
|
||||
pub enum ArgumentError {
|
||||
/// The command specified a mandatory flag, but it was missing.
|
||||
MissingMandatoryFlag(String),
|
||||
/// The command specified a mandatory positional argument, but it was missing.
|
||||
MissingMandatoryPositional(String),
|
||||
/// A flag was found, and it should have been followed by a value, but no value was found
|
||||
MissingValueForName(String),
|
||||
/// A sequence of characters was found that was not syntactically valid (but would have
|
||||
/// been valid if the command was an external command)
|
||||
InvalidExternalWord,
|
||||
}
|
||||
|
||||
@ -132,12 +161,16 @@ impl PrettyDebug for ArgumentError {
|
||||
}
|
||||
}
|
||||
|
||||
/// A `ShellError` is a proximate error and a possible cause, which could have its own cause,
|
||||
/// creating a cause chain.
|
||||
#[derive(Debug, Eq, PartialEq, Ord, PartialOrd, Clone, Serialize, Deserialize, Hash)]
|
||||
pub struct ShellError {
|
||||
error: ProximateShellError,
|
||||
cause: Option<Box<ProximateShellError>>,
|
||||
cause: Option<Box<ShellError>>,
|
||||
}
|
||||
|
||||
/// `PrettyDebug` is for internal debugging. For user-facing debugging, [into_diagnostic](ShellError::into_diagnostic)
|
||||
/// is used, which prints an error, highlighting spans.
|
||||
impl PrettyDebug for ShellError {
|
||||
fn pretty(&self) -> DebugDocBuilder {
|
||||
match &self.error {
|
||||
@ -159,7 +192,7 @@ impl PrettyDebug for ShellError {
|
||||
+ b::space()
|
||||
+ b::description("actual:")
|
||||
+ b::space()
|
||||
+ b::option(actual.item.as_ref().map(|actual| b::description(actual))),
|
||||
+ b::option(actual.item.as_ref().map(b::description)),
|
||||
")",
|
||||
)
|
||||
}
|
||||
@ -170,12 +203,12 @@ impl PrettyDebug for ShellError {
|
||||
"(",
|
||||
b::description("expr:")
|
||||
+ b::space()
|
||||
+ expr.pretty()
|
||||
+ b::description(&expr.item)
|
||||
+ b::description(",")
|
||||
+ b::space()
|
||||
+ b::description("subpath:")
|
||||
+ b::space()
|
||||
+ subpath.pretty(),
|
||||
+ b::description(&subpath.item),
|
||||
")",
|
||||
)
|
||||
}
|
||||
@ -184,7 +217,7 @@ impl PrettyDebug for ShellError {
|
||||
+ b::space()
|
||||
+ b::delimit(
|
||||
"(",
|
||||
b::description("subpath:") + b::space() + subpath.pretty(),
|
||||
b::description("subpath:") + b::space() + b::description(&subpath.item),
|
||||
")",
|
||||
)
|
||||
}
|
||||
@ -278,6 +311,7 @@ impl serde::de::Error for ShellError {
|
||||
}
|
||||
|
||||
impl ShellError {
|
||||
/// An error that describes a mismatch between the given type and the expected type
|
||||
pub fn type_error(
|
||||
expected: impl Into<String>,
|
||||
actual: Spanned<impl Into<String>>,
|
||||
@ -294,8 +328,8 @@ impl ShellError {
|
||||
expr: Spanned<impl Into<String>>,
|
||||
) -> ShellError {
|
||||
ProximateShellError::MissingProperty {
|
||||
subpath: Description::from_spanned(subpath),
|
||||
expr: Description::from_spanned(expr),
|
||||
subpath: subpath.map(|s| s.into()),
|
||||
expr: expr.map(|e| e.into()),
|
||||
}
|
||||
.start()
|
||||
}
|
||||
@ -305,7 +339,7 @@ impl ShellError {
|
||||
integer: impl Into<Span>,
|
||||
) -> ShellError {
|
||||
ProximateShellError::InvalidIntegerIndex {
|
||||
subpath: Description::from_spanned(subpath),
|
||||
subpath: subpath.map(|s| s.into()),
|
||||
integer: integer.into(),
|
||||
}
|
||||
.start()
|
||||
@ -318,7 +352,7 @@ impl ShellError {
|
||||
.start()
|
||||
}
|
||||
|
||||
pub(crate) fn unexpected_eof(expected: impl Into<String>, span: impl Into<Span>) -> ShellError {
|
||||
pub fn unexpected_eof(expected: impl Into<String>, span: impl Into<Span>) -> ShellError {
|
||||
ProximateShellError::UnexpectedEof {
|
||||
expected: expected.into(),
|
||||
span: span.into(),
|
||||
@ -326,7 +360,7 @@ impl ShellError {
|
||||
.start()
|
||||
}
|
||||
|
||||
pub(crate) fn range_error(
|
||||
pub fn range_error(
|
||||
expected: impl Into<ExpectedRange>,
|
||||
actual: &Spanned<impl fmt::Debug>,
|
||||
operation: impl Into<String>,
|
||||
@ -339,14 +373,14 @@ impl ShellError {
|
||||
.start()
|
||||
}
|
||||
|
||||
pub(crate) fn syntax_error(problem: Spanned<impl Into<String>>) -> ShellError {
|
||||
pub fn syntax_error(problem: Spanned<impl Into<String>>) -> ShellError {
|
||||
ProximateShellError::SyntaxError {
|
||||
problem: problem.map(|p| p.into()),
|
||||
}
|
||||
.start()
|
||||
}
|
||||
|
||||
pub(crate) fn coerce_error(
|
||||
pub fn coerce_error(
|
||||
left: Spanned<impl Into<String>>,
|
||||
right: Spanned<impl Into<String>>,
|
||||
) -> ShellError {
|
||||
@ -357,10 +391,7 @@ impl ShellError {
|
||||
.start()
|
||||
}
|
||||
|
||||
pub(crate) fn argument_error(
|
||||
command: Spanned<impl Into<String>>,
|
||||
kind: ArgumentError,
|
||||
) -> ShellError {
|
||||
pub fn argument_error(command: Spanned<impl Into<String>>, kind: ArgumentError) -> ShellError {
|
||||
ProximateShellError::ArgumentError {
|
||||
command: command.map(|c| c.into()),
|
||||
error: kind,
|
||||
@ -368,7 +399,7 @@ impl ShellError {
|
||||
.start()
|
||||
}
|
||||
|
||||
pub(crate) fn parse_error(
|
||||
pub fn parse_error(
|
||||
error: nom::Err<(
|
||||
nom_locate::LocatedSpanEx<&str, TracableContext>,
|
||||
nom::error::ErrorKind,
|
||||
@ -381,13 +412,13 @@ impl ShellError {
|
||||
// TODO: Get span of EOF
|
||||
let diagnostic = Diagnostic::new(
|
||||
Severity::Error,
|
||||
format!("Parse Error: Unexpected end of line"),
|
||||
"Parse Error: Unexpected end of line".to_string(),
|
||||
);
|
||||
|
||||
ShellError::diagnostic(diagnostic)
|
||||
}
|
||||
nom::Err::Failure(span) | nom::Err::Error(span) => {
|
||||
let diagnostic = Diagnostic::new(Severity::Error, format!("Parse Error"))
|
||||
let diagnostic = Diagnostic::new(Severity::Error, "Parse Error".to_string())
|
||||
.with_label(Label::new_primary(Span::from(span.0)));
|
||||
|
||||
ShellError::diagnostic(diagnostic)
|
||||
@ -395,11 +426,11 @@ impl ShellError {
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn diagnostic(diagnostic: Diagnostic<Span>) -> ShellError {
|
||||
pub fn diagnostic(diagnostic: Diagnostic<Span>) -> ShellError {
|
||||
ProximateShellError::Diagnostic(ShellDiagnostic { diagnostic }).start()
|
||||
}
|
||||
|
||||
pub(crate) fn to_diagnostic(self) -> Diagnostic<Span> {
|
||||
pub fn into_diagnostic(self) -> Diagnostic<Span> {
|
||||
match self.error {
|
||||
ProximateShellError::MissingValue { span, reason } => {
|
||||
let mut d = Diagnostic::new(
|
||||
@ -419,15 +450,15 @@ impl ShellError {
|
||||
} => match error {
|
||||
ArgumentError::InvalidExternalWord => Diagnostic::new(
|
||||
Severity::Error,
|
||||
format!("Invalid bare word for Nu command (did you intend to invoke an external command?)"))
|
||||
"Invalid bare word for Nu command (did you intend to invoke an external command?)".to_string())
|
||||
.with_label(Label::new_primary(command.span)),
|
||||
ArgumentError::MissingMandatoryFlag(name) => Diagnostic::new(
|
||||
Severity::Error,
|
||||
format!(
|
||||
"{} requires {}{}",
|
||||
Color::Cyan.paint(&command.item),
|
||||
Color::Black.bold().paint("--"),
|
||||
Color::Black.bold().paint(name)
|
||||
Color::Green.bold().paint("--"),
|
||||
Color::Green.bold().paint(name)
|
||||
),
|
||||
)
|
||||
.with_label(Label::new_primary(command.span)),
|
||||
@ -447,8 +478,8 @@ impl ShellError {
|
||||
format!(
|
||||
"{} is missing value for flag {}{}",
|
||||
Color::Cyan.paint(&command.item),
|
||||
Color::Black.bold().paint("--"),
|
||||
Color::Black.bold().paint(name)
|
||||
Color::Green.bold().paint("--"),
|
||||
Color::Green.bold().paint(name)
|
||||
),
|
||||
)
|
||||
.with_label(Label::new_primary(command.span)),
|
||||
@ -476,7 +507,7 @@ impl ShellError {
|
||||
|
||||
ProximateShellError::UnexpectedEof {
|
||||
expected, span
|
||||
} => Diagnostic::new(Severity::Error, format!("Unexpected end of input"))
|
||||
} => Diagnostic::new(Severity::Error, "Unexpected end of input".to_string())
|
||||
.with_label(Label::new_primary(span).with_message(format!("Expected {}", expected))),
|
||||
|
||||
ProximateShellError::RangeError {
|
||||
@ -491,7 +522,7 @@ impl ShellError {
|
||||
Label::new_primary(span).with_message(format!(
|
||||
"Expected to convert {} to {} while {}, but it was out of range",
|
||||
item,
|
||||
kind.desc(),
|
||||
kind.display(),
|
||||
operation
|
||||
)),
|
||||
),
|
||||
@ -506,31 +537,33 @@ impl ShellError {
|
||||
.with_label(Label::new_primary(span).with_message(item)),
|
||||
|
||||
ProximateShellError::MissingProperty { subpath, expr, .. } => {
|
||||
let subpath = subpath.into_label();
|
||||
let expr = expr.into_label();
|
||||
|
||||
let mut diag = Diagnostic::new(Severity::Error, "Missing property");
|
||||
|
||||
match subpath {
|
||||
Ok(label) => diag = diag.with_label(label),
|
||||
Err(ty) => diag.message = format!("Missing property (for {})", ty),
|
||||
if subpath.span == Span::unknown() {
|
||||
diag.message = format!("Missing property (for {})", subpath.item);
|
||||
} else {
|
||||
let subpath = Label::new_primary(subpath.span).with_message(subpath.item);
|
||||
diag = diag.with_label(subpath);
|
||||
|
||||
if expr.span != Span::unknown() {
|
||||
let expr = Label::new_primary(expr.span).with_message(expr.item);
|
||||
diag = diag.with_label(expr)
|
||||
}
|
||||
|
||||
if let Ok(label) = expr {
|
||||
diag = diag.with_label(label);
|
||||
}
|
||||
|
||||
diag
|
||||
}
|
||||
|
||||
ProximateShellError::InvalidIntegerIndex { subpath,integer } => {
|
||||
let subpath = subpath.into_label();
|
||||
|
||||
let mut diag = Diagnostic::new(Severity::Error, "Invalid integer property");
|
||||
|
||||
match subpath {
|
||||
Ok(label) => diag = diag.with_label(label),
|
||||
Err(ty) => diag.message = format!("Invalid integer property (for {})", ty)
|
||||
if subpath.span == Span::unknown() {
|
||||
diag.message = format!("Invalid integer property (for {})", subpath.item)
|
||||
} else {
|
||||
let label = Label::new_primary(subpath.span).with_message(subpath.item);
|
||||
diag = diag.with_label(label)
|
||||
}
|
||||
|
||||
diag = diag.with_label(Label::new_secondary(integer).with_message("integer"));
|
||||
@ -579,23 +612,19 @@ impl ShellError {
|
||||
)
|
||||
}
|
||||
|
||||
// pub fn string(title: impl Into<String>) -> ShellError {
|
||||
// ProximateShellError::String(StringError::new(title.into(), String::new())).start()
|
||||
// }
|
||||
//
|
||||
// pub(crate) fn unreachable(title: impl Into<String>) -> ShellError {
|
||||
// ShellError::untagged_runtime_error(&format!("BUG: Unreachable: {}", title.into()))
|
||||
// }
|
||||
|
||||
pub(crate) fn unimplemented(title: impl Into<String>) -> ShellError {
|
||||
pub fn unimplemented(title: impl Into<String>) -> ShellError {
|
||||
ShellError::untagged_runtime_error(&format!("Unimplemented: {}", title.into()))
|
||||
}
|
||||
|
||||
pub(crate) fn unexpected(title: impl Into<String>) -> ShellError {
|
||||
pub fn unexpected(title: impl Into<String>) -> ShellError {
|
||||
ShellError::untagged_runtime_error(&format!("Unexpected: {}", title.into()))
|
||||
}
|
||||
}
|
||||
|
||||
/// `ExpectedRange` describes a range of values that was expected by a command. In addition
|
||||
/// to typical ranges, this enum allows an error to specify that the range of allowed values
|
||||
/// corresponds to a particular numeric type (which is a dominant use-case for the
|
||||
/// [RangeError](ProximateShellError::RangeError) error type).
|
||||
#[derive(Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Clone, Serialize, Deserialize)]
|
||||
pub enum ExpectedRange {
|
||||
I8,
|
||||
@ -617,6 +646,7 @@ pub enum ExpectedRange {
|
||||
Range { start: usize, end: usize },
|
||||
}
|
||||
|
||||
/// Convert a Rust range into an [ExpectedRange](ExpectedRange).
|
||||
impl From<Range<usize>> for ExpectedRange {
|
||||
fn from(range: Range<usize>) -> Self {
|
||||
ExpectedRange::Range {
|
||||
@ -628,13 +658,7 @@ impl From<Range<usize>> for ExpectedRange {
|
||||
|
||||
impl PrettyDebug for ExpectedRange {
|
||||
fn pretty(&self) -> DebugDocBuilder {
|
||||
b::description(self.desc())
|
||||
}
|
||||
}
|
||||
|
||||
impl ExpectedRange {
|
||||
fn desc(&self) -> String {
|
||||
match self {
|
||||
b::description(match self {
|
||||
ExpectedRange::I8 => "an 8-bit signed integer",
|
||||
ExpectedRange::I16 => "a 16-bit signed integer",
|
||||
ExpectedRange::I32 => "a 32-bit signed integer",
|
||||
@ -651,9 +675,10 @@ impl ExpectedRange {
|
||||
ExpectedRange::Size => "a list offset",
|
||||
ExpectedRange::BigDecimal => "a decimal",
|
||||
ExpectedRange::BigInt => "an integer",
|
||||
ExpectedRange::Range { start, end } => return format!("{} to {}", start, end),
|
||||
ExpectedRange::Range { start, end } => {
|
||||
return b::description(format!("{} to {}", start, end))
|
||||
}
|
||||
.to_string()
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
@ -671,11 +696,11 @@ pub enum ProximateShellError {
|
||||
actual: Spanned<Option<String>>,
|
||||
},
|
||||
MissingProperty {
|
||||
subpath: Description,
|
||||
expr: Description,
|
||||
subpath: Spanned<String>,
|
||||
expr: Spanned<String>,
|
||||
},
|
||||
InvalidIntegerIndex {
|
||||
subpath: Description,
|
||||
subpath: Spanned<String>,
|
||||
integer: Span,
|
||||
},
|
||||
MissingValue {
|
||||
@ -710,6 +735,30 @@ impl ProximateShellError {
|
||||
}
|
||||
}
|
||||
|
||||
impl HasFallibleSpan for ShellError {
|
||||
fn maybe_span(&self) -> Option<Span> {
|
||||
self.error.maybe_span()
|
||||
}
|
||||
}
|
||||
|
||||
impl HasFallibleSpan for ProximateShellError {
|
||||
fn maybe_span(&self) -> Option<Span> {
|
||||
Some(match self {
|
||||
ProximateShellError::SyntaxError { problem } => problem.span,
|
||||
ProximateShellError::UnexpectedEof { span, .. } => *span,
|
||||
ProximateShellError::TypeError { actual, .. } => actual.span,
|
||||
ProximateShellError::MissingProperty { subpath, .. } => subpath.span,
|
||||
ProximateShellError::InvalidIntegerIndex { subpath, .. } => subpath.span,
|
||||
ProximateShellError::MissingValue { span, .. } => return *span,
|
||||
ProximateShellError::ArgumentError { command, .. } => command.span,
|
||||
ProximateShellError::RangeError { actual_kind, .. } => actual_kind.span,
|
||||
ProximateShellError::Diagnostic(_) => return None,
|
||||
ProximateShellError::CoerceError { left, right } => left.span.until(right.span),
|
||||
ProximateShellError::UntaggedRuntimeError { .. } => return None,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
pub struct ShellDiagnostic {
|
||||
pub(crate) diagnostic: Diagnostic<Span>,
|
||||
@ -772,12 +821,6 @@ impl std::convert::From<std::io::Error> for ShellError {
|
||||
}
|
||||
}
|
||||
|
||||
impl std::convert::From<subprocess::PopenError> for ShellError {
|
||||
fn from(input: subprocess::PopenError) -> ShellError {
|
||||
ShellError::untagged_runtime_error(format!("{}", input))
|
||||
}
|
||||
}
|
||||
|
||||
impl std::convert::From<serde_yaml::Error> for ShellError {
|
||||
fn from(input: serde_yaml::Error) -> ShellError {
|
||||
ShellError::untagged_runtime_error(format!("{:?}", input))
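This file's hunks promote several ShellError constructors from pub(crate) to pub and rename to_diagnostic to into_diagnostic, so crates split out of the main binary (such as nu-parser below) can build and render errors themselves. The following is a hedged, illustrative sketch of that flow; the command name and flag are made up, and it assumes ArgumentError::MissingMandatoryFlag carries the flag name as a String and that the Spanned/SpannedItem helpers from nu-source are available as used elsewhere in this diff.

use nu_errors::{ArgumentError, ShellError};
use nu_source::{Span, SpannedItem};

// Illustrative only: report that an `ls` call is missing a mandatory
// `--path` flag (both names are invented for the example).
fn report_missing_flag(command_span: Span) {
    let err = ShellError::argument_error(
        "ls".to_string().spanned(command_span),
        ArgumentError::MissingMandatoryFlag("path".to_string()),
    );

    // `into_diagnostic` (the new name for `to_diagnostic`) consumes the error
    // and yields the diagnostic that `language_reporting::emit` can render,
    // as the `print_err` test helper later in this diff does.
    let _diagnostic = err.into_diagnostic();
}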
crates/nu-macros/Cargo.toml (new file, 13 lines added)
@@ -0,0 +1,13 @@

[package]
name = "nu-macros"
version = "0.9.0"
authors = ["Yehuda Katz <wycats@gmail.com>", "Jonathan Turner <jonathan.d.turner@gmail.com>", "Andrés N. Robalino <andres@androbtech.com>"]
edition = "2018"
description = "Core macros for building Nushell"
license = "MIT"

[lib]
doctest = false

[dependencies]
nu-protocol = { path = "../nu-protocol", version = "0.9.0" }
crates/nu-macros/src/lib.rs (new file, 25 lines added)
@@ -0,0 +1,25 @@

#[macro_export]
macro_rules! signature {
    (def $name:tt {
        $usage:tt
        $(
            $positional_name:tt $positional_ty:tt - $positional_desc:tt
        )*
    }) => {{
        let signature = Signature::new(stringify!($name)).desc($usage);
        $(
            $crate::positional! { signature, $positional_name $positional_ty - $positional_desc }
        )*
        signature
    }};
}

#[macro_export]
macro_rules! positional {
    ($ident:tt, $name:tt (optional $shape:tt) - $desc:tt) => {
        let $ident = $ident.optional(stringify!($name), SyntaxShape::$shape, $desc);
    };
    ($ident:tt, $name:tt ($shape:tt)- $desc:tt) => {
        let $ident = $ident.required(stringify!($name), SyntaxShape::$shape, $desc);
    };
}
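The signature! and positional! macros above build a nu_protocol::Signature from a small def-style block, mirroring the commented "def cd" example in nu-parser's hir/signature.rs later in this diff. Below is a hedged sketch of one invocation; it assumes Signature and SyntaxShape are in scope (the generated code names them unqualified) and that SyntaxShape::Path is the right shape for a directory argument.

use nu_macros::signature;
use nu_protocol::{Signature, SyntaxShape};

// Illustrative only: a `cd`-like signature with one optional positional.
fn cd_signature() -> Signature {
    signature! {
        def cd {
            "Change to a new path."
            directory(optional Path) - "the directory to change to"
        }
    }
    // Roughly expands to:
    //   let signature = Signature::new(stringify!(cd)).desc("Change to a new path.");
    //   let signature = signature.optional(stringify!(directory), SyntaxShape::Path,
    //       "the directory to change to");
    //   signature
}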
crates/nu-parser/Cargo.toml (new file, 48 lines added)
@@ -0,0 +1,48 @@

[package]
name = "nu-parser"
version = "0.9.0"
authors = ["Yehuda Katz <wycats@gmail.com>", "Jonathan Turner <jonathan.d.turner@gmail.com>", "Andrés N. Robalino <andres@androbtech.com>"]
edition = "2018"
description = "Core parser used in Nushell"
license = "MIT"

[lib]
doctest = false

[dependencies]
nu-errors = { path = "../nu-errors", version = "0.9.0" }
nu-source = { path = "../nu-source", version = "0.9.0" }
nu-protocol = { path = "../nu-protocol", version = "0.9.0" }

pretty_env_logger = "0.3.1"
pretty = "0.5.2"
termcolor = "1.0.5"
log = "0.4.8"
indexmap = { version = "1.3.0", features = ["serde-1"] }
serde = { version = "1.0.102", features = ["derive"] }
nom = "5.0.1"
nom_locate = "1.0.0"
nom-tracable = "0.4.1"
num-traits = "0.2.8"
num-bigint = { version = "0.2.3", features = ["serde"] }
bigdecimal = { version = "0.1.0", features = ["serde"] }
derive-new = "0.5.8"
getset = "0.0.9"
cfg-if = "0.1"
itertools = "0.8.1"
shellexpand = "1.0.0"
ansi_term = "0.12.1"
ptree = {version = "0.2" }
language-reporting = "0.4.0"
unicode-xid = "0.2.0"
enumflags2 = "0.6.2"

[dev-dependencies]
pretty_assertions = "0.6.1"

[build-dependencies]
nu-build = { version = "0.9.0", path = "../nu-build" }

[features]
stable = []
trace = ["nom-tracable/trace"]

crates/nu-parser/build.rs (new file, 3 lines added)
@@ -0,0 +1,3 @@

fn main() -> Result<(), Box<dyn std::error::Error>> {
    nu_build::build()
}
crates/nu-parser/src/commands.rs (new file, 34 lines added)
@@ -0,0 +1,34 @@

pub mod classified;

use crate::commands::classified::external::{ExternalArg, ExternalArgs, ExternalCommand};
use crate::commands::classified::ClassifiedCommand;
use crate::hir::expand_external_tokens::ExternalTokensShape;
use crate::hir::tokens_iterator::TokensIterator;
use nu_errors::ParseError;
use nu_source::{Spanned, Tagged};

// Classify this command as an external command, which doesn't give special meaning
// to nu syntactic constructs, and passes all arguments to the external command as
// strings.
pub(crate) fn external_command(
    tokens: &mut TokensIterator,
    name: Tagged<&str>,
) -> Result<ClassifiedCommand, ParseError> {
    let Spanned { item, span } = tokens.expand_infallible(ExternalTokensShape).tokens;
    let full_span = name.span().until(span);

    Ok(ClassifiedCommand::External(ExternalCommand {
        name: name.to_string(),
        name_tag: name.tag(),
        args: ExternalArgs {
            list: item
                .iter()
                .map(|x| ExternalArg {
                    tag: x.span.into(),
                    arg: x.item.clone(),
                })
                .collect(),
            span: full_span,
        },
    }))
}
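external_command above hands every argument to the external program as a plain string, wrapped in ExternalArg values that keep their original spans. The following hand-built value is a hedged illustration of the shape it returns for a line like "cargo +nightly run"; the spans are invented, the module path in the import is assumed, and in the real parser the name token and argument tokens come from the token stream rather than literals.

use nu_parser::commands::classified::external::{ExternalArg, ExternalArgs, ExternalCommand};
use nu_source::{Span, Tag};

// Illustrative only: a stand-in for what `external_command` produces for
// `cargo +nightly run`, with hand-written spans.
fn sample_external() -> ExternalCommand {
    let arg = |text: &str, start: usize, end: usize| ExternalArg {
        arg: text.to_string(),
        tag: Tag {
            span: Span::new(start, end),
            anchor: None,
        },
    };

    ExternalCommand {
        name: "cargo".to_string(),
        name_tag: Tag {
            span: Span::new(0, 5),
            anchor: None,
        },
        args: ExternalArgs {
            list: vec![arg("+nightly", 6, 14), arg("run", 15, 18)],
            span: Span::new(0, 18),
        },
    }
}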
100
crates/nu-parser/src/commands/classified.rs
Normal file
100
crates/nu-parser/src/commands/classified.rs
Normal file
@ -0,0 +1,100 @@
|
||||
pub mod external;
|
||||
pub mod internal;
|
||||
|
||||
use crate::commands::classified::external::ExternalCommand;
|
||||
use crate::commands::classified::internal::InternalCommand;
|
||||
use crate::hir;
|
||||
use crate::parse::token_tree::SpannedToken;
|
||||
use derive_new::new;
|
||||
use nu_errors::ParseError;
|
||||
use nu_source::{b, DebugDocBuilder, HasSpan, PrettyDebugWithSource, Span};
|
||||
|
||||
#[derive(Debug, Clone, Eq, PartialEq)]
|
||||
pub enum ClassifiedCommand {
|
||||
#[allow(unused)]
|
||||
Expr(SpannedToken),
|
||||
#[allow(unused)]
|
||||
Dynamic(hir::Call),
|
||||
Internal(InternalCommand),
|
||||
External(ExternalCommand),
|
||||
Error(ParseError),
|
||||
}
|
||||
|
||||
impl PrettyDebugWithSource for ClassifiedCommand {
|
||||
fn pretty_debug(&self, source: &str) -> DebugDocBuilder {
|
||||
match self {
|
||||
ClassifiedCommand::Expr(token) => b::typed("command", token.pretty_debug(source)),
|
||||
ClassifiedCommand::Dynamic(call) => b::typed("command", call.pretty_debug(source)),
|
||||
ClassifiedCommand::Error(_) => b::error("no command"),
|
||||
ClassifiedCommand::Internal(internal) => internal.pretty_debug(source),
|
||||
ClassifiedCommand::External(external) => external.pretty_debug(source),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl HasSpan for ClassifiedCommand {
|
||||
fn span(&self) -> Span {
|
||||
match self {
|
||||
ClassifiedCommand::Expr(node) => node.span(),
|
||||
ClassifiedCommand::Internal(command) => command.span(),
|
||||
ClassifiedCommand::Dynamic(call) => call.span,
|
||||
ClassifiedCommand::Error(_) => Span::unknown(),
|
||||
ClassifiedCommand::External(command) => command.span(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(new, Debug, Eq, PartialEq)]
|
||||
pub(crate) struct DynamicCommand {
|
||||
pub(crate) args: hir::Call,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct Commands {
|
||||
pub list: Vec<ClassifiedCommand>,
|
||||
pub span: Span,
|
||||
}
|
||||
|
||||
impl std::ops::Deref for Commands {
|
||||
type Target = [ClassifiedCommand];
|
||||
|
||||
fn deref(&self) -> &Self::Target {
|
||||
&self.list
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct ClassifiedPipeline {
|
||||
pub commands: Commands,
|
||||
// this is not a Result to make it crystal clear that these shapes
|
||||
// aren't intended to be used directly with `?`
|
||||
pub failed: Option<nu_errors::ParseError>,
|
||||
}
|
||||
|
||||
impl ClassifiedPipeline {
|
||||
pub fn commands(list: Vec<ClassifiedCommand>, span: impl Into<Span>) -> ClassifiedPipeline {
|
||||
ClassifiedPipeline {
|
||||
commands: Commands {
|
||||
list,
|
||||
span: span.into(),
|
||||
},
|
||||
failed: None,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl HasSpan for ClassifiedPipeline {
|
||||
fn span(&self) -> Span {
|
||||
self.commands.span
|
||||
}
|
||||
}
|
||||
|
||||
impl PrettyDebugWithSource for ClassifiedPipeline {
|
||||
fn pretty_debug(&self, source: &str) -> DebugDocBuilder {
|
||||
b::intersperse(
|
||||
self.commands.iter().map(|c| c.pretty_debug(source)),
|
||||
b::operator(" | "),
|
||||
)
|
||||
.or(b::delimit("<", b::description("empty pipeline"), ">"))
|
||||
}
|
||||
}
|
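The comment on ClassifiedPipeline above explains why failed is an Option rather than a Result: classification always yields the commands it managed to build, and a parse problem rides alongside them instead of replacing them, so callers cannot short-circuit with `?`. A hedged sketch of how a caller might consume that pair, assuming the module path in the import is how these types are exposed:

use nu_parser::commands::classified::{ClassifiedCommand, ClassifiedPipeline};

// Illustrative only: report the parse problem (if any) and still walk the
// commands that did classify. `Commands` derefs to `[ClassifiedCommand]`,
// so iteration works directly on `pipeline.commands`.
fn report(pipeline: &ClassifiedPipeline) {
    if let Some(err) = &pipeline.failed {
        eprintln!("pipeline had a parse problem: {:?}", err);
    }

    for command in pipeline.commands.iter() {
        match command {
            ClassifiedCommand::Internal(internal) => eprintln!("internal: {}", internal.name),
            ClassifiedCommand::External(external) => eprintln!("external: {}", external.name),
            ClassifiedCommand::Error(err) => eprintln!("error: {:?}", err),
            _ => {}
        }
    }
}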
81
crates/nu-parser/src/commands/classified/external.rs
Normal file
81
crates/nu-parser/src/commands/classified/external.rs
Normal file
@ -0,0 +1,81 @@
|
||||
use nu_source::{b, DebugDocBuilder, HasSpan, PrettyDebug, Span, Tag};
|
||||
|
||||
#[derive(Debug, Clone, Eq, PartialEq)]
|
||||
pub struct ExternalArg {
|
||||
pub arg: String,
|
||||
pub tag: Tag,
|
||||
}
|
||||
|
||||
impl ExternalArg {
|
||||
pub fn has(&self, name: &str) -> bool {
|
||||
self.arg == name
|
||||
}
|
||||
|
||||
pub fn is_it(&self) -> bool {
|
||||
self.has("$it")
|
||||
}
|
||||
}
|
||||
|
||||
impl std::ops::Deref for ExternalArg {
|
||||
type Target = str;
|
||||
|
||||
fn deref(&self) -> &str {
|
||||
&self.arg
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Eq, PartialEq)]
|
||||
pub struct ExternalArgs {
|
||||
pub list: Vec<ExternalArg>,
|
||||
pub span: Span,
|
||||
}
|
||||
|
||||
impl ExternalArgs {
|
||||
pub fn iter(&self) -> impl Iterator<Item = &ExternalArg> {
|
||||
self.list.iter()
|
||||
}
|
||||
}
|
||||
|
||||
impl std::ops::Deref for ExternalArgs {
|
||||
type Target = [ExternalArg];
|
||||
|
||||
fn deref(&self) -> &[ExternalArg] {
|
||||
&self.list
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Eq, PartialEq)]
|
||||
pub struct ExternalCommand {
|
||||
pub name: String,
|
||||
|
||||
pub name_tag: Tag,
|
||||
pub args: ExternalArgs,
|
||||
}
|
||||
|
||||
impl ExternalCommand {
|
||||
pub fn has_it_argument(&self) -> bool {
|
||||
self.args.iter().any(|arg| arg.has("$it"))
|
||||
}
|
||||
}
|
||||
|
||||
impl PrettyDebug for ExternalCommand {
|
||||
fn pretty(&self) -> DebugDocBuilder {
|
||||
b::typed(
|
||||
"external command",
|
||||
b::description(&self.name)
|
||||
+ b::preceded(
|
||||
b::space(),
|
||||
b::intersperse(
|
||||
self.args.iter().map(|a| b::primitive(a.arg.to_string())),
|
||||
b::space(),
|
||||
),
|
||||
),
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
impl HasSpan for ExternalCommand {
|
||||
fn span(&self) -> Span {
|
||||
self.name_tag.span.until(self.args.span)
|
||||
}
|
||||
}
|
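ExternalArg::is_it and ExternalCommand::has_it_argument above exist so the shell can tell whether an external invocation mentions $it and therefore needs per-row substitution before it is spawned. A small hedged sketch, spelling the same check out with is_it (the import path is assumed):

use nu_parser::commands::classified::external::ExternalCommand;

// Illustrative only: equivalent to `command.has_it_argument()`, written out
// to show the per-argument check.
fn needs_it_substitution(command: &ExternalCommand) -> bool {
    command.args.iter().any(|arg| arg.is_it())
}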
crates/nu-parser/src/commands/classified/internal.rs (new file, 28 lines added)
@@ -0,0 +1,28 @@

use crate::hir;

use derive_new::new;
use nu_source::{b, DebugDocBuilder, HasSpan, PrettyDebugWithSource, Span, Tag};

#[derive(new, Debug, Clone, Eq, PartialEq)]
pub struct InternalCommand {
    pub name: String,
    pub name_tag: Tag,
    pub args: hir::Call,
}

impl PrettyDebugWithSource for InternalCommand {
    fn pretty_debug(&self, source: &str) -> DebugDocBuilder {
        b::typed(
            "internal command",
            b::description(&self.name) + b::space() + self.args.pretty_debug(source),
        )
    }
}

impl HasSpan for InternalCommand {
    fn span(&self) -> Span {
        let start = self.name_tag.span;

        start.until(self.args.span)
    }
}
486
crates/nu-parser/src/hir.rs
Normal file
486
crates/nu-parser/src/hir.rs
Normal file
@ -0,0 +1,486 @@
|
||||
pub(crate) mod baseline_parse;
|
||||
pub(crate) mod binary;
|
||||
pub(crate) mod expand_external_tokens;
|
||||
pub(crate) mod external_command;
|
||||
pub(crate) mod named;
|
||||
pub(crate) mod path;
|
||||
pub(crate) mod range;
|
||||
pub mod syntax_shape;
|
||||
pub(crate) mod tokens_iterator;
|
||||
|
||||
use crate::hir::syntax_shape::Member;
|
||||
use crate::parse::operator::CompareOperator;
|
||||
use crate::parse::parser::Number;
|
||||
use crate::parse::unit::Unit;
|
||||
use derive_new::new;
|
||||
use getset::Getters;
|
||||
use nu_protocol::{PathMember, ShellTypeName};
|
||||
use nu_source::{
|
||||
b, DebugDocBuilder, HasSpan, IntoSpanned, PrettyDebug, PrettyDebugRefineKind,
|
||||
PrettyDebugWithSource, Span, Spanned,
|
||||
};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::path::PathBuf;
|
||||
|
||||
use crate::parse::number::RawNumber;
|
||||
|
||||
pub(crate) use self::binary::Binary;
|
||||
pub(crate) use self::path::Path;
|
||||
pub(crate) use self::range::Range;
|
||||
pub(crate) use self::tokens_iterator::TokensIterator;
|
||||
|
||||
pub use self::external_command::ExternalCommand;
|
||||
pub use self::named::{NamedArguments, NamedValue};
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct Signature {
|
||||
unspanned: nu_protocol::Signature,
|
||||
span: Span,
|
||||
}
|
||||
|
||||
impl Signature {
|
||||
pub fn new(unspanned: nu_protocol::Signature, span: impl Into<Span>) -> Signature {
|
||||
Signature {
|
||||
unspanned,
|
||||
span: span.into(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl HasSpan for Signature {
|
||||
fn span(&self) -> Span {
|
||||
self.span
|
||||
}
|
||||
}
|
||||
|
||||
impl PrettyDebugWithSource for Signature {
|
||||
fn pretty_debug(&self, source: &str) -> DebugDocBuilder {
|
||||
self.unspanned.pretty_debug(source)
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Eq, PartialEq, Getters, Serialize, Deserialize, new)]
|
||||
pub struct Call {
|
||||
#[get = "pub(crate)"]
|
||||
pub head: Box<SpannedExpression>,
|
||||
#[get = "pub(crate)"]
|
||||
pub positional: Option<Vec<SpannedExpression>>,
|
||||
#[get = "pub(crate)"]
|
||||
pub named: Option<NamedArguments>,
|
||||
pub span: Span,
|
||||
}
|
||||
|
||||
impl Call {
|
||||
pub fn switch_preset(&self, switch: &str) -> bool {
|
||||
self.named
|
||||
.as_ref()
|
||||
.and_then(|n| n.get(switch))
|
||||
.map(|t| match t {
|
||||
NamedValue::PresentSwitch(_) => true,
|
||||
_ => false,
|
||||
})
|
||||
.unwrap_or(false)
|
||||
}
|
||||
}
|
||||
|
||||
impl PrettyDebugWithSource for Call {
|
||||
fn refined_pretty_debug(&self, refine: PrettyDebugRefineKind, source: &str) -> DebugDocBuilder {
|
||||
match refine {
|
||||
PrettyDebugRefineKind::ContextFree => self.pretty_debug(source),
|
||||
PrettyDebugRefineKind::WithContext => {
|
||||
self.head
|
||||
.refined_pretty_debug(PrettyDebugRefineKind::WithContext, source)
|
||||
+ b::preceded_option(
|
||||
Some(b::space()),
|
||||
self.positional.as_ref().map(|pos| {
|
||||
b::intersperse(
|
||||
pos.iter().map(|expr| {
|
||||
expr.refined_pretty_debug(
|
||||
PrettyDebugRefineKind::WithContext,
|
||||
source,
|
||||
)
|
||||
}),
|
||||
b::space(),
|
||||
)
|
||||
}),
|
||||
)
|
||||
+ b::preceded_option(
|
||||
Some(b::space()),
|
||||
self.named.as_ref().map(|named| {
|
||||
named.refined_pretty_debug(PrettyDebugRefineKind::WithContext, source)
|
||||
}),
|
||||
)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn pretty_debug(&self, source: &str) -> DebugDocBuilder {
|
||||
b::typed(
|
||||
"call",
|
||||
self.refined_pretty_debug(PrettyDebugRefineKind::WithContext, source),
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)]
|
||||
pub enum Expression {
|
||||
Literal(Literal),
|
||||
ExternalWord,
|
||||
Synthetic(Synthetic),
|
||||
Variable(Variable),
|
||||
Binary(Box<Binary>),
|
||||
Range(Box<Range>),
|
||||
Block(Vec<SpannedExpression>),
|
||||
List(Vec<SpannedExpression>),
|
||||
Path(Box<Path>),
|
||||
|
||||
FilePath(PathBuf),
|
||||
ExternalCommand(ExternalCommand),
|
||||
Command(Span),
|
||||
|
||||
Boolean(bool),
|
||||
}
|
||||
|
||||
impl ShellTypeName for Expression {
|
||||
fn type_name(&self) -> &'static str {
|
||||
match self {
|
||||
Expression::Literal(literal) => literal.type_name(),
|
||||
Expression::Synthetic(synthetic) => synthetic.type_name(),
|
||||
Expression::Command(..) => "command",
|
||||
Expression::ExternalWord => "external word",
|
||||
Expression::FilePath(..) => "file path",
|
||||
Expression::Variable(..) => "variable",
|
||||
Expression::List(..) => "list",
|
||||
Expression::Binary(..) => "binary",
|
||||
Expression::Range(..) => "range",
|
||||
Expression::Block(..) => "block",
|
||||
Expression::Path(..) => "variable path",
|
||||
Expression::Boolean(..) => "boolean",
|
||||
Expression::ExternalCommand(..) => "external",
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)]
|
||||
pub enum Synthetic {
|
||||
String(String),
|
||||
}
|
||||
|
||||
impl ShellTypeName for Synthetic {
|
||||
fn type_name(&self) -> &'static str {
|
||||
match self {
|
||||
Synthetic::String(_) => "string",
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl IntoSpanned for Expression {
|
||||
type Output = SpannedExpression;
|
||||
|
||||
fn into_spanned(self, span: impl Into<Span>) -> Self::Output {
|
||||
SpannedExpression {
|
||||
expr: self,
|
||||
span: span.into(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Expression {
|
||||
pub fn into_expr(self, span: impl Into<Span>) -> SpannedExpression {
|
||||
self.into_spanned(span)
|
||||
}
|
||||
|
||||
pub fn into_unspanned_expr(self) -> SpannedExpression {
|
||||
SpannedExpression {
|
||||
expr: self,
|
||||
span: Span::unknown(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)]
|
||||
pub struct SpannedExpression {
|
||||
pub expr: Expression,
|
||||
pub span: Span,
|
||||
}
|
||||
|
||||
impl std::ops::Deref for SpannedExpression {
|
||||
type Target = Expression;
|
||||
|
||||
fn deref(&self) -> &Expression {
|
||||
&self.expr
|
||||
}
|
||||
}
|
||||
|
||||
impl HasSpan for SpannedExpression {
|
||||
fn span(&self) -> Span {
|
||||
self.span
|
||||
}
|
||||
}
|
||||
|
||||
impl ShellTypeName for SpannedExpression {
|
||||
fn type_name(&self) -> &'static str {
|
||||
self.expr.type_name()
|
||||
}
|
||||
}
|
||||
|
||||
impl PrettyDebugWithSource for SpannedExpression {
|
||||
fn refined_pretty_debug(&self, refine: PrettyDebugRefineKind, source: &str) -> DebugDocBuilder {
|
||||
match refine {
|
||||
PrettyDebugRefineKind::ContextFree => self.refined_pretty_debug(refine, source),
|
||||
PrettyDebugRefineKind::WithContext => match &self.expr {
|
||||
Expression::Literal(literal) => literal
|
||||
.clone()
|
||||
.into_spanned(self.span)
|
||||
.refined_pretty_debug(refine, source),
|
||||
Expression::ExternalWord => {
|
||||
b::delimit("e\"", b::primitive(self.span.slice(source)), "\"").group()
|
||||
}
|
||||
Expression::Synthetic(s) => match s {
|
||||
Synthetic::String(_) => {
|
||||
b::delimit("s\"", b::primitive(self.span.slice(source)), "\"").group()
|
||||
}
|
||||
},
|
||||
Expression::Variable(Variable::Other(_)) => b::keyword(self.span.slice(source)),
|
||||
Expression::Variable(Variable::It(_)) => b::keyword("$it"),
|
||||
Expression::Binary(binary) => binary.pretty_debug(source),
|
||||
Expression::Range(range) => range.pretty_debug(source),
|
||||
Expression::Block(_) => b::opaque("block"),
|
||||
Expression::List(list) => b::delimit(
|
||||
"[",
|
||||
b::intersperse(
|
||||
list.iter()
|
||||
.map(|item| item.refined_pretty_debug(refine, source)),
|
||||
b::space(),
|
||||
),
|
||||
"]",
|
||||
),
|
||||
Expression::Path(path) => path.pretty_debug(source),
|
||||
Expression::FilePath(path) => b::typed("path", b::primitive(path.display())),
|
||||
Expression::ExternalCommand(external) => {
|
||||
b::keyword("^") + b::keyword(external.name.slice(source))
|
||||
}
|
||||
Expression::Command(command) => b::keyword(command.slice(source)),
|
||||
Expression::Boolean(boolean) => match boolean {
|
||||
true => b::primitive("$yes"),
|
||||
false => b::primitive("$no"),
|
||||
},
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
fn pretty_debug(&self, source: &str) -> DebugDocBuilder {
|
||||
match &self.expr {
|
||||
Expression::Literal(literal) => {
|
||||
literal.clone().into_spanned(self.span).pretty_debug(source)
|
||||
}
|
||||
Expression::ExternalWord => {
|
||||
b::typed("external word", b::primitive(self.span.slice(source)))
|
||||
}
|
||||
Expression::Synthetic(s) => match s {
|
||||
Synthetic::String(s) => b::typed("synthetic", b::primitive(format!("{:?}", s))),
|
||||
},
|
||||
Expression::Variable(Variable::Other(_)) => b::keyword(self.span.slice(source)),
|
||||
Expression::Variable(Variable::It(_)) => b::keyword("$it"),
|
||||
Expression::Binary(binary) => binary.pretty_debug(source),
|
||||
Expression::Range(range) => range.pretty_debug(source),
|
||||
Expression::Block(_) => b::opaque("block"),
|
||||
Expression::List(list) => b::delimit(
|
||||
"[",
|
||||
b::intersperse(
|
||||
list.iter().map(|item| item.pretty_debug(source)),
|
||||
b::space(),
|
||||
),
|
||||
"]",
|
||||
),
|
||||
Expression::Path(path) => path.pretty_debug(source),
|
||||
Expression::FilePath(path) => b::typed("path", b::primitive(path.display())),
|
||||
Expression::ExternalCommand(external) => b::typed(
|
||||
"command",
|
||||
b::keyword("^") + b::primitive(external.name.slice(source)),
|
||||
),
|
||||
Expression::Command(command) => {
|
||||
b::typed("command", b::primitive(command.slice(source)))
|
||||
}
|
||||
Expression::Boolean(boolean) => match boolean {
|
||||
true => b::primitive("$yes"),
|
||||
false => b::primitive("$no"),
|
||||
},
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Expression {
|
||||
pub fn number(i: impl Into<Number>) -> Expression {
|
||||
Expression::Literal(Literal::Number(i.into()))
|
||||
}
|
||||
|
||||
pub fn size(i: impl Into<Number>, unit: impl Into<Unit>) -> Expression {
|
||||
Expression::Literal(Literal::Size(i.into(), unit.into()))
|
||||
}
|
||||
|
||||
pub fn string(inner: impl Into<Span>) -> Expression {
|
||||
Expression::Literal(Literal::String(inner.into()))
|
||||
}
|
||||
|
||||
pub fn synthetic_string(string: impl Into<String>) -> Expression {
|
||||
Expression::Synthetic(Synthetic::String(string.into()))
|
||||
}
|
||||
|
||||
pub fn column_path(members: Vec<Member>) -> Expression {
|
||||
Expression::Literal(Literal::ColumnPath(members))
|
||||
}
|
||||
|
||||
pub fn path(head: SpannedExpression, tail: Vec<impl Into<PathMember>>) -> Expression {
|
||||
let tail = tail.into_iter().map(|t| t.into()).collect();
|
||||
Expression::Path(Box::new(Path::new(head, tail)))
|
||||
}
|
||||
|
||||
pub fn dot_member(head: SpannedExpression, next: impl Into<PathMember>) -> Expression {
|
||||
let SpannedExpression { expr: item, span } = head;
|
||||
let next = next.into();
|
||||
|
||||
match item {
|
||||
Expression::Path(path) => {
|
||||
let (head, mut tail) = path.parts();
|
||||
|
||||
tail.push(next);
|
||||
Expression::path(head, tail)
|
||||
}
|
||||
|
||||
other => Expression::path(other.into_expr(span), vec![next]),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn infix(
|
||||
left: SpannedExpression,
|
||||
op: Spanned<impl Into<CompareOperator>>,
|
||||
right: SpannedExpression,
|
||||
) -> Expression {
|
||||
Expression::Binary(Box::new(Binary::new(left, op.map(|o| o.into()), right)))
|
||||
}
|
||||
|
||||
pub fn range(left: SpannedExpression, op: Span, right: SpannedExpression) -> Expression {
|
||||
Expression::Range(Box::new(Range::new(left, op, right)))
|
||||
}
|
||||
|
||||
pub fn file_path(path: impl Into<PathBuf>) -> Expression {
|
||||
Expression::FilePath(path.into())
|
||||
}
|
||||
|
||||
pub fn list(list: Vec<SpannedExpression>) -> Expression {
|
||||
Expression::List(list)
|
||||
}
|
||||
|
||||
pub fn bare() -> Expression {
|
||||
Expression::Literal(Literal::Bare)
|
||||
}
|
||||
|
||||
pub fn pattern(inner: impl Into<String>) -> Expression {
|
||||
Expression::Literal(Literal::GlobPattern(inner.into()))
|
||||
}
|
||||
|
||||
pub fn variable(inner: impl Into<Span>) -> Expression {
|
||||
Expression::Variable(Variable::Other(inner.into()))
|
||||
}
|
||||
|
||||
pub fn external_command(inner: impl Into<Span>) -> Expression {
|
||||
Expression::ExternalCommand(ExternalCommand::new(inner.into()))
|
||||
}
|
||||
|
||||
pub fn it_variable(inner: impl Into<Span>) -> Expression {
|
||||
Expression::Variable(Variable::It(inner.into()))
|
||||
}
|
||||
}
|
||||
|
||||
impl From<Spanned<Path>> for SpannedExpression {
|
||||
fn from(path: Spanned<Path>) -> SpannedExpression {
|
||||
Expression::Path(Box::new(path.item)).into_expr(path.span)
|
||||
}
|
||||
}
|
||||
|
||||
/// Literals are expressions that are:
|
||||
///
|
||||
/// 1. Copy
|
||||
/// 2. Can be evaluated without additional context
|
||||
/// 3. Evaluation cannot produce an error
|
||||
#[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)]
|
||||
pub enum Literal {
|
||||
Number(Number),
|
||||
Size(Number, Unit),
|
||||
String(Span),
|
||||
GlobPattern(String),
|
||||
ColumnPath(Vec<Member>),
|
||||
Bare,
|
||||
}
|
||||
|
||||
impl Literal {
|
||||
pub fn into_spanned(self, span: impl Into<Span>) -> SpannedLiteral {
|
||||
SpannedLiteral {
|
||||
literal: self,
|
||||
span: span.into(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)]
|
||||
pub struct SpannedLiteral {
|
||||
pub literal: Literal,
|
||||
pub span: Span,
|
||||
}
|
||||
|
||||
impl ShellTypeName for Literal {
|
||||
fn type_name(&self) -> &'static str {
|
||||
match &self {
|
||||
Literal::Number(..) => "number",
|
||||
Literal::Size(..) => "size",
|
||||
Literal::String(..) => "string",
|
||||
Literal::ColumnPath(..) => "column path",
|
||||
Literal::Bare => "string",
|
||||
Literal::GlobPattern(_) => "pattern",
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl PrettyDebugWithSource for SpannedLiteral {
|
||||
fn refined_pretty_debug(&self, refine: PrettyDebugRefineKind, source: &str) -> DebugDocBuilder {
|
||||
match refine {
|
||||
PrettyDebugRefineKind::ContextFree => self.pretty_debug(source),
|
||||
PrettyDebugRefineKind::WithContext => match &self.literal {
|
||||
Literal::Number(number) => number.pretty(),
|
||||
Literal::Size(number, unit) => (number.pretty() + unit.pretty()).group(),
|
||||
Literal::String(string) => b::primitive(format!("{:?}", string.slice(source))),
|
||||
Literal::GlobPattern(pattern) => b::primitive(pattern),
|
||||
Literal::ColumnPath(path) => {
|
||||
b::intersperse_with_source(path.iter(), b::space(), source)
|
||||
}
|
||||
Literal::Bare => b::delimit("b\"", b::primitive(self.span.slice(source)), "\""),
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
fn pretty_debug(&self, source: &str) -> DebugDocBuilder {
|
||||
match &self.literal {
|
||||
Literal::Number(number) => number.pretty(),
|
||||
Literal::Size(number, unit) => {
|
||||
b::typed("size", (number.pretty() + unit.pretty()).group())
|
||||
}
|
||||
Literal::String(string) => b::typed(
|
||||
"string",
|
||||
b::primitive(format!("{:?}", string.slice(source))),
|
||||
),
|
||||
Literal::GlobPattern(pattern) => b::typed("pattern", b::primitive(pattern)),
|
||||
Literal::ColumnPath(path) => b::typed(
|
||||
"column path",
|
||||
b::intersperse_with_source(path.iter(), b::space(), source),
|
||||
),
|
||||
Literal::Bare => b::typed("bare", b::primitive(self.span.slice(source))),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)]
|
||||
pub enum Variable {
|
||||
It(Span),
|
||||
Other(Span),
|
||||
}
|
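hir.rs above exposes builder-style constructors (Expression::path, dot_member, it_variable, into_expr, and so on) that the baseline-parse tests later in this diff exercise. Below is a hedged sketch that builds the HIR the parser would produce for "$it.cpu" by hand, mirroring the test_parse_path case; the spans are written out manually and the import paths are assumptions about how the crate re-exports these types.

use nu_parser::hir::{Expression, SpannedExpression};
use nu_protocol::PathMember;
use nu_source::Span;

// Illustrative only: the HIR for `$it.cpu`, with hand-written spans.
fn it_cpu() -> SpannedExpression {
    let it_span = Span::new(0, 3); // covers `$it`
    let cpu_span = Span::new(4, 7); // covers `cpu`

    Expression::path(
        Expression::it_variable(it_span).into_expr(it_span),
        vec![PathMember::string("cpu", cpu_span)],
    )
    .into_expr(it_span.until(cpu_span))
}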
crates/nu-parser/src/hir/baseline_parse.rs (new file, 2 lines added)
@@ -0,0 +1,2 @@

#[cfg(test)]
pub mod tests;
297
crates/nu-parser/src/hir/baseline_parse/tests.rs
Normal file
297
crates/nu-parser/src/hir/baseline_parse/tests.rs
Normal file
@ -0,0 +1,297 @@
|
||||
use crate::commands::classified::{internal::InternalCommand, ClassifiedCommand};
|
||||
use crate::hir::expand_external_tokens::{ExternalTokensShape, ExternalTokensSyntax};
|
||||
use crate::hir::{
|
||||
self, named::NamedValue, syntax_shape::*, Expression, NamedArguments, SpannedExpression,
|
||||
TokensIterator,
|
||||
};
|
||||
use crate::parse::files::Files;
|
||||
use crate::parse::token_tree_builder::{CurriedToken, TokenTreeBuilder as b};
|
||||
use crate::SpannedToken;
|
||||
use derive_new::new;
|
||||
use indexmap::IndexMap;
|
||||
use nu_errors::{ParseError, ShellError};
|
||||
use nu_protocol::{outln, PathMember, Signature, SyntaxShape};
|
||||
use nu_source::{HasSpan, PrettyDebugWithSource, Span, SpannedItem, Tag, Text};
|
||||
use pretty_assertions::assert_eq;
|
||||
use std::fmt::Debug;
|
||||
|
||||
#[test]
|
||||
fn test_parse_external() {
|
||||
parse_tokens(
|
||||
fallible(ExternalTokensShape),
|
||||
"5kb",
|
||||
vec![b::bare("5kb")],
|
||||
|tokens| {
|
||||
ExternalTokensSyntax::new(
|
||||
vec![format!("5kb").spanned(tokens[0].span())].spanned(tokens[0].span()),
|
||||
)
|
||||
},
|
||||
);
|
||||
|
||||
parse_tokens(
|
||||
fallible(ExternalTokensShape),
|
||||
"cargo +nightly run -- --features all",
|
||||
vec![
|
||||
b::bare("cargo"),
|
||||
b::sp(),
|
||||
b::external_word("+nightly"),
|
||||
b::sp(),
|
||||
b::bare("run"),
|
||||
b::sp(),
|
||||
b::external_word("--"),
|
||||
b::sp(),
|
||||
b::flag("features"),
|
||||
b::sp(),
|
||||
b::bare("all"),
|
||||
],
|
||||
|tokens| {
|
||||
let cargo = format!("cargo").spanned(tokens[0].span());
|
||||
let nightly = format!("+nightly").spanned(tokens[2].span());
|
||||
let run = format!("run").spanned(tokens[4].span());
|
||||
let dashdash = format!("--").spanned(tokens[6].span());
|
||||
let features = format!("--features").spanned(tokens[8].span());
|
||||
let all = format!("all").spanned(tokens[10].span());
|
||||
let span = tokens[0].span().until(tokens[10].span());
|
||||
|
||||
ExternalTokensSyntax::new(
|
||||
vec![cargo, nightly, run, dashdash, features, all].spanned(span),
|
||||
)
|
||||
},
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_parse_string() {
|
||||
parse_tokens(
|
||||
CoerceStringShape,
|
||||
r#""hello""#,
|
||||
vec![b::string("hello")],
|
||||
|tokens| {
|
||||
Expression::string(inner_string_span(tokens[0].span())).into_expr(tokens[0].span())
|
||||
},
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_parse_path() {
|
||||
let _ = pretty_env_logger::try_init();
|
||||
|
||||
parse_expr(
|
||||
AnyExpressionShape,
|
||||
"$it.cpu",
|
||||
vec![b::it_var(), b::dot(), b::bare("cpu")],
|
||||
|tokens| {
|
||||
let (outer_var, inner_var) = tokens[0].expect_var();
|
||||
let bare = tokens[2].expect_bare();
|
||||
Expression::path(
|
||||
Expression::it_variable(inner_var).into_expr(outer_var),
|
||||
vec![PathMember::string("cpu", bare)],
|
||||
)
|
||||
.into_expr(outer_var.until(bare))
|
||||
},
|
||||
);
|
||||
|
||||
parse_expr(
|
||||
VariablePathShape,
|
||||
r#"$cpu.amount."max ghz""#,
|
||||
vec![
|
||||
b::var("cpu"),
|
||||
b::dot(),
|
||||
b::bare("amount"),
|
||||
b::dot(),
|
||||
b::string("max ghz"),
|
||||
],
|
||||
|tokens| {
|
||||
let (outer_var, inner_var) = tokens[0].expect_var();
|
||||
let amount = tokens[2].expect_bare();
|
||||
let (outer_max_ghz, _) = tokens[4].expect_string();
|
||||
|
||||
Expression::path(
|
||||
Expression::variable(inner_var).into_expr(outer_var),
|
||||
vec![
|
||||
PathMember::string("amount", amount),
|
||||
PathMember::string("max ghz", outer_max_ghz),
|
||||
],
|
||||
)
|
||||
.into_expr(outer_var.until(outer_max_ghz))
|
||||
},
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_parse_command() {
|
||||
parse_tokens(
|
||||
fallible(ClassifiedCommandShape),
|
||||
"ls *.txt",
|
||||
vec![b::bare("ls"), b::sp(), b::pattern("*.txt")],
|
||||
|tokens| {
|
||||
let bare = tokens[0].expect_bare();
|
||||
let pat = tokens[2].expect_pattern();
|
||||
|
||||
let mut map = IndexMap::new();
|
||||
map.insert("full".to_string(), NamedValue::AbsentSwitch);
|
||||
map.insert("help".to_string(), NamedValue::AbsentSwitch);
|
||||
|
||||
ClassifiedCommand::Internal(InternalCommand::new(
|
||||
"ls".to_string(),
|
||||
Tag {
|
||||
span: bare,
|
||||
anchor: None,
|
||||
},
|
||||
hir::Call {
|
||||
head: Box::new(Expression::Command(bare).into_expr(bare)),
|
||||
positional: Some(vec![Expression::pattern("*.txt").into_expr(pat)]),
|
||||
named: Some(NamedArguments { named: map }),
|
||||
span: bare.until(pat),
|
||||
},
|
||||
))
|
||||
},
|
||||
);
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, new)]
|
||||
struct TestRegistry {
|
||||
#[new(default)]
|
||||
signatures: indexmap::IndexMap<String, Signature>,
|
||||
}
|
||||
|
||||
impl TestRegistry {
|
||||
fn insert(&mut self, key: &str, value: Signature) {
|
||||
self.signatures.insert(key.to_string(), value);
|
||||
}
|
||||
}
|
||||
|
||||
impl SignatureRegistry for TestRegistry {
|
||||
fn has(&self, name: &str) -> bool {
|
||||
self.signatures.contains_key(name)
|
||||
}
|
||||
fn get(&self, name: &str) -> Option<Signature> {
|
||||
self.signatures.get(name).cloned()
|
||||
}
|
||||
fn clone_box(&self) -> Box<dyn SignatureRegistry> {
|
||||
Box::new(self.clone())
|
||||
}
|
||||
}
|
||||
|
||||
fn with_empty_context(source: &Text, callback: impl FnOnce(ExpandContext)) {
|
||||
let mut registry = TestRegistry::new();
|
||||
registry.insert(
|
||||
"ls",
|
||||
Signature::build("ls")
|
||||
.optional(
|
||||
"path",
|
||||
SyntaxShape::Pattern,
|
||||
"a path to get the directory contents from",
|
||||
)
|
||||
.switch("full", "list all available columns for each entry"),
|
||||
);
|
||||
|
||||
callback(ExpandContext::new(Box::new(registry), source, None))
|
||||
}
|
||||
|
||||
trait Expand {}
|
||||
|
||||
fn parse_tokens<T: Eq + HasSpan + PrettyDebugWithSource + Clone + Debug + 'static>(
|
||||
shape: impl ExpandSyntax<Output = Result<T, ParseError>>,
|
||||
syntax: &str,
|
||||
tokens: Vec<CurriedToken>,
|
||||
expected: impl FnOnce(&[SpannedToken]) -> T,
|
||||
) {
|
||||
// let parsed_tokens = parse(syntax);
|
||||
let tokens = b::token_list(tokens);
|
||||
let (tokens, source) = b::build(tokens);
|
||||
let text = Text::from(&source);
|
||||
|
||||
assert_eq!(syntax, source);
|
||||
|
||||
with_empty_context(&text, |context| {
|
||||
let tokens = tokens.expect_list();
|
||||
let mut iterator = TokensIterator::new(&tokens.item, context, tokens.span);
|
||||
|
||||
let expr = iterator.expand_syntax(shape);
|
||||
|
||||
let expr = match expr {
|
||||
Ok(expr) => expr,
|
||||
Err(err) => {
|
||||
outln!("");
|
||||
ptree::print_tree(&iterator.expand_tracer().print(text.clone())).unwrap();
|
||||
outln!("");
|
||||
|
||||
print_err(err.into(), &iterator.context().source().clone());
|
||||
panic!("Parse failed");
|
||||
}
|
||||
};
|
||||
|
||||
let expected = expected(&tokens.item);
|
||||
|
||||
if expr != expected {
|
||||
outln!("");
|
||||
ptree::print_tree(&iterator.expand_tracer().print(text.clone())).unwrap();
|
||||
outln!("");
|
||||
|
||||
assert_eq!(expr, expected);
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
fn parse_expr(
|
||||
shape: impl ExpandSyntax<Output = Result<SpannedExpression, ParseError>>,
|
||||
syntax: &str,
|
||||
tokens: Vec<CurriedToken>,
|
||||
expected: impl FnOnce(&[SpannedToken]) -> SpannedExpression,
|
||||
) {
|
||||
// let parsed_tokens = parse(syntax);
|
||||
let tokens = b::token_list(tokens);
|
||||
let (tokens, source) = b::build(tokens);
|
||||
let text = Text::from(&source);
|
||||
|
||||
assert_eq!(syntax, source);
|
||||
|
||||
with_empty_context(&text, |context| {
|
||||
let tokens = tokens.expect_list();
|
||||
let mut iterator = TokensIterator::new(&tokens.item, context, tokens.span);
|
||||
|
||||
let expr = iterator.expand_syntax(shape);
|
||||
|
||||
let expr = match expr {
|
||||
Ok(expr) => expr,
|
||||
Err(err) => {
|
||||
outln!("");
|
||||
ptree::print_tree(&iterator.expand_tracer().print(text.clone())).unwrap();
|
||||
outln!("");
|
||||
|
||||
print_err(err.into(), &iterator.source());
|
||||
panic!("Parse failed");
|
||||
}
|
||||
};
|
||||
|
||||
let expected = expected(&tokens.item);
|
||||
|
||||
if expr != expected {
|
||||
outln!("");
|
||||
ptree::print_tree(&iterator.expand_tracer().print(text.clone())).unwrap();
|
||||
outln!("");
|
||||
|
||||
assert_eq!(expr, expected);
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
fn inner_string_span(span: Span) -> Span {
|
||||
Span::new(span.start() + 1, span.end() - 1)
|
||||
}
|
||||
|
||||
pub fn print_err(err: ShellError, source: &Text) {
|
||||
let diag = err.into_diagnostic();
|
||||
|
||||
let writer = termcolor::StandardStream::stderr(termcolor::ColorChoice::Auto);
|
||||
let mut source = source.to_string();
|
||||
source.push_str(" ");
|
||||
let files = Files::new(source);
|
||||
let _ = language_reporting::emit(
|
||||
&mut writer.lock(),
|
||||
&files,
|
||||
&diag,
|
||||
&language_reporting::DefaultConfig,
|
||||
);
|
||||
}
|
@@ -1,19 +1,18 @@
-use crate::parser::{hir::Expression, Operator};
-use crate::prelude::*;
+use crate::{hir::SpannedExpression, CompareOperator};
 
 use derive_new::new;
 use getset::Getters;
-use nu_source::Spanned;
+use nu_source::{b, DebugDocBuilder, PrettyDebugWithSource, Spanned};
 use serde::{Deserialize, Serialize};
 
 #[derive(
     Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Hash, Getters, Serialize, Deserialize, new,
 )]
-#[get = "pub(crate)"]
+#[get = "pub"]
 pub struct Binary {
-    left: Expression,
-    op: Spanned<Operator>,
-    right: Expression,
+    left: SpannedExpression,
+    op: Spanned<CompareOperator>,
+    right: SpannedExpression,
 }
 
 impl PrettyDebugWithSource for Binary {
175
crates/nu-parser/src/hir/expand_external_tokens.rs
Normal file
175
crates/nu-parser/src/hir/expand_external_tokens.rs
Normal file
@ -0,0 +1,175 @@
|
||||
use crate::parse::token_tree::Token;
|
||||
use crate::{
|
||||
hir::syntax_shape::{ExpandSyntax, FlatShape, MaybeSpaceShape},
|
||||
TokensIterator,
|
||||
};
|
||||
use derive_new::new;
|
||||
use nu_errors::ParseError;
|
||||
use nu_protocol::SpannedTypeName;
|
||||
use nu_source::{b, DebugDocBuilder, HasSpan, PrettyDebug, Span, Spanned, SpannedItem};
|
||||
|
||||
#[derive(Debug, Eq, PartialEq, Clone, new)]
|
||||
pub struct ExternalTokensSyntax {
|
||||
pub tokens: Spanned<Vec<Spanned<String>>>,
|
||||
}
|
||||
|
||||
impl HasSpan for ExternalTokensSyntax {
|
||||
fn span(&self) -> Span {
|
||||
self.tokens.span
|
||||
}
|
||||
}
|
||||
|
||||
impl PrettyDebug for ExternalTokensSyntax {
|
||||
fn pretty(&self) -> DebugDocBuilder {
|
||||
b::intersperse(
|
||||
self.tokens
|
||||
.iter()
|
||||
.map(|token| b::primitive(format!("{:?}", token.item))),
|
||||
b::space(),
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Copy, Clone)]
|
||||
pub struct ExternalTokensShape;
|
||||
|
||||
impl ExpandSyntax for ExternalTokensShape {
|
||||
type Output = ExternalTokensSyntax;
|
||||
|
||||
fn name(&self) -> &'static str {
|
||||
"external tokens"
|
||||
}
|
||||
|
||||
fn expand<'a, 'b>(&self, token_nodes: &'b mut TokensIterator<'a>) -> ExternalTokensSyntax {
|
||||
let mut out: Vec<Spanned<String>> = vec![];
|
||||
|
||||
let start = token_nodes.span_at_cursor();
|
||||
|
||||
loop {
|
||||
match token_nodes.expand_syntax(ExternalExpressionShape) {
|
||||
Err(_) => break,
|
||||
Ok(span) => out.push(span.spanned_string(&token_nodes.source())),
|
||||
}
|
||||
}
|
||||
|
||||
let end = token_nodes.span_at_cursor();
|
||||
|
||||
ExternalTokensSyntax {
|
||||
tokens: out.spanned(start.until(end)),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Copy, Clone)]
|
||||
pub struct ExternalExpressionShape;
|
||||
|
||||
impl ExpandSyntax for ExternalExpressionShape {
|
||||
type Output = Result<Span, ParseError>;
|
||||
|
||||
fn name(&self) -> &'static str {
|
||||
"external expression"
|
||||
}
|
||||
|
||||
fn expand<'a, 'b>(&self, token_nodes: &'b mut TokensIterator<'a>) -> Result<Span, ParseError> {
|
||||
token_nodes.expand_infallible(MaybeSpaceShape);
|
||||
|
||||
let first = token_nodes.expand_syntax(ExternalStartToken)?;
|
||||
let mut last = first;
|
||||
|
||||
loop {
|
||||
let continuation = token_nodes.expand_syntax(ExternalStartToken);
|
||||
|
||||
if let Ok(continuation) = continuation {
|
||||
last = continuation;
|
||||
} else {
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
Ok(first.until(last))
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Copy, Clone)]
|
||||
struct ExternalStartToken;
|
||||
|
||||
impl ExpandSyntax for ExternalStartToken {
|
||||
type Output = Result<Span, ParseError>;
|
||||
|
||||
fn name(&self) -> &'static str {
|
||||
"external start token"
|
||||
}
|
||||
fn expand<'a, 'b>(&self, token_nodes: &'b mut TokensIterator<'a>) -> Result<Span, ParseError> {
|
||||
token_nodes.atomic_parse(|token_nodes| {
|
||||
let mut span: Option<Span> = None;
|
||||
|
||||
loop {
|
||||
let boundary = token_nodes.expand_infallible(PeekExternalBoundary);
|
||||
|
||||
if boundary {
|
||||
break;
|
||||
}
|
||||
|
||||
let peeked = token_nodes.peek().not_eof("external start token")?;
|
||||
let node = peeked.node;
|
||||
|
||||
let new_span = match node.unspanned() {
|
||||
Token::Comment(_)
|
||||
| Token::Separator
|
||||
| Token::Whitespace
|
||||
| Token::Pipeline(_) => {
|
||||
return Err(ParseError::mismatch(
|
||||
"external start token",
|
||||
node.spanned_type_name(),
|
||||
))
|
||||
}
|
||||
|
||||
_ => {
|
||||
let node = peeked.commit();
|
||||
node.span()
|
||||
}
|
||||
};
|
||||
|
||||
span = match span {
|
||||
None => Some(new_span),
|
||||
Some(before) => Some(before.until(new_span)),
|
||||
};
|
||||
}
|
||||
|
||||
match span {
|
||||
None => Err(token_nodes.err_next_token("external start token")),
|
||||
Some(span) => {
|
||||
token_nodes.color_shape(FlatShape::ExternalWord.spanned(span));
|
||||
Ok(span)
|
||||
}
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Copy, Clone)]
|
||||
struct PeekExternalBoundary;
|
||||
|
||||
impl ExpandSyntax for PeekExternalBoundary {
|
||||
type Output = bool;
|
||||
|
||||
fn name(&self) -> &'static str {
|
||||
"external boundary"
|
||||
}
|
||||
|
||||
fn expand<'a, 'b>(&self, token_nodes: &'b mut TokensIterator<'a>) -> Self::Output {
|
||||
let next = token_nodes.peek();
|
||||
|
||||
match next.node {
|
||||
None => true,
|
||||
Some(node) => match node.unspanned() {
|
||||
Token::Delimited(_) => true,
|
||||
Token::Whitespace => true,
|
||||
Token::Comment(_) => true,
|
||||
Token::Separator => true,
|
||||
Token::Call(_) => true,
|
||||
_ => false,
|
||||
},
|
||||
}
|
||||
}
|
||||
}
|
@@ -1,12 +1,12 @@
-use crate::prelude::*;
 use derive_new::new;
 use getset::Getters;
+use nu_source::Span;
 use serde::{Deserialize, Serialize};
 
 #[derive(
     Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Hash, Getters, Serialize, Deserialize, new,
 )]
-#[get = "pub(crate)"]
+#[get = "pub"]
 pub struct ExternalCommand {
     pub(crate) name: Span,
 }
@ -1,8 +1,8 @@
|
||||
use crate::parser::hir::Expression;
|
||||
use crate::parser::Flag;
|
||||
use crate::prelude::*;
|
||||
use crate::hir::SpannedExpression;
|
||||
use crate::Flag;
|
||||
use indexmap::IndexMap;
|
||||
use log::trace;
|
||||
use nu_source::{b, DebugDocBuilder, PrettyDebugRefineKind, PrettyDebugWithSource, Tag};
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
#[derive(Debug, Clone, Eq, PartialEq, Serialize, Deserialize)]
|
||||
@ -10,7 +10,7 @@ pub enum NamedValue {
|
||||
AbsentSwitch,
|
||||
PresentSwitch(Tag),
|
||||
AbsentValue,
|
||||
Value(Expression),
|
||||
Value(SpannedExpression),
|
||||
}
|
||||
|
||||
impl PrettyDebugWithSource for NamedValue {
|
||||
@ -22,18 +22,36 @@ impl PrettyDebugWithSource for NamedValue {
|
||||
NamedValue::Value(value) => value.pretty_debug(source),
|
||||
}
|
||||
}
|
||||
|
||||
fn refined_pretty_debug(&self, refine: PrettyDebugRefineKind, source: &str) -> DebugDocBuilder {
|
||||
match refine {
|
||||
PrettyDebugRefineKind::ContextFree => self.pretty_debug(source),
|
||||
PrettyDebugRefineKind::WithContext => match self {
|
||||
NamedValue::AbsentSwitch => b::value("absent"),
|
||||
NamedValue::PresentSwitch(_) => b::value("present"),
|
||||
NamedValue::AbsentValue => b::value("absent"),
|
||||
NamedValue::Value(value) => value.refined_pretty_debug(refine, source),
|
||||
},
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Eq, PartialEq, Serialize, Deserialize)]
|
||||
#[derive(Debug, Default, Clone, Eq, PartialEq, Serialize, Deserialize)]
|
||||
pub struct NamedArguments {
|
||||
pub(crate) named: IndexMap<String, NamedValue>,
|
||||
pub named: IndexMap<String, NamedValue>,
|
||||
}
|
||||
|
||||
impl NamedArguments {
|
||||
pub fn new() -> NamedArguments {
|
||||
NamedArguments {
|
||||
named: IndexMap::new(),
|
||||
Default::default()
|
||||
}
|
||||
|
||||
pub fn iter(&self) -> impl Iterator<Item = (&String, &NamedValue)> {
|
||||
self.named.iter()
|
||||
}
|
||||
|
||||
pub fn get(&self, name: &str) -> Option<&NamedValue> {
|
||||
self.named.get(name)
|
||||
}
|
||||
}
|
||||
|
||||
@ -43,7 +61,7 @@ impl NamedArguments {
|
||||
trace!("Inserting switch -- {} = {:?}", name, switch);
|
||||
|
||||
match switch {
|
||||
None => self.named.insert(name.into(), NamedValue::AbsentSwitch),
|
||||
None => self.named.insert(name, NamedValue::AbsentSwitch),
|
||||
Some(flag) => self.named.insert(
|
||||
name,
|
||||
NamedValue::PresentSwitch(Tag {
|
||||
@ -54,28 +72,37 @@ impl NamedArguments {
|
||||
};
|
||||
}
|
||||
|
||||
pub fn insert_optional(&mut self, name: impl Into<String>, expr: Option<Expression>) {
|
||||
pub fn insert_optional(&mut self, name: impl Into<String>, expr: Option<SpannedExpression>) {
|
||||
match expr {
|
||||
None => self.named.insert(name.into(), NamedValue::AbsentValue),
|
||||
Some(expr) => self.named.insert(name.into(), NamedValue::Value(expr)),
|
||||
};
|
||||
}
|
||||
|
||||
pub fn insert_mandatory(&mut self, name: impl Into<String>, expr: Expression) {
|
||||
pub fn insert_mandatory(&mut self, name: impl Into<String>, expr: SpannedExpression) {
|
||||
self.named.insert(name.into(), NamedValue::Value(expr));
|
||||
}
|
||||
}
|
||||
|
||||
impl PrettyDebugWithSource for NamedArguments {
|
||||
fn refined_pretty_debug(&self, refine: PrettyDebugRefineKind, source: &str) -> DebugDocBuilder {
|
||||
match refine {
|
||||
PrettyDebugRefineKind::ContextFree => self.pretty_debug(source),
|
||||
PrettyDebugRefineKind::WithContext => b::intersperse(
|
||||
self.named.iter().map(|(key, value)| {
|
||||
b::key(key)
|
||||
+ b::equals()
|
||||
+ value.refined_pretty_debug(PrettyDebugRefineKind::WithContext, source)
|
||||
}),
|
||||
b::space(),
|
||||
),
|
||||
}
|
||||
}
|
||||
|
||||
fn pretty_debug(&self, source: &str) -> DebugDocBuilder {
|
||||
b::delimit(
|
||||
"(",
|
||||
b::intersperse(
|
||||
self.named
|
||||
.iter()
|
||||
.map(|(key, value)| b::key(key) + b::equals() + value.pretty_debug(source)),
|
||||
b::space(),
|
||||
),
|
||||
self.refined_pretty_debug(PrettyDebugRefineKind::WithContext, source),
|
||||
")",
|
||||
)
|
||||
}
|
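The hunks above make NamedArguments::named public, derive Default, and keep the distinction between a named argument that was declared but not supplied (AbsentValue / AbsentSwitch) and one that carries a value. A hedged sketch of recording and reading back an optional named value, assuming these types are reachable at the path shown in the import:

use nu_parser::hir::{NamedArguments, NamedValue, SpannedExpression};

// Illustrative only: store an optional named argument. A declared-but-missing
// argument becomes AbsentValue rather than being dropped, so lookups can tell
// "absent" apart from "never declared".
fn record_pattern(pattern: Option<SpannedExpression>) -> NamedArguments {
    let mut named = NamedArguments::new();
    named.insert_optional("pattern", pattern);
    named
}

fn has_pattern_value(named: &NamedArguments) -> bool {
    match named.get("pattern") {
        Some(NamedValue::Value(_)) => true,
        _ => false,
    }
}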
crates/nu-parser/src/hir/path.rs (new file, 41 lines added)
@@ -0,0 +1,41 @@

use crate::hir::SpannedExpression;
use derive_new::new;
use getset::{Getters, MutGetters};
use nu_protocol::PathMember;
use nu_source::{b, DebugDocBuilder, PrettyDebug, PrettyDebugWithSource};
use serde::{Deserialize, Serialize};

#[derive(
    Debug,
    Clone,
    Eq,
    PartialEq,
    Ord,
    PartialOrd,
    Hash,
    Getters,
    MutGetters,
    Serialize,
    Deserialize,
    new,
)]
#[get = "pub"]
pub struct Path {
    head: SpannedExpression,
    #[get_mut = "pub(crate)"]
    tail: Vec<PathMember>,
}

impl PrettyDebugWithSource for Path {
    fn pretty_debug(&self, source: &str) -> DebugDocBuilder {
        self.head.pretty_debug(source)
            + b::operator(".")
            + b::intersperse(self.tail.iter().map(|m| m.pretty()), b::operator("."))
    }
}

impl Path {
    pub(crate) fn parts(self) -> (SpannedExpression, Vec<PathMember>) {
        (self.head, self.tail)
    }
}
crates/nu-parser/src/hir/range.rs (new file, 33 lines added)
@@ -0,0 +1,33 @@

use crate::hir::SpannedExpression;

use derive_new::new;
use getset::Getters;
use nu_source::{b, DebugDocBuilder, PrettyDebugWithSource, Span};
use serde::{Deserialize, Serialize};

#[derive(
    Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Hash, Getters, Serialize, Deserialize, new,
)]
pub struct Range {
    #[get = "pub"]
    left: SpannedExpression,
    #[get = "pub"]
    dotdot: Span,
    #[get = "pub"]
    right: SpannedExpression,
}

impl PrettyDebugWithSource for Range {
    fn pretty_debug(&self, source: &str) -> DebugDocBuilder {
        b::delimit(
            "<",
            self.left.pretty_debug(source)
                + b::space()
                + b::keyword(self.dotdot.slice(source))
                + b::space()
                + self.right.pretty_debug(source),
            ">",
        )
        .group()
    }
}
475
crates/nu-parser/src/hir/signature.rs
Normal file
475
crates/nu-parser/src/hir/signature.rs
Normal file
@ -0,0 +1,475 @@
use crate::hir;
use crate::hir::syntax_shape::{
    expand_atom, expand_syntax, BareShape, ExpandContext, ExpandSyntax, ExpansionRule,
    UnspannedAtomicToken, WhitespaceShape,
};
use crate::hir::tokens_iterator::TokensIterator;
use crate::parse::comment::Comment;
use derive_new::new;
use nu_errors::ParseError;
use nu_protocol::{RowType, SpannedTypeName, Type};
use nu_source::{
    b, DebugDocBuilder, HasFallibleSpan, HasSpan, PrettyDebugWithSource, Span, Spanned, SpannedItem,
};
use std::fmt::Debug;

// A Signature is a command without implementation.
//
// In Nu, a command is a function combined with macro expansion rules.
//
// def cd
//    # Change to a new path.
//    optional directory(Path) # the directory to change to
// end
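
// For orientation only (an illustrative sketch, not something this file
// constructs yet): a parsed `def cd` like the example above would correspond
// to a registry signature roughly like
//
//     nu_protocol::Signature::new("cd")
//         .desc("Change to a new path.")
//         .optional("directory", SyntaxShape::Path, "the directory to change to")
//
// assuming the usual nu_protocol builder methods.
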
#[derive(new)]
|
||||
struct Expander<'a, 'b, 'c, 'd> {
|
||||
iterator: &'b mut TokensIterator<'a>,
|
||||
context: &'d ExpandContext<'c>,
|
||||
}
|
||||
|
||||
impl<'a, 'b, 'c, 'd> Expander<'a, 'b, 'c, 'd> {
|
||||
fn expand<O>(&mut self, syntax: impl ExpandSyntax<Output = O>) -> Result<O, ParseError>
|
||||
where
|
||||
O: HasFallibleSpan + Clone + std::fmt::Debug + 'static,
|
||||
{
|
||||
expand_syntax(&syntax, self.iterator, self.context)
|
||||
}
|
||||
|
||||
fn optional<O>(&mut self, syntax: impl ExpandSyntax<Output = O>) -> Option<O>
|
||||
where
|
||||
O: HasFallibleSpan + Clone + std::fmt::Debug + 'static,
|
||||
{
|
||||
match expand_syntax(&syntax, self.iterator, self.context) {
|
||||
Err(_) => None,
|
||||
Ok(value) => Some(value),
|
||||
}
|
||||
}
|
||||
|
||||
fn pos(&mut self) -> Span {
|
||||
self.iterator.span_at_cursor()
|
||||
}
|
||||
|
||||
fn slice_string(&mut self, span: impl Into<Span>) -> String {
|
||||
span.into().slice(self.context.source()).to_string()
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Copy, Clone)]
|
||||
struct SignatureShape;
|
||||
|
||||
impl ExpandSyntax for SignatureShape {
|
||||
type Output = hir::Signature;
|
||||
|
||||
fn name(&self) -> &'static str {
|
||||
"signature"
|
||||
}
|
||||
|
||||
fn expand_syntax<'a, 'b>(
|
||||
&self,
|
||||
token_nodes: &'b mut TokensIterator<'a>,
|
||||
context: &ExpandContext,
|
||||
) -> Result<Self::Output, ParseError> {
|
||||
token_nodes.atomic_parse(|token_nodes| {
|
||||
let mut expander = Expander::new(token_nodes, context);
|
||||
let start = expander.pos();
|
||||
expander.expand(keyword("def"))?;
|
||||
expander.expand(WhitespaceShape)?;
|
||||
let name = expander.expand(BareShape)?;
|
||||
expander.expand(SeparatorShape)?;
|
||||
let usage = expander.expand(CommentShape)?;
|
||||
expander.expand(SeparatorShape)?;
|
||||
let end = expander.pos();
|
||||
|
||||
Ok(hir::Signature::new(
|
||||
nu_protocol::Signature::new(&name.word).desc(expander.slice_string(usage.text)),
|
||||
start.until(end),
|
||||
))
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
fn keyword(kw: &'static str) -> KeywordShape {
|
||||
KeywordShape { keyword: kw }
|
||||
}
|
||||
|
||||
#[derive(Debug, Copy, Clone)]
|
||||
struct KeywordShape {
|
||||
keyword: &'static str,
|
||||
}
|
||||
|
||||
impl ExpandSyntax for KeywordShape {
|
||||
type Output = Span;
|
||||
|
||||
fn name(&self) -> &'static str {
|
||||
"keyword"
|
||||
}
|
||||
fn expand_syntax<'a, 'b>(
|
||||
&self,
|
||||
token_nodes: &'b mut TokensIterator<'a>,
|
||||
context: &ExpandContext,
|
||||
) -> Result<Self::Output, ParseError> {
|
||||
let atom = expand_atom(token_nodes, "keyword", context, ExpansionRule::new())?;
|
||||
|
||||
if let UnspannedAtomicToken::Word { text } = &atom.unspanned {
|
||||
let word = text.slice(context.source());
|
||||
|
||||
if word == self.keyword {
|
||||
return Ok(atom.span);
|
||||
}
|
||||
}
|
||||
|
||||
Err(ParseError::mismatch(self.keyword, atom.spanned_type_name()))
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Copy, Clone)]
|
||||
struct SeparatorShape;
|
||||
|
||||
impl ExpandSyntax for SeparatorShape {
|
||||
type Output = Span;
|
||||
|
||||
fn name(&self) -> &'static str {
|
||||
"separator"
|
||||
}
|
||||
|
||||
fn expand_syntax<'a, 'b>(
|
||||
&self,
|
||||
token_nodes: &'b mut TokensIterator<'a>,
|
||||
context: &ExpandContext,
|
||||
) -> Result<Self::Output, ParseError> {
|
||||
let atom = expand_atom(token_nodes, "separator", context, ExpansionRule::new())?;
|
||||
|
||||
match &atom.unspanned {
|
||||
UnspannedAtomicToken::Separator { text } => Ok(*text),
|
||||
_ => Err(ParseError::mismatch("separator", atom.spanned_type_name())),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Copy, Clone)]
|
||||
struct CommentShape;
|
||||
|
||||
impl ExpandSyntax for CommentShape {
|
||||
type Output = Comment;
|
||||
|
||||
fn name(&self) -> &'static str {
|
||||
"comment"
|
||||
}
|
||||
|
||||
fn expand_syntax<'a, 'b>(
|
||||
&self,
|
||||
token_nodes: &'b mut TokensIterator<'a>,
|
||||
context: &ExpandContext,
|
||||
) -> Result<Self::Output, ParseError> {
|
||||
let atom = expand_atom(token_nodes, "comment", context, ExpansionRule::new())?;
|
||||
|
||||
match &atom.unspanned {
|
||||
UnspannedAtomicToken::Comment { body } => Ok(Comment::line(body, atom.span)),
|
||||
_ => Err(ParseError::mismatch("separator", atom.spanned_type_name())),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Copy, Clone, new)]
|
||||
struct TupleShape<A, B> {
|
||||
first: A,
|
||||
second: B,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, new)]
|
||||
struct TupleSyntax<A, B> {
|
||||
first: A,
|
||||
second: B,
|
||||
}
|
||||
|
||||
impl<A, B> PrettyDebugWithSource for TupleSyntax<A, B>
|
||||
where
|
||||
A: PrettyDebugWithSource,
|
||||
B: PrettyDebugWithSource,
|
||||
{
|
||||
fn pretty_debug(&self, source: &str) -> DebugDocBuilder {
|
||||
b::typed(
|
||||
"pair",
|
||||
self.first.pretty_debug(source) + b::space() + self.second.pretty_debug(source),
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
impl<A, B> HasFallibleSpan for TupleSyntax<A, B>
|
||||
where
|
||||
A: HasFallibleSpan + Debug + Clone,
|
||||
B: HasFallibleSpan + Debug + Clone,
|
||||
{
|
||||
fn maybe_span(&self) -> Option<Span> {
|
||||
match (self.first.maybe_span(), self.second.maybe_span()) {
|
||||
(Some(first), Some(second)) => Some(first.until(second)),
|
||||
(Some(first), None) => Some(first),
|
||||
(None, Some(second)) => Some(second),
|
||||
(None, None) => None,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<A, B, AOut, BOut> ExpandSyntax for TupleShape<A, B>
|
||||
where
|
||||
A: ExpandSyntax<Output = AOut> + Debug + Copy,
|
||||
B: ExpandSyntax<Output = BOut> + Debug + Copy,
|
||||
AOut: HasFallibleSpan + Debug + Clone + 'static,
|
||||
BOut: HasFallibleSpan + Debug + Clone + 'static,
|
||||
{
|
||||
type Output = TupleSyntax<AOut, BOut>;
|
||||
|
||||
fn name(&self) -> &'static str {
|
||||
"pair"
|
||||
}
|
||||
|
||||
fn expand_syntax<'a, 'b>(
|
||||
&self,
|
||||
token_nodes: &'b mut TokensIterator<'a>,
|
||||
context: &ExpandContext,
|
||||
) -> Result<Self::Output, ParseError> {
|
||||
token_nodes.atomic_parse(|token_nodes| {
|
||||
let first = expand_syntax(&self.first, token_nodes, context)?;
|
||||
let second = expand_syntax(&self.second, token_nodes, context)?;
|
||||
|
||||
Ok(TupleSyntax { first, second })
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct PositionalParam {
|
||||
optional: Option<Span>,
|
||||
name: Identifier,
|
||||
ty: Spanned<Type>,
|
||||
desc: Spanned<String>,
|
||||
span: Span,
|
||||
}
|
||||
|
||||
impl HasSpan for PositionalParam {
|
||||
fn span(&self) -> Span {
|
||||
self.span
|
||||
}
|
||||
}
|
||||
|
||||
impl PrettyDebugWithSource for PositionalParam {
|
||||
fn pretty_debug(&self, source: &str) -> DebugDocBuilder {
|
||||
(match self.optional {
|
||||
Some(_) => b::description("optional") + b::space(),
|
||||
None => b::blank(),
|
||||
}) + self.ty.pretty_debug(source)
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Copy, Clone)]
|
||||
pub struct PositionalParamShape;
|
||||
|
||||
impl ExpandSyntax for PositionalParamShape {
|
||||
type Output = PositionalParam;
|
||||
|
||||
fn name(&self) -> &'static str {
|
||||
"positional param"
|
||||
}
|
||||
|
||||
fn expand_syntax<'a, 'b>(
|
||||
&self,
|
||||
token_nodes: &'b mut TokensIterator<'a>,
|
||||
context: &ExpandContext,
|
||||
) -> Result<Self::Output, ParseError> {
|
||||
token_nodes.atomic_parse(|token_nodes| {
|
||||
let mut expander = Expander::new(token_nodes, context);
|
||||
|
||||
let optional = expander
|
||||
.optional(TupleShape::new(keyword("optional"), WhitespaceShape))
|
||||
.map(|s| s.first);
|
||||
|
||||
let name = expander.expand(IdentifierShape)?;
|
||||
|
||||
expander.optional(WhitespaceShape);
|
||||
|
||||
let _ty = expander.expand(TypeShape)?;
|
||||
|
||||
Ok(PositionalParam {
|
||||
optional,
|
||||
name,
|
||||
ty: Type::Nothing.spanned(Span::unknown()),
|
||||
desc: format!("").spanned(Span::unknown()),
|
||||
span: Span::unknown(),
|
||||
})
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
struct Identifier {
|
||||
body: String,
|
||||
span: Span,
|
||||
}
|
||||
|
||||
impl HasSpan for Identifier {
|
||||
fn span(&self) -> Span {
|
||||
self.span
|
||||
}
|
||||
}
|
||||
|
||||
impl PrettyDebugWithSource for Identifier {
|
||||
fn pretty_debug(&self, source: &str) -> DebugDocBuilder {
|
||||
b::typed("id", b::description(self.span.slice(source)))
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Copy, Clone)]
|
||||
struct IdentifierShape;
|
||||
|
||||
impl ExpandSyntax for IdentifierShape {
|
||||
type Output = Identifier;
|
||||
fn name(&self) -> &'static str {
|
||||
"identifier"
|
||||
}
|
||||
|
||||
fn expand_syntax<'a, 'b>(
|
||||
&self,
|
||||
token_nodes: &'b mut TokensIterator<'a>,
|
||||
context: &ExpandContext,
|
||||
) -> Result<Self::Output, ParseError> {
|
||||
let atom = expand_atom(token_nodes, "identifier", context, ExpansionRule::new())?;
|
||||
|
||||
if let UnspannedAtomicToken::Word { text } = atom.unspanned {
|
||||
let body = text.slice(context.source());
|
||||
if is_id(body) {
|
||||
return Ok(Identifier {
|
||||
body: body.to_string(),
|
||||
span: text,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
Err(ParseError::mismatch("identifier", atom.spanned_type_name()))
|
||||
}
|
||||
}
|
||||
|
||||
fn is_id(input: &str) -> bool {
|
||||
let source = nu_source::nom_input(input);
|
||||
match crate::parse::parser::ident(source) {
|
||||
Err(_) => false,
|
||||
Ok((input, _)) => input.fragment.is_empty(),
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, new)]
|
||||
struct TypeSyntax {
|
||||
ty: Type,
|
||||
span: Span,
|
||||
}
|
||||
|
||||
impl HasSpan for TypeSyntax {
|
||||
fn span(&self) -> Span {
|
||||
self.span
|
||||
}
|
||||
}
|
||||
|
||||
impl PrettyDebugWithSource for TypeSyntax {
|
||||
fn pretty_debug(&self, source: &str) -> DebugDocBuilder {
|
||||
self.ty.pretty_debug(source)
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Copy, Clone)]
|
||||
struct TypeShape;
|
||||
|
||||
impl ExpandSyntax for TypeShape {
|
||||
type Output = TypeSyntax;
|
||||
|
||||
fn name(&self) -> &'static str {
|
||||
"type"
|
||||
}
|
||||
fn expand_syntax<'a, 'b>(
|
||||
&self,
|
||||
token_nodes: &'b mut TokensIterator<'a>,
|
||||
context: &ExpandContext,
|
||||
) -> Result<Self::Output, ParseError> {
|
||||
let atom = expand_atom(token_nodes, "type", context, ExpansionRule::new())?;
|
||||
|
||||
match atom.unspanned {
|
||||
UnspannedAtomicToken::Word { text } => {
|
||||
let word = text.slice(context.source());
|
||||
|
||||
Ok(TypeSyntax::new(
|
||||
match word {
|
||||
"nothing" => Type::Nothing,
|
||||
"integer" => Type::Int,
|
||||
"decimal" => Type::Decimal,
|
||||
"bytesize" => Type::Bytesize,
|
||||
"string" => Type::String,
|
||||
"column-path" => Type::ColumnPath,
|
||||
"pattern" => Type::Pattern,
|
||||
"boolean" => Type::Boolean,
|
||||
"date" => Type::Date,
|
||||
"duration" => Type::Duration,
|
||||
"filename" => Type::Path,
|
||||
"binary" => Type::Binary,
|
||||
"row" => Type::Row(RowType::new()),
|
||||
"table" => Type::Table(vec![]),
|
||||
"block" => Type::Block,
|
||||
_ => return Err(ParseError::mismatch("type", atom.spanned_type_name())),
|
||||
},
|
||||
atom.span,
|
||||
))
|
||||
}
|
||||
_ => Err(ParseError::mismatch("type", atom.spanned_type_name())),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Copy, Clone)]
|
||||
struct TypeAnnotation;
|
||||
|
||||
impl ExpandSyntax for TypeAnnotation {
|
||||
type Output = TypeSyntax;
|
||||
|
||||
fn name(&self) -> &'static str {
|
||||
"type annotation"
|
||||
}
|
||||
|
||||
fn expand_syntax<'a, 'b>(
|
||||
&self,
|
||||
token_nodes: &'b mut TokensIterator<'a>,
|
||||
context: &ExpandContext,
|
||||
) -> Result<Self::Output, ParseError> {
|
||||
let atom = expand_atom(
|
||||
token_nodes,
|
||||
"type annotation",
|
||||
context,
|
||||
ExpansionRule::new(),
|
||||
)?;
|
||||
|
||||
match atom.unspanned {
|
||||
UnspannedAtomicToken::RoundDelimited { nodes, .. } => {
|
||||
token_nodes.atomic_parse(|token_nodes| {
|
||||
token_nodes.child(
|
||||
(&nodes[..]).spanned(atom.span),
|
||||
context.source().clone(),
|
||||
|token_nodes| {
|
||||
let ty = expand_syntax(&TypeShape, token_nodes, context)?;
|
||||
|
||||
let next = token_nodes.peek_non_ws();
|
||||
|
||||
match next.node {
|
||||
None => Ok(ty),
|
||||
Some(node) => {
|
||||
Err(ParseError::extra_tokens(node.spanned_type_name()))
|
||||
}
|
||||
}
|
||||
},
|
||||
)
|
||||
})
|
||||
}
|
||||
|
||||
_ => Err(ParseError::mismatch(
|
||||
"type annotation",
|
||||
atom.spanned_type_name(),
|
||||
)),
|
||||
}
|
||||
}
|
||||
}
|
crates/nu-parser/src/hir/syntax_shape.rs (new file, 680 lines)
@@ -0,0 +1,680 @@
|
||||
#![allow(clippy::large_enum_variant, clippy::type_complexity)]
|
||||
|
||||
mod block;
|
||||
mod expression;
|
||||
pub mod flat_shape;
|
||||
|
||||
use crate::commands::classified::internal::InternalCommand;
|
||||
use crate::commands::classified::{ClassifiedCommand, ClassifiedPipeline};
|
||||
use crate::commands::external_command;
|
||||
use crate::hir;
|
||||
use crate::hir::syntax_shape::block::CoerceBlockShape;
|
||||
use crate::hir::syntax_shape::expression::range::RangeShape;
|
||||
use crate::hir::syntax_shape::flat_shape::ShapeResult;
|
||||
use crate::hir::tokens_iterator::TokensIterator;
|
||||
use crate::hir::{Expression, SpannedExpression};
|
||||
use crate::parse::operator::EvaluationOperator;
|
||||
use crate::parse::token_tree::{
|
||||
ExternalCommandType, PipelineType, SpannedToken, Token, WhitespaceType, WordType,
|
||||
};
|
||||
use crate::parse_command::parse_command_tail;
|
||||
use derive_new::new;
|
||||
use getset::Getters;
|
||||
use nu_errors::ParseError;
|
||||
use nu_protocol::{ShellTypeName, Signature, SpannedTypeName};
|
||||
use nu_source::{
|
||||
b, DebugDocBuilder, HasSpan, PrettyDebug, PrettyDebugWithSource, Span, Spanned, SpannedItem,
|
||||
Tag, TaggedItem, Text,
|
||||
};
|
||||
use std::path::{Path, PathBuf};
|
||||
|
||||
pub(crate) use self::expression::delimited::DelimitedSquareShape;
|
||||
pub(crate) use self::expression::file_path::{ExternalWordShape, FilePathShape};
|
||||
pub(crate) use self::expression::list::{BackoffColoringMode, ExpressionListShape};
|
||||
pub(crate) use self::expression::number::{
|
||||
DecimalShape, IntExpressionShape, IntShape, NumberExpressionShape, NumberShape,
|
||||
};
|
||||
pub(crate) use self::expression::pattern::{PatternExpressionShape, PatternShape};
|
||||
pub(crate) use self::expression::string::{CoerceStringShape, StringExpressionShape, StringShape};
|
||||
pub(crate) use self::expression::unit::UnitExpressionShape;
|
||||
pub(crate) use self::expression::variable_path::{
|
||||
ColumnPathShape, ColumnPathSyntax, ExpressionContinuationShape, Member, MemberShape,
|
||||
PathTailShape, PathTailSyntax, VariablePathShape, VariableShape,
|
||||
};
|
||||
pub(crate) use self::expression::{AnyExpressionShape, AnyExpressionStartShape};
|
||||
pub(crate) use self::flat_shape::FlatShape;
|
||||
|
||||
use nu_protocol::SyntaxShape;
|
||||
use std::fmt::Debug;
|
||||
|
||||
impl ExpandSyntax for SyntaxShape {
|
||||
type Output = Result<SpannedExpression, ParseError>;
|
||||
|
||||
fn name(&self) -> &'static str {
|
||||
match self {
|
||||
SyntaxShape::Any => "shape[any]",
|
||||
SyntaxShape::Int => "shape[integer]",
|
||||
SyntaxShape::Range => "shape[range]",
|
||||
SyntaxShape::String => "shape[string]",
|
||||
SyntaxShape::Member => "shape[column name]",
|
||||
SyntaxShape::ColumnPath => "shape[column path]",
|
||||
SyntaxShape::Number => "shape[number]",
|
||||
SyntaxShape::Path => "shape[file path]",
|
||||
SyntaxShape::Pattern => "shape[glob pattern]",
|
||||
SyntaxShape::Block => "shape[block]",
|
||||
}
|
||||
}
|
||||
|
||||
fn expand<'a, 'b>(
|
||||
&self,
|
||||
token_nodes: &'b mut TokensIterator<'a>,
|
||||
) -> Result<SpannedExpression, ParseError> {
|
||||
match self {
|
||||
SyntaxShape::Any => token_nodes.expand_syntax(AnyExpressionShape),
|
||||
SyntaxShape::Int => token_nodes
|
||||
.expand_syntax(IntExpressionShape)
|
||||
.or_else(|_| token_nodes.expand_syntax(VariablePathShape)),
|
||||
SyntaxShape::Range => token_nodes
|
||||
.expand_syntax(RangeShape)
|
||||
.or_else(|_| token_nodes.expand_syntax(VariablePathShape)),
|
||||
SyntaxShape::String => token_nodes
|
||||
.expand_syntax(CoerceStringShape)
|
||||
.or_else(|_| token_nodes.expand_syntax(VariablePathShape)),
|
||||
SyntaxShape::Member => {
|
||||
let syntax = token_nodes.expand_syntax(MemberShape)?;
|
||||
Ok(syntax.to_expr())
|
||||
}
|
||||
SyntaxShape::ColumnPath => {
|
||||
let column_path = token_nodes.expand_syntax(ColumnPathShape)?;
|
||||
let ColumnPathSyntax {
|
||||
path: column_path,
|
||||
tag,
|
||||
} = column_path;
|
||||
|
||||
Ok(Expression::column_path(column_path).into_expr(tag.span))
|
||||
}
|
||||
SyntaxShape::Number => token_nodes
|
||||
.expand_syntax(NumberExpressionShape)
|
||||
.or_else(|_| token_nodes.expand_syntax(VariablePathShape)),
|
||||
SyntaxShape::Path => token_nodes
|
||||
.expand_syntax(FilePathShape)
|
||||
.or_else(|_| token_nodes.expand_syntax(VariablePathShape)),
|
||||
SyntaxShape::Pattern => token_nodes
|
||||
.expand_syntax(PatternShape)
|
||||
.or_else(|_| token_nodes.expand_syntax(VariablePathShape)),
|
||||
SyntaxShape::Block => token_nodes
|
||||
.expand_syntax(CoerceBlockShape)
|
||||
.or_else(|_| token_nodes.expand_syntax(VariablePathShape)),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub trait SignatureRegistry: Debug {
|
||||
fn has(&self, name: &str) -> bool;
|
||||
fn get(&self, name: &str) -> Option<Signature>;
|
||||
fn clone_box(&self) -> Box<dyn SignatureRegistry>;
|
||||
}
|
||||
|
||||
impl SignatureRegistry for Box<dyn SignatureRegistry> {
|
||||
fn has(&self, name: &str) -> bool {
|
||||
(&**self).has(name)
|
||||
}
|
||||
fn get(&self, name: &str) -> Option<Signature> {
|
||||
(&**self).get(name)
|
||||
}
|
||||
fn clone_box(&self) -> Box<dyn SignatureRegistry> {
|
||||
(&**self).clone_box()
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Getters, new)]
|
||||
pub struct ExpandContext<'context> {
|
||||
#[get = "pub(crate)"]
|
||||
pub registry: Box<dyn SignatureRegistry>,
|
||||
pub source: &'context Text,
|
||||
pub homedir: Option<PathBuf>,
|
||||
}
|
||||
|
||||
impl<'context> ExpandContext<'context> {
|
||||
pub(crate) fn homedir(&self) -> Option<&Path> {
|
||||
self.homedir.as_ref().map(|h| h.as_path())
|
||||
}
|
||||
|
||||
pub(crate) fn source(&self) -> &'context Text {
|
||||
self.source
|
||||
}
|
||||
}
|
||||
|
||||
pub trait ExpandSyntax: std::fmt::Debug + Clone {
|
||||
type Output: Clone + std::fmt::Debug + 'static;
|
||||
|
||||
fn name(&self) -> &'static str;
|
||||
|
||||
fn expand<'a, 'b>(&self, token_nodes: &'b mut TokensIterator<'a>) -> Self::Output;
|
||||
}
|
||||
|
||||
pub fn fallible<T, S>(syntax: S) -> FallibleSyntax<S>
|
||||
where
|
||||
T: Clone + Debug + 'static,
|
||||
S: ExpandSyntax<Output = T>,
|
||||
{
|
||||
FallibleSyntax { inner: syntax }
|
||||
}
|
||||
|
||||
#[derive(Debug, Copy, Clone)]
|
||||
pub struct FallibleSyntax<I> {
|
||||
inner: I,
|
||||
}
|
||||
|
||||
impl<I, T> ExpandSyntax for FallibleSyntax<I>
|
||||
where
|
||||
I: ExpandSyntax<Output = T>,
|
||||
T: Clone + Debug + 'static,
|
||||
{
|
||||
type Output = Result<T, ParseError>;
|
||||
|
||||
fn name(&self) -> &'static str {
|
||||
"fallible"
|
||||
}
|
||||
|
||||
fn expand<'a, 'b>(&self, token_nodes: &'b mut TokensIterator<'a>) -> Result<T, ParseError> {
|
||||
Ok(self.inner.expand(token_nodes))
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
enum BarePathState {
|
||||
Initial,
|
||||
Seen(Span, Span),
|
||||
Error(ParseError),
|
||||
}
|
||||
|
||||
impl BarePathState {
|
||||
pub fn seen(self, span: Span) -> BarePathState {
|
||||
match self {
|
||||
BarePathState::Initial => BarePathState::Seen(span, span),
|
||||
BarePathState::Seen(start, _) => BarePathState::Seen(start, span),
|
||||
BarePathState::Error(err) => BarePathState::Error(err),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn end(self, node: Option<&SpannedToken>, expected: &'static str) -> BarePathState {
|
||||
match self {
|
||||
BarePathState::Initial => match node {
|
||||
None => BarePathState::Error(ParseError::unexpected_eof(expected, Span::unknown())),
|
||||
Some(token) => {
|
||||
BarePathState::Error(ParseError::mismatch(expected, token.spanned_type_name()))
|
||||
}
|
||||
},
|
||||
BarePathState::Seen(start, end) => BarePathState::Seen(start, end),
|
||||
BarePathState::Error(err) => BarePathState::Error(err),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn into_bare(self) -> Result<Span, ParseError> {
|
||||
match self {
|
||||
BarePathState::Initial => unreachable!("into_bare in initial state"),
|
||||
BarePathState::Seen(start, end) => Ok(start.until(end)),
|
||||
BarePathState::Error(err) => Err(err),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn expand_bare(
|
||||
token_nodes: &'_ mut TokensIterator<'_>,
|
||||
predicate: impl Fn(&SpannedToken) -> bool,
|
||||
) -> Result<Span, ParseError> {
|
||||
let mut state = BarePathState::Initial;
|
||||
|
||||
loop {
|
||||
if token_nodes.at_end() {
|
||||
state = state.end(None, "word");
|
||||
break;
|
||||
}
|
||||
|
||||
let source = token_nodes.source();
|
||||
|
||||
let mut peeked = token_nodes.peek();
|
||||
let node = peeked.node;
|
||||
|
||||
match node {
|
||||
Some(token) if predicate(token) => {
|
||||
peeked.commit();
|
||||
state = state.clone().seen(token.span());
|
||||
let shapes = FlatShape::shapes(token, &source);
|
||||
token_nodes.color_shapes(shapes);
|
||||
}
|
||||
token => {
|
||||
state = state.clone().end(token, "word");
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
state.into_bare()
|
||||
}
|
||||
|
||||
#[derive(Debug, Copy, Clone)]
|
||||
pub struct BareExpressionShape;
|
||||
|
||||
impl ExpandSyntax for BareExpressionShape {
|
||||
type Output = Result<SpannedExpression, ParseError>;
|
||||
|
||||
fn name(&self) -> &'static str {
|
||||
"bare expression"
|
||||
}
|
||||
|
||||
fn expand<'a, 'b>(
|
||||
&self,
|
||||
token_nodes: &'b mut TokensIterator<'a>,
|
||||
) -> Result<SpannedExpression, ParseError> {
|
||||
token_nodes
|
||||
.expand_syntax(BarePathShape)
|
||||
.map(|span| Expression::bare().into_expr(span))
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Copy, Clone)]
|
||||
pub struct BarePathShape;
|
||||
|
||||
impl ExpandSyntax for BarePathShape {
|
||||
type Output = Result<Span, ParseError>;
|
||||
|
||||
fn name(&self) -> &'static str {
|
||||
"bare path"
|
||||
}
|
||||
|
||||
fn expand<'a, 'b>(&self, token_nodes: &'b mut TokensIterator<'a>) -> Result<Span, ParseError> {
|
||||
expand_bare(token_nodes, |token| match token.unspanned() {
|
||||
Token::Bare | Token::EvaluationOperator(EvaluationOperator::Dot) => true,
|
||||
|
||||
_ => false,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Copy, Clone)]
|
||||
pub struct BareShape;
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct BareSyntax {
|
||||
pub word: String,
|
||||
pub span: Span,
|
||||
}
|
||||
|
||||
impl HasSpan for BareSyntax {
|
||||
fn span(&self) -> Span {
|
||||
self.span
|
||||
}
|
||||
}
|
||||
|
||||
impl PrettyDebug for BareSyntax {
|
||||
fn pretty(&self) -> DebugDocBuilder {
|
||||
b::primitive(&self.word)
|
||||
}
|
||||
}
|
||||
|
||||
impl ExpandSyntax for BareShape {
|
||||
type Output = Result<BareSyntax, ParseError>;
|
||||
|
||||
fn name(&self) -> &'static str {
|
||||
"word"
|
||||
}
|
||||
|
||||
fn expand<'a, 'b>(
|
||||
&self,
|
||||
token_nodes: &'b mut TokensIterator<'a>,
|
||||
) -> Result<BareSyntax, ParseError> {
|
||||
let source = token_nodes.source();
|
||||
|
||||
token_nodes.expand_token(WordType, |span| {
|
||||
Ok((
|
||||
FlatShape::Word,
|
||||
BareSyntax {
|
||||
word: span.string(&source),
|
||||
span,
|
||||
},
|
||||
))
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub enum CommandSignature {
|
||||
Internal(Spanned<Signature>),
|
||||
LiteralExternal { outer: Span, inner: Span },
|
||||
External(Span),
|
||||
Expression(hir::SpannedExpression),
|
||||
}
|
||||
|
||||
impl PrettyDebugWithSource for CommandSignature {
|
||||
fn pretty_debug(&self, source: &str) -> DebugDocBuilder {
|
||||
match self {
|
||||
CommandSignature::Internal(internal) => {
|
||||
b::typed("command", b::description(&internal.name))
|
||||
}
|
||||
CommandSignature::LiteralExternal { outer, .. } => {
|
||||
b::typed("command", b::description(outer.slice(source)))
|
||||
}
|
||||
CommandSignature::External(external) => b::typed(
|
||||
"command",
|
||||
b::description("^") + b::description(external.slice(source)),
|
||||
),
|
||||
CommandSignature::Expression(expr) => b::typed("command", expr.pretty_debug(source)),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl HasSpan for CommandSignature {
|
||||
fn span(&self) -> Span {
|
||||
match self {
|
||||
CommandSignature::Internal(spanned) => spanned.span,
|
||||
CommandSignature::LiteralExternal { outer, .. } => *outer,
|
||||
CommandSignature::External(span) => *span,
|
||||
CommandSignature::Expression(expr) => expr.span,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl CommandSignature {
|
||||
pub fn to_expression(&self) -> hir::SpannedExpression {
|
||||
match self {
|
||||
CommandSignature::Internal(command) => {
|
||||
let span = command.span;
|
||||
hir::Expression::Command(span).into_expr(span)
|
||||
}
|
||||
CommandSignature::LiteralExternal { outer, inner } => {
|
||||
hir::Expression::ExternalCommand(hir::ExternalCommand::new(*inner))
|
||||
.into_expr(*outer)
|
||||
}
|
||||
CommandSignature::External(span) => {
|
||||
hir::Expression::ExternalCommand(hir::ExternalCommand::new(*span)).into_expr(*span)
|
||||
}
|
||||
CommandSignature::Expression(expr) => expr.clone(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Copy, Clone)]
|
||||
pub struct PipelineShape;
|
||||
|
||||
impl ExpandSyntax for PipelineShape {
|
||||
type Output = ClassifiedPipeline;
|
||||
|
||||
fn name(&self) -> &'static str {
|
||||
"pipeline"
|
||||
}
|
||||
|
||||
fn expand<'content, 'me>(
|
||||
&self,
|
||||
token_nodes: &'me mut TokensIterator<'content>,
|
||||
) -> ClassifiedPipeline {
|
||||
if token_nodes.at_end() {
|
||||
return ClassifiedPipeline::commands(vec![], Span::unknown());
|
||||
}
|
||||
|
||||
let start = token_nodes.span_at_cursor();
|
||||
|
||||
// whitespace is allowed at the beginning
|
||||
token_nodes.expand_infallible(MaybeSpaceShape);
|
||||
|
||||
let pipeline = token_nodes
|
||||
.expand_token(PipelineType, |pipeline| Ok(((), pipeline)))
|
||||
.expect("PipelineShape is only expected to be called with a Pipeline token");
|
||||
|
||||
let parts = &pipeline.parts[..];
|
||||
|
||||
let mut out = vec![];
|
||||
|
||||
for part in parts {
|
||||
if let Some(span) = part.pipe {
|
||||
token_nodes.color_shape(FlatShape::Pipe.spanned(span));
|
||||
}
|
||||
|
||||
let tokens: Spanned<&[SpannedToken]> = part.tokens().spanned(part.span());
|
||||
|
||||
let (shapes, classified) = token_nodes.child(tokens, move |token_nodes| {
|
||||
token_nodes.expand_infallible(ClassifiedCommandShape)
|
||||
});
|
||||
|
||||
for shape in shapes {
|
||||
match shape {
|
||||
ShapeResult::Success(shape) => token_nodes.color_shape(shape),
|
||||
ShapeResult::Fallback { shape, allowed } => {
|
||||
token_nodes.color_err(shape, allowed)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
out.push(classified);
|
||||
}
|
||||
|
||||
token_nodes.expand_infallible(BackoffColoringMode::new(vec!["no more tokens".to_string()]));
|
||||
|
||||
let end = token_nodes.span_at_cursor();
|
||||
|
||||
ClassifiedPipeline::commands(out, start.until(end))
|
||||
}
|
||||
}
|
||||
|
||||
pub enum CommandHeadKind {
|
||||
External,
|
||||
Internal(Signature),
|
||||
}
|
||||
|
||||
#[derive(Debug, Copy, Clone)]
|
||||
pub struct CommandHeadShape;
|
||||
|
||||
impl ExpandSyntax for CommandHeadShape {
|
||||
type Output = Result<CommandSignature, ParseError>;
|
||||
|
||||
fn name(&self) -> &'static str {
|
||||
"command head"
|
||||
}
|
||||
|
||||
fn expand<'a, 'b>(
|
||||
&self,
|
||||
token_nodes: &mut TokensIterator<'_>,
|
||||
) -> Result<CommandSignature, ParseError> {
|
||||
token_nodes.expand_infallible(MaybeSpaceShape);
|
||||
|
||||
let source = token_nodes.source();
|
||||
let registry = &token_nodes.context().registry.clone_box();
|
||||
|
||||
token_nodes
|
||||
.expand_token(ExternalCommandType, |(inner, outer)| {
|
||||
Ok((
|
||||
FlatShape::ExternalCommand,
|
||||
CommandSignature::LiteralExternal { outer, inner },
|
||||
))
|
||||
})
|
||||
.or_else(|_| {
|
||||
token_nodes.expand_token(WordType, |span| {
|
||||
let name = span.slice(&source);
|
||||
if registry.has(name) {
|
||||
let signature = registry.get(name).unwrap();
|
||||
Ok((
|
||||
FlatShape::InternalCommand,
|
||||
CommandSignature::Internal(signature.spanned(span)),
|
||||
))
|
||||
} else {
|
||||
Ok((FlatShape::ExternalCommand, CommandSignature::External(span)))
|
||||
}
|
||||
})
|
||||
})
|
||||
.or_else(|_| {
|
||||
token_nodes
|
||||
.expand_syntax(AnyExpressionShape)
|
||||
.map(CommandSignature::Expression)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Copy, Clone)]
|
||||
pub struct ClassifiedCommandShape;
|
||||
|
||||
impl ExpandSyntax for ClassifiedCommandShape {
|
||||
type Output = ClassifiedCommand;
|
||||
|
||||
fn name(&self) -> &'static str {
|
||||
"classified command"
|
||||
}
|
||||
|
||||
fn expand<'a, 'b>(&self, token_nodes: &'b mut TokensIterator<'a>) -> ClassifiedCommand {
|
||||
let start = token_nodes.span_at_cursor();
|
||||
let source = token_nodes.source();
|
||||
|
||||
let head = match token_nodes.expand_syntax(CommandHeadShape) {
|
||||
Err(err) => {
|
||||
token_nodes
|
||||
.expand_infallible(BackoffColoringMode::new(vec!["command".to_string()]));
|
||||
return ClassifiedCommand::Error(err);
|
||||
}
|
||||
|
||||
Ok(head) => head,
|
||||
};
|
||||
|
||||
match head {
|
||||
CommandSignature::Expression(expr) => ClassifiedCommand::Error(ParseError::mismatch(
|
||||
"command",
|
||||
expr.type_name().spanned(expr.span),
|
||||
)),
|
||||
|
||||
CommandSignature::External(name) => {
|
||||
let name_str = name.slice(&source);
|
||||
|
||||
match external_command(token_nodes, name_str.tagged(name)) {
|
||||
Err(err) => ClassifiedCommand::Error(err),
|
||||
Ok(command) => command,
|
||||
}
|
||||
}
|
||||
|
||||
// If the command starts with `^`, treat it as an external command no matter what
|
||||
CommandSignature::LiteralExternal { outer, inner } => {
|
||||
let name_str = inner.slice(&source);
|
||||
|
||||
match external_command(token_nodes, name_str.tagged(outer)) {
|
||||
Err(err) => ClassifiedCommand::Error(err),
|
||||
Ok(command) => command,
|
||||
}
|
||||
}
|
||||
|
||||
CommandSignature::Internal(signature) => {
|
||||
let tail = parse_command_tail(&signature.item, token_nodes, signature.span);
|
||||
|
||||
let tail = match tail {
|
||||
Err(err) => {
|
||||
return ClassifiedCommand::Error(err);
|
||||
}
|
||||
Ok(tail) => tail,
|
||||
};
|
||||
|
||||
let (positional, named) = match tail {
|
||||
None => (None, None),
|
||||
Some((positional, named)) => (positional, named),
|
||||
};
|
||||
|
||||
let end = token_nodes.span_at_cursor();
|
||||
|
||||
let expr = hir::Expression::Command(signature.span).into_expr(signature.span);
|
||||
|
||||
let call = hir::Call {
|
||||
head: Box::new(expr),
|
||||
positional,
|
||||
named,
|
||||
span: start.until(end),
|
||||
};
|
||||
|
||||
ClassifiedCommand::Internal(InternalCommand::new(
|
||||
signature.item.name.clone(),
|
||||
Tag {
|
||||
span: signature.span,
|
||||
anchor: None,
|
||||
},
|
||||
call,
|
||||
))
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Copy, Clone)]
|
||||
pub struct MaybeWhitespaceEof;
|
||||
|
||||
impl ExpandSyntax for MaybeWhitespaceEof {
|
||||
type Output = Result<(), ParseError>;
|
||||
|
||||
fn name(&self) -> &'static str {
|
||||
"<whitespace? eof>"
|
||||
}
|
||||
|
||||
fn expand<'a, 'b>(&self, token_nodes: &'b mut TokensIterator<'a>) -> Self::Output {
|
||||
token_nodes.atomic_parse(|token_nodes| {
|
||||
token_nodes.expand_infallible(MaybeSpaceShape);
|
||||
token_nodes.expand_syntax(EofShape)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Copy, Clone)]
|
||||
pub struct EofShape;
|
||||
|
||||
impl ExpandSyntax for EofShape {
|
||||
type Output = Result<(), ParseError>;
|
||||
|
||||
fn name(&self) -> &'static str {
|
||||
"eof"
|
||||
}
|
||||
|
||||
fn expand<'a, 'b>(&self, token_nodes: &'b mut TokensIterator<'a>) -> Result<(), ParseError> {
|
||||
let next = token_nodes.peek();
|
||||
let node = next.node;
|
||||
|
||||
match node {
|
||||
None => Ok(()),
|
||||
Some(node) => Err(ParseError::mismatch("eof", node.spanned_type_name())),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Copy, Clone)]
|
||||
pub struct WhitespaceShape;
|
||||
|
||||
impl ExpandSyntax for WhitespaceShape {
|
||||
type Output = Result<Span, ParseError>;
|
||||
|
||||
fn name(&self) -> &'static str {
|
||||
"whitespace"
|
||||
}
|
||||
|
||||
fn expand<'a, 'b>(&self, token_nodes: &'b mut TokensIterator<'a>) -> Result<Span, ParseError> {
|
||||
token_nodes.expand_token(WhitespaceType, |span| Ok((FlatShape::Whitespace, span)))
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Copy, Clone)]
|
||||
pub struct MaybeSpaceShape;
|
||||
|
||||
impl ExpandSyntax for MaybeSpaceShape {
|
||||
type Output = Option<Span>;
|
||||
|
||||
fn name(&self) -> &'static str {
|
||||
"whitespace?"
|
||||
}
|
||||
|
||||
fn expand<'a, 'b>(&self, token_nodes: &'b mut TokensIterator<'a>) -> Option<Span> {
|
||||
let result = token_nodes.expand_token(WhitespaceType, |span| {
|
||||
Ok((FlatShape::Whitespace, Some(span)))
|
||||
});
|
||||
|
||||
// No space is acceptable, but we need to err inside expand_token so we don't
|
||||
// consume the non-whitespace token
|
||||
result.unwrap_or(None)
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Copy, Clone)]
|
||||
pub struct SpaceShape;
|
||||
|
||||
#[derive(Debug, Copy, Clone)]
|
||||
pub struct CommandShape;
|
crates/nu-parser/src/hir/syntax_shape/block.rs (new file, 159 lines)
@@ -0,0 +1,159 @@
|
||||
use crate::hir::Expression;
|
||||
use crate::{
|
||||
hir,
|
||||
hir::syntax_shape::{
|
||||
ExpandSyntax, ExpressionContinuationShape, MemberShape, PathTailShape, PathTailSyntax,
|
||||
VariablePathShape,
|
||||
},
|
||||
hir::tokens_iterator::TokensIterator,
|
||||
};
|
||||
use hir::SpannedExpression;
|
||||
use nu_errors::ParseError;
|
||||
use nu_source::Span;
|
||||
|
||||
#[derive(Debug, Copy, Clone)]
|
||||
pub struct CoerceBlockShape;
|
||||
|
||||
impl ExpandSyntax for CoerceBlockShape {
|
||||
type Output = Result<SpannedExpression, ParseError>;
|
||||
|
||||
fn name(&self) -> &'static str {
|
||||
"any block"
|
||||
}
|
||||
|
||||
fn expand<'a, 'b>(
|
||||
&self,
|
||||
token_nodes: &mut TokensIterator<'_>,
|
||||
) -> Result<SpannedExpression, ParseError> {
|
||||
// is it just a block?
|
||||
token_nodes
|
||||
.expand_syntax(BlockShape)
|
||||
.or_else(|_| token_nodes.expand_syntax(ShorthandBlockShape))
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Copy, Clone)]
|
||||
pub struct BlockShape;
|
||||
|
||||
impl ExpandSyntax for BlockShape {
|
||||
type Output = Result<SpannedExpression, ParseError>;
|
||||
|
||||
fn name(&self) -> &'static str {
|
||||
"block"
|
||||
}
|
||||
|
||||
fn expand<'a, 'b>(
|
||||
&self,
|
||||
token_nodes: &'b mut TokensIterator<'a>,
|
||||
) -> Result<SpannedExpression, ParseError> {
|
||||
let exprs = token_nodes.block()?;
|
||||
|
||||
Ok(hir::Expression::Block(exprs.item).into_expr(exprs.span))
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Copy, Clone)]
|
||||
pub struct ShorthandBlockShape;
|
||||
|
||||
impl ExpandSyntax for ShorthandBlockShape {
|
||||
type Output = Result<SpannedExpression, ParseError>;
|
||||
|
||||
fn name(&self) -> &'static str {
|
||||
"shorthand block"
|
||||
}
|
||||
|
||||
fn expand<'a, 'b>(
|
||||
&self,
|
||||
token_nodes: &'b mut TokensIterator<'a>,
|
||||
) -> Result<SpannedExpression, ParseError> {
|
||||
let mut current = token_nodes.expand_syntax(ShorthandPath)?;
|
||||
|
||||
loop {
|
||||
match token_nodes.expand_syntax(ExpressionContinuationShape) {
|
||||
Result::Err(_) => break,
|
||||
Result::Ok(continuation) => current = continuation.append_to(current),
|
||||
}
|
||||
}
|
||||
let span = current.span;
|
||||
|
||||
let block = hir::Expression::Block(vec![current]).into_expr(span);
|
||||
|
||||
Ok(block)
|
||||
}
|
||||
}
|
||||
|
||||
/// A shorthand for `$it.foo."bar"`, used inside of a shorthand block
|
||||
#[derive(Debug, Copy, Clone)]
|
||||
pub struct ShorthandPath;
|
||||
|
||||
impl ExpandSyntax for ShorthandPath {
|
||||
type Output = Result<SpannedExpression, ParseError>;
|
||||
|
||||
fn name(&self) -> &'static str {
|
||||
"shorthand path"
|
||||
}
|
||||
|
||||
fn expand<'a, 'b>(
|
||||
&self,
|
||||
token_nodes: &'b mut TokensIterator<'a>,
|
||||
) -> Result<SpannedExpression, ParseError> {
|
||||
// if it's a variable path, that's the head part
|
||||
let path = token_nodes.expand_syntax(VariablePathShape);
|
||||
|
||||
if let Ok(path) = path {
|
||||
return Ok(path);
|
||||
}
|
||||
|
||||
// Synthesize the head of the shorthand path (`<member>` -> `$it.<member>`)
|
||||
let mut head = token_nodes.expand_syntax(ShorthandHeadShape)?;
|
||||
|
||||
// Now that we've synthesized the head of the path, proceed to expand the tail of the path
|
||||
// like any other path.
|
||||
let tail = token_nodes.expand_syntax(PathTailShape);
|
||||
|
||||
match tail {
|
||||
Err(_) => Ok(head),
|
||||
Ok(PathTailSyntax { tail, span }) => {
|
||||
let span = head.span.until(span);
|
||||
|
||||
// For each member that `PathTailShape` expanded, join it onto the existing expression
|
||||
// to form a new path
|
||||
for member in tail {
|
||||
head = Expression::dot_member(head, member).into_expr(span);
|
||||
}
|
||||
|
||||
Ok(head)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// A shorthand for `$it.foo."bar"`, used inside of a shorthand block
|
||||
#[derive(Debug, Copy, Clone)]
|
||||
pub struct ShorthandHeadShape;
|
||||
|
||||
impl ExpandSyntax for ShorthandHeadShape {
|
||||
type Output = Result<SpannedExpression, ParseError>;
|
||||
|
||||
fn name(&self) -> &'static str {
|
||||
"shorthand head"
|
||||
}
|
||||
|
||||
fn expand<'a, 'b>(
|
||||
&self,
|
||||
token_nodes: &'b mut TokensIterator<'a>,
|
||||
) -> Result<SpannedExpression, ParseError> {
|
||||
let head = token_nodes.expand_syntax(MemberShape)?;
|
||||
let head = head.to_path_member(&token_nodes.source());
|
||||
|
||||
// Synthesize an `$it` expression
|
||||
let it = synthetic_it();
|
||||
let span = head.span;
|
||||
|
||||
Ok(Expression::path(it, vec![head]).into_expr(span))
|
||||
}
|
||||
}
|
||||
|
||||
fn synthetic_it() -> hir::SpannedExpression {
|
||||
Expression::it_variable(Span::unknown()).into_expr(Span::unknown())
|
||||
}
|
crates/nu-parser/src/hir/syntax_shape/design.md (new file, 72 lines)
@@ -0,0 +1,72 @@
# Meaningful Primitive Tokens

- `int`
- `decimal`
- `op::name`
- `dot`
- `dotdot`
- `string`
- `var::it`
- `var::other`
- `external-command`
- `pattern::glob`
- `word`
- `comment`
- `whitespace`
- `separator`
- `longhand-flag`
- `shorthand-flag`

# Grouped Tokens

- `(call head ...tail)`
- `(list ...nodes)`
- `(paren ...nodes)`
- `(square ...nodes)`
- `(curly ...nodes)`
- `(pipeline ...elements) where elements: pipeline-element`
- `(pipeline-element pipe? token)`

# Atomic Tokens

- `(unit number unit) where number: number, unit: unit`

# Expression

```
start(ExpressionStart) continuation(ExpressionContinuation)* ->
```

## ExpressionStart

```
word -> String
unit -> Unit
number -> Number
string -> String
var::it -> Var::It
var::other -> Var::Other
pattern::glob -> Pattern::Glob
square -> Array
```

## TightExpressionContinuation

```
dot AnyExpression -> Member
dotdot AnyExpression -> RangeContinuation
```

## InfixExpressionContinuation

```
whitespace op whitespace AnyExpression -> InfixContinuation
```

## Member

```
int -> Member::Int
word -> Member::Word
string -> Member::String
```
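
# Worked Example (Illustrative)

A sketch of how a fragment such as `$it.size > 1kb` decomposes under the rules above. This example is an addition for orientation only; the exact token spellings are assumptions based on the tables in this document.

```
$it     var::it -> Var::It                            (ExpressionStart)
.size   dot word -> Member::Word                      (TightExpressionContinuation, Member)
 > 1kb  whitespace op whitespace (unit number unit)
            -> InfixContinuation                      (InfixExpressionContinuation)
```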
crates/nu-parser/src/hir/syntax_shape/expression.rs (new file, 77 lines)
@@ -0,0 +1,77 @@
pub(crate) mod delimited;
pub(crate) mod file_path;
pub(crate) mod list;
pub(crate) mod number;
pub(crate) mod pattern;
pub(crate) mod range;
pub(crate) mod string;
pub(crate) mod unit;
pub(crate) mod variable_path;

use crate::hir::syntax_shape::{
    BareExpressionShape, DelimitedSquareShape, ExpandContext, ExpandSyntax,
    ExpressionContinuationShape, NumberExpressionShape, PatternExpressionShape,
    StringExpressionShape, UnitExpressionShape, VariableShape,
};
use crate::hir::{SpannedExpression, TokensIterator};
use nu_errors::ParseError;
use std::path::PathBuf;

#[derive(Debug, Copy, Clone)]
pub struct AnyExpressionShape;

impl ExpandSyntax for AnyExpressionShape {
    type Output = Result<SpannedExpression, ParseError>;

    fn name(&self) -> &'static str {
        "any expression"
    }

    fn expand<'a, 'b>(
        &self,
        token_nodes: &mut TokensIterator<'_>,
    ) -> Result<SpannedExpression, ParseError> {
        token_nodes.atomic_parse(|token_nodes| {
            // Look for an atomic expression at the cursor
            let mut current = token_nodes.expand_syntax(AnyExpressionStartShape)?;

            loop {
                match token_nodes.expand_syntax(ExpressionContinuationShape) {
                    Err(_) => return Ok(current),
                    Ok(continuation) => current = continuation.append_to(current),
                }
            }
        })
    }
}

#[derive(Debug, Copy, Clone)]
pub struct AnyExpressionStartShape;

impl ExpandSyntax for AnyExpressionStartShape {
    type Output = Result<SpannedExpression, ParseError>;

    fn name(&self) -> &'static str {
        "any expression start"
    }

    fn expand<'a, 'b>(
        &self,
        token_nodes: &mut TokensIterator<'_>,
    ) -> Result<SpannedExpression, ParseError> {
        token_nodes
            .expand_syntax(VariableShape)
            .or_else(|_| token_nodes.expand_syntax(UnitExpressionShape))
            .or_else(|_| token_nodes.expand_syntax(BareExpressionShape))
            .or_else(|_| token_nodes.expand_syntax(PatternExpressionShape))
            .or_else(|_| token_nodes.expand_syntax(NumberExpressionShape))
            .or_else(|_| token_nodes.expand_syntax(StringExpressionShape))
            .or_else(|_| token_nodes.expand_syntax(DelimitedSquareShape))
    }
}

pub fn expand_file_path(string: &str, context: &ExpandContext) -> PathBuf {
    let expanded = shellexpand::tilde_with_context(string, || context.homedir());

    PathBuf::from(expanded.as_ref())
}
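
A minimal usage sketch of `expand_file_path` above (illustrative only: the helper name, home directory, and file names are hypothetical; `shellexpand` is the crate the function already uses):

```rust
use std::path::PathBuf;

// Expand a leading `~` with a caller-supplied home directory and leave every
// other string untouched, mirroring expand_file_path.
fn expand_tilde(input: &str, home: Option<&str>) -> PathBuf {
    let expanded = shellexpand::tilde_with_context(input, || home);
    PathBuf::from(expanded.as_ref())
}

// expand_tilde("~/notes.txt", Some("/home/jt")) == PathBuf::from("/home/jt/notes.txt")
// expand_tilde("./notes.txt", Some("/home/jt")) == PathBuf::from("./notes.txt")
```
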
@@ -1,15 +1,20 @@
|
||||
use crate::parser::hir::syntax_shape::{
|
||||
use crate::hir::syntax_shape::FlatShape;
|
||||
use crate::hir::syntax_shape::{
|
||||
expand_syntax, expression::expand_file_path, parse_single_node, BarePathShape,
|
||||
BarePatternShape, ExpandContext, UnitShape, UnitSyntax,
|
||||
};
|
||||
use crate::parser::{
|
||||
use crate::parse::operator::EvaluationOperator;
|
||||
use crate::parse::token_tree::{DelimitedNode, Delimiter, TokenNode};
|
||||
use crate::parse::tokens::UnspannedToken;
|
||||
use crate::parse::unit::Unit;
|
||||
use crate::{
|
||||
hir,
|
||||
hir::{Expression, RawNumber, TokensIterator},
|
||||
parse::flag::{Flag, FlagKind},
|
||||
DelimitedNode, Delimiter, FlatShape, TokenNode, Unit, UnspannedToken,
|
||||
};
|
||||
use crate::prelude::*;
|
||||
use nu_source::Spanned;
|
||||
use nu_errors::{ParseError, ShellError};
|
||||
use nu_protocol::{ShellTypeName, SpannedTypeName};
|
||||
use nu_source::{b, DebugDocBuilder, HasSpan, PrettyDebugWithSource, Span, Spanned, SpannedItem};
|
||||
use std::ops::Deref;
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
@@ -48,23 +53,36 @@ pub enum UnspannedAtomicToken<'tokens> {
|
||||
Word {
|
||||
text: Span,
|
||||
},
|
||||
#[allow(unused)]
|
||||
Dot {
|
||||
text: Span,
|
||||
},
|
||||
SquareDelimited {
|
||||
spans: (Span, Span),
|
||||
nodes: &'tokens Vec<TokenNode>,
|
||||
},
|
||||
#[allow(unused)]
|
||||
RoundDelimited {
|
||||
spans: (Span, Span),
|
||||
nodes: &'tokens Vec<TokenNode>,
|
||||
},
|
||||
ShorthandFlag {
|
||||
name: Span,
|
||||
},
|
||||
Operator {
|
||||
CompareOperator {
|
||||
text: Span,
|
||||
},
|
||||
Dot {
|
||||
text: Span,
|
||||
},
|
||||
DotDot {
|
||||
text: Span,
|
||||
},
|
||||
Whitespace {
|
||||
text: Span,
|
||||
},
|
||||
Separator {
|
||||
text: Span,
|
||||
},
|
||||
Comment {
|
||||
body: Span,
|
||||
},
|
||||
}
|
||||
|
||||
impl<'tokens> UnspannedAtomicToken<'tokens> {
|
||||
@@ -76,15 +94,24 @@ impl<'tokens> UnspannedAtomicToken<'tokens> {
|
||||
}
|
||||
}
|
||||
|
||||
impl<'tokens> ShellTypeName for AtomicToken<'tokens> {
|
||||
fn type_name(&self) -> &'static str {
|
||||
self.unspanned.type_name()
|
||||
}
|
||||
}
|
||||
|
||||
impl<'tokens> ShellTypeName for UnspannedAtomicToken<'tokens> {
|
||||
fn type_name(&self) -> &'static str {
|
||||
match &self {
|
||||
UnspannedAtomicToken::Eof { .. } => "eof",
|
||||
UnspannedAtomicToken::Error { .. } => "error",
|
||||
UnspannedAtomicToken::Operator { .. } => "operator",
|
||||
UnspannedAtomicToken::CompareOperator { .. } => "compare operator",
|
||||
UnspannedAtomicToken::ShorthandFlag { .. } => "shorthand flag",
|
||||
UnspannedAtomicToken::Whitespace { .. } => "whitespace",
|
||||
UnspannedAtomicToken::Separator { .. } => "separator",
|
||||
UnspannedAtomicToken::Comment { .. } => "comment",
|
||||
UnspannedAtomicToken::Dot { .. } => "dot",
|
||||
UnspannedAtomicToken::DotDot { .. } => "dotdot",
|
||||
UnspannedAtomicToken::Number { .. } => "number",
|
||||
UnspannedAtomicToken::Size { .. } => "size",
|
||||
UnspannedAtomicToken::String { .. } => "string",
|
||||
@@ -95,6 +122,7 @@ impl<'tokens> ShellTypeName for UnspannedAtomicToken<'tokens> {
|
||||
UnspannedAtomicToken::GlobPattern { .. } => "file pattern",
|
||||
UnspannedAtomicToken::Word { .. } => "word",
|
||||
UnspannedAtomicToken::SquareDelimited { .. } => "array literal",
|
||||
UnspannedAtomicToken::RoundDelimited { .. } => "paren delimited",
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -105,6 +133,12 @@ pub struct AtomicToken<'tokens> {
|
||||
pub span: Span,
|
||||
}
|
||||
|
||||
impl<'tokens> HasSpan for AtomicToken<'tokens> {
|
||||
fn span(&self) -> Span {
|
||||
self.span
|
||||
}
|
||||
}
|
||||
|
||||
impl<'tokens> Deref for AtomicToken<'tokens> {
|
||||
type Target = UnspannedAtomicToken<'tokens>;
|
||||
|
||||
@@ -114,7 +148,7 @@ impl<'tokens> Deref for AtomicToken<'tokens> {
|
||||
}
|
||||
|
||||
impl<'tokens> AtomicToken<'tokens> {
|
||||
pub fn into_hir(
|
||||
pub fn to_hir(
|
||||
&self,
|
||||
context: &ExpandContext,
|
||||
expected: &'static str,
|
||||
@@ -127,31 +161,18 @@ impl<'tokens> AtomicToken<'tokens> {
|
||||
))
|
||||
}
|
||||
UnspannedAtomicToken::Error { .. } => {
|
||||
return Err(ParseError::mismatch(
|
||||
expected,
|
||||
"eof atomic token".spanned(self.span),
|
||||
))
|
||||
return Err(ParseError::mismatch(expected, "error".spanned(self.span)))
|
||||
}
|
||||
UnspannedAtomicToken::Operator { .. } => {
|
||||
return Err(ParseError::mismatch(
|
||||
expected,
|
||||
"operator".spanned(self.span),
|
||||
))
|
||||
}
|
||||
UnspannedAtomicToken::ShorthandFlag { .. } => {
|
||||
return Err(ParseError::mismatch(
|
||||
expected,
|
||||
"shorthand flag".spanned(self.span),
|
||||
))
|
||||
}
|
||||
UnspannedAtomicToken::Whitespace { .. } => {
|
||||
return Err(ParseError::mismatch(
|
||||
expected,
|
||||
"whitespace".spanned(self.span),
|
||||
))
|
||||
}
|
||||
UnspannedAtomicToken::Dot { .. } => {
|
||||
return Err(ParseError::mismatch(expected, "dot".spanned(self.span)))
|
||||
UnspannedAtomicToken::RoundDelimited { .. }
|
||||
| UnspannedAtomicToken::CompareOperator { .. }
|
||||
| UnspannedAtomicToken::ShorthandFlag { .. }
|
||||
| UnspannedAtomicToken::Whitespace { .. }
|
||||
| UnspannedAtomicToken::Separator { .. }
|
||||
| UnspannedAtomicToken::Comment { .. }
|
||||
| UnspannedAtomicToken::Dot { .. }
|
||||
| UnspannedAtomicToken::DotDot { .. }
|
||||
| UnspannedAtomicToken::SquareDelimited { .. } => {
|
||||
return Err(ParseError::mismatch(expected, self.spanned_type_name()));
|
||||
}
|
||||
UnspannedAtomicToken::Number { number } => {
|
||||
Expression::number(number.to_number(context.source), self.span)
|
||||
@@ -171,89 +192,55 @@ impl<'tokens> AtomicToken<'tokens> {
|
||||
self.span,
|
||||
),
|
||||
UnspannedAtomicToken::Word { text } => Expression::string(*text, *text),
|
||||
UnspannedAtomicToken::SquareDelimited { .. } => unimplemented!("into_hir"),
|
||||
})
|
||||
}
|
||||
|
||||
#[cfg(not(coloring_in_tokens))]
|
||||
pub fn spanned_type_name(&self) -> Spanned<&'static str> {
|
||||
match &self.unspanned {
|
||||
UnspannedAtomicToken::Eof { .. } => "eof",
|
||||
UnspannedAtomicToken::Error { .. } => "error",
|
||||
UnspannedAtomicToken::Operator { .. } => "operator",
|
||||
UnspannedAtomicToken::ShorthandFlag { .. } => "shorthand flag",
|
||||
UnspannedAtomicToken::Whitespace { .. } => "whitespace",
|
||||
UnspannedAtomicToken::Dot { .. } => "dot",
|
||||
UnspannedAtomicToken::Number { .. } => "number",
|
||||
UnspannedAtomicToken::Size { .. } => "size",
|
||||
UnspannedAtomicToken::String { .. } => "string",
|
||||
UnspannedAtomicToken::ItVariable { .. } => "$it",
|
||||
UnspannedAtomicToken::Variable { .. } => "variable",
|
||||
UnspannedAtomicToken::ExternalCommand { .. } => "external command",
|
||||
UnspannedAtomicToken::ExternalWord { .. } => "external word",
|
||||
UnspannedAtomicToken::GlobPattern { .. } => "file pattern",
|
||||
UnspannedAtomicToken::Word { .. } => "word",
|
||||
UnspannedAtomicToken::SquareDelimited { .. } => "array literal",
|
||||
}
|
||||
.spanned(self.span)
|
||||
}
|
||||
|
||||
pub(crate) fn color_tokens(&self, shapes: &mut Vec<Spanned<FlatShape>>) {
|
||||
match &self.unspanned {
|
||||
UnspannedAtomicToken::Eof { .. } => {}
|
||||
UnspannedAtomicToken::Error { .. } => {
|
||||
return shapes.push(FlatShape::Error.spanned(self.span))
|
||||
}
|
||||
UnspannedAtomicToken::Operator { .. } => {
|
||||
return shapes.push(FlatShape::Operator.spanned(self.span));
|
||||
UnspannedAtomicToken::Error { .. } => shapes.push(FlatShape::Error.spanned(self.span)),
|
||||
UnspannedAtomicToken::CompareOperator { .. } => {
|
||||
shapes.push(FlatShape::CompareOperator.spanned(self.span))
|
||||
}
|
||||
UnspannedAtomicToken::ShorthandFlag { .. } => {
|
||||
return shapes.push(FlatShape::ShorthandFlag.spanned(self.span));
|
||||
shapes.push(FlatShape::ShorthandFlag.spanned(self.span))
|
||||
}
|
||||
UnspannedAtomicToken::Whitespace { .. } => {
|
||||
return shapes.push(FlatShape::Whitespace.spanned(self.span));
|
||||
shapes.push(FlatShape::Whitespace.spanned(self.span))
|
||||
}
|
||||
UnspannedAtomicToken::Number {
|
||||
number: RawNumber::Decimal(_),
|
||||
} => {
|
||||
return shapes.push(FlatShape::Decimal.spanned(self.span));
|
||||
}
|
||||
} => shapes.push(FlatShape::Decimal.spanned(self.span)),
|
||||
UnspannedAtomicToken::Number {
|
||||
number: RawNumber::Int(_),
|
||||
} => {
|
||||
return shapes.push(FlatShape::Int.spanned(self.span));
|
||||
}
|
||||
UnspannedAtomicToken::Size { number, unit } => {
|
||||
return shapes.push(
|
||||
} => shapes.push(FlatShape::Int.spanned(self.span)),
|
||||
UnspannedAtomicToken::Size { number, unit } => shapes.push(
|
||||
FlatShape::Size {
|
||||
number: number.span(),
|
||||
unit: unit.span,
|
||||
}
|
||||
.spanned(self.span),
|
||||
);
|
||||
}
|
||||
),
|
||||
UnspannedAtomicToken::String { .. } => {
|
||||
return shapes.push(FlatShape::String.spanned(self.span))
|
||||
shapes.push(FlatShape::String.spanned(self.span))
|
||||
}
|
||||
UnspannedAtomicToken::ItVariable { .. } => {
|
||||
return shapes.push(FlatShape::ItVariable.spanned(self.span))
|
||||
shapes.push(FlatShape::ItVariable.spanned(self.span))
|
||||
}
|
||||
UnspannedAtomicToken::Variable { .. } => {
|
||||
return shapes.push(FlatShape::Variable.spanned(self.span))
|
||||
shapes.push(FlatShape::Variable.spanned(self.span))
|
||||
}
|
||||
UnspannedAtomicToken::ExternalCommand { .. } => {
|
||||
return shapes.push(FlatShape::ExternalCommand.spanned(self.span));
|
||||
shapes.push(FlatShape::ExternalCommand.spanned(self.span))
|
||||
}
|
||||
UnspannedAtomicToken::ExternalWord { .. } => {
|
||||
return shapes.push(FlatShape::ExternalWord.spanned(self.span))
|
||||
shapes.push(FlatShape::ExternalWord.spanned(self.span))
|
||||
}
|
||||
UnspannedAtomicToken::GlobPattern { .. } => {
|
||||
return shapes.push(FlatShape::GlobPattern.spanned(self.span))
|
||||
shapes.push(FlatShape::GlobPattern.spanned(self.span))
|
||||
}
|
||||
UnspannedAtomicToken::Word { .. } => {
|
||||
return shapes.push(FlatShape::Word.spanned(self.span))
|
||||
}
|
||||
_ => return shapes.push(FlatShape::Error.spanned(self.span)),
|
||||
UnspannedAtomicToken::Word { .. } => shapes.push(FlatShape::Word.spanned(self.span)),
|
||||
_ => shapes.push(FlatShape::Error.spanned(self.span)),
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -301,17 +288,30 @@ impl PrettyDebugWithSource for AtomicToken<'_> {
|
||||
b::intersperse_with_source(nodes.iter(), b::space(), source),
|
||||
"]",
|
||||
),
|
||||
UnspannedAtomicToken::RoundDelimited { nodes, .. } => b::delimit(
|
||||
"(",
|
||||
b::intersperse_with_source(nodes.iter(), b::space(), source),
|
||||
")",
|
||||
),
|
||||
UnspannedAtomicToken::ShorthandFlag { name } => {
|
||||
atom_kind("shorthand flag", b::key(name.slice(source)))
|
||||
}
|
||||
UnspannedAtomicToken::Dot { .. } => atom(b::kind("dot")),
|
||||
UnspannedAtomicToken::Operator { text } => {
|
||||
UnspannedAtomicToken::DotDot { .. } => atom(b::kind("dotdot")),
|
||||
UnspannedAtomicToken::CompareOperator { text } => {
|
||||
atom_kind("operator", b::keyword(text.slice(source)))
|
||||
}
|
||||
UnspannedAtomicToken::Whitespace { text } => atom_kind(
|
||||
"whitespace",
|
||||
b::description(format!("{:?}", text.slice(source))),
|
||||
),
|
||||
UnspannedAtomicToken::Separator { text } => atom_kind(
|
||||
"separator",
|
||||
b::description(format!("{:?}", text.slice(source))),
|
||||
),
|
||||
UnspannedAtomicToken::Comment { body } => {
|
||||
atom_kind("comment", b::description(body.slice(source)))
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
@@ -327,12 +327,15 @@ pub enum WhitespaceHandling {
|
||||
pub struct ExpansionRule {
|
||||
pub(crate) allow_external_command: bool,
|
||||
pub(crate) allow_external_word: bool,
|
||||
pub(crate) allow_operator: bool,
|
||||
pub(crate) allow_cmp_operator: bool,
|
||||
pub(crate) allow_eval_operator: bool,
|
||||
pub(crate) allow_eof: bool,
|
||||
pub(crate) allow_separator: bool,
|
||||
pub(crate) treat_size_as_word: bool,
|
||||
pub(crate) separate_members: bool,
|
||||
pub(crate) commit_errors: bool,
|
||||
pub(crate) whitespace: WhitespaceHandling,
|
||||
pub(crate) allow_comments: bool,
|
||||
}
|
||||
|
||||
impl ExpansionRule {
|
||||
@@ -340,12 +343,15 @@ impl ExpansionRule {
|
||||
ExpansionRule {
|
||||
allow_external_command: false,
|
||||
allow_external_word: false,
|
||||
allow_operator: false,
|
||||
allow_eval_operator: false,
|
||||
allow_cmp_operator: false,
|
||||
allow_eof: false,
|
||||
treat_size_as_word: false,
|
||||
separate_members: false,
|
||||
commit_errors: false,
|
||||
allow_separator: false,
|
||||
whitespace: WhitespaceHandling::RejectWhitespace,
|
||||
allow_comments: false,
|
||||
}
|
||||
}
|
||||
|
||||
@@ -356,11 +362,14 @@ impl ExpansionRule {
|
||||
ExpansionRule {
|
||||
allow_external_command: true,
|
||||
allow_external_word: true,
|
||||
allow_operator: true,
|
||||
allow_cmp_operator: true,
|
||||
allow_eval_operator: true,
|
||||
allow_eof: true,
|
||||
separate_members: false,
|
||||
treat_size_as_word: false,
|
||||
commit_errors: true,
|
||||
allow_separator: true,
|
||||
allow_comments: true,
|
||||
whitespace: WhitespaceHandling::AllowWhitespace,
|
||||
}
|
||||
}
|
||||
@@ -372,14 +381,26 @@ impl ExpansionRule {
    }

    #[allow(unused)]
    pub fn allow_operator(mut self) -> ExpansionRule {
        self.allow_operator = true;
    pub fn allow_cmp_operator(mut self) -> ExpansionRule {
        self.allow_cmp_operator = true;
        self
    }

    #[allow(unused)]
    pub fn no_cmp_operator(mut self) -> ExpansionRule {
        self.allow_cmp_operator = false;
        self
    }

    #[allow(unused)]
    pub fn allow_eval_operator(mut self) -> ExpansionRule {
        self.allow_eval_operator = true;
        self
    }

    #[allow(unused)]
    pub fn no_operator(mut self) -> ExpansionRule {
        self.allow_operator = false;
        self.allow_eval_operator = false;
        self
    }

@@ -436,6 +457,30 @@ impl ExpansionRule {
        self.whitespace = WhitespaceHandling::RejectWhitespace;
        self
    }

    #[allow(unused)]
    pub fn allow_separator(mut self) -> ExpansionRule {
        self.allow_separator = true;
        self
    }

    #[allow(unused)]
    pub fn reject_separator(mut self) -> ExpansionRule {
        self.allow_separator = false;
        self
    }

    #[allow(unused)]
    pub fn allow_comments(mut self) -> ExpansionRule {
        self.allow_comments = true;
        self
    }

    #[allow(unused)]
    pub fn reject_comments(mut self) -> ExpansionRule {
        self.allow_comments = false;
        self
    }
}
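
These `ExpansionRule` methods use a consuming-`self` builder style: each toggle takes the rule by value, flips one flag, and returns it, so a caller can chain several adjustments in one expression. Below is a minimal, self-contained sketch of that pattern; the `Rule` type and its flags are illustrative stand-ins rather than the real `ExpansionRule` API.

// Minimal stand-in for the consuming-`self` builder style used by ExpansionRule.
// `Rule` and its flags are hypothetical; only the pattern mirrors the code above.
#[derive(Debug, Default, Clone, Copy)]
struct Rule {
    allow_cmp_operator: bool,
    allow_separator: bool,
    allow_comments: bool,
}

impl Rule {
    fn new() -> Rule {
        Rule::default()
    }

    // Each method takes `mut self` by value, mutates one flag, and returns self,
    // so callers can chain toggles in a single expression.
    fn allow_cmp_operator(mut self) -> Rule {
        self.allow_cmp_operator = true;
        self
    }

    fn allow_separator(mut self) -> Rule {
        self.allow_separator = true;
        self
    }

    fn reject_comments(mut self) -> Rule {
        self.allow_comments = false;
        self
    }
}

fn main() {
    // Chained configuration, analogous to how a caller might build up an expansion rule.
    let rule = Rule::new().allow_cmp_operator().allow_separator().reject_comments();
    println!("{:?}", rule);
}
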
|
||||
|
||||
pub fn expand_atom<'me, 'content>(
|
||||
@ -469,14 +514,13 @@ fn expand_atom_inner<'me, 'content>(
|
||||
rule: ExpansionRule,
|
||||
) -> Result<AtomicToken<'content>, ParseError> {
|
||||
if token_nodes.at_end() {
|
||||
match rule.allow_eof {
|
||||
true => {
|
||||
if rule.allow_eof {
|
||||
return Ok(UnspannedAtomicToken::Eof {
|
||||
span: Span::unknown(),
|
||||
}
|
||||
.into_atomic_token(Span::unknown()))
|
||||
}
|
||||
false => return Err(ParseError::unexpected_eof("anything", Span::unknown())),
|
||||
.into_atomic_token(Span::unknown()));
|
||||
} else {
|
||||
return Err(ParseError::unexpected_eof("anything", Span::unknown()));
|
||||
}
|
||||
}
|
||||
|
||||
@ -485,9 +529,8 @@ fn expand_atom_inner<'me, 'content>(
|
||||
|
||||
// If treat_size_as_word, don't try to parse the head of the token stream
|
||||
// as a size.
|
||||
match rule.treat_size_as_word {
|
||||
true => {}
|
||||
false => match expand_syntax(&UnitShape, token_nodes, context) {
|
||||
if !rule.treat_size_as_word {
|
||||
match expand_syntax(&UnitShape, token_nodes, context) {
|
||||
// If the head of the stream isn't a valid unit, we'll try to parse
|
||||
// it again next as a word
|
||||
Err(_) => {}
|
||||
@ -497,12 +540,10 @@ fn expand_atom_inner<'me, 'content>(
|
||||
unit: (number, unit),
|
||||
span,
|
||||
}) => return Ok(UnspannedAtomicToken::Size { number, unit }.into_atomic_token(span)),
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
match rule.separate_members {
|
||||
false => {}
|
||||
true => {
|
||||
if rule.separate_members {
|
||||
let mut next = token_nodes.peek_any();
|
||||
|
||||
match next.node {
|
||||
@ -523,7 +564,6 @@ fn expand_atom_inner<'me, 'content>(
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Try to parse the head of the stream as a bare path. A bare path includes
|
||||
// words as well as `.`s, connected together without whitespace.
|
||||
@ -574,6 +614,17 @@ fn expand_atom_inner<'me, 'content>(
|
||||
.into_atomic_token(error.span));
|
||||
}
|
||||
|
||||
TokenNode::Separator(span) if rule.allow_separator => {
|
||||
peeked.commit();
|
||||
return Ok(UnspannedAtomicToken::Separator { text: *span }.into_atomic_token(span));
|
||||
}
|
||||
|
||||
TokenNode::Comment(comment) if rule.allow_comments => {
|
||||
peeked.commit();
|
||||
return Ok(UnspannedAtomicToken::Comment { body: comment.text }
|
||||
.into_atomic_token(comment.span()));
|
||||
}
|
||||
|
||||
// [ ... ]
|
||||
TokenNode::Delimited(Spanned {
|
||||
item:
|
||||
@ -645,8 +696,16 @@ fn expand_atom_inner<'me, 'content>(
|
||||
// First, the error cases. Each error case corresponds to a expansion rule
|
||||
// flag that can be used to allow the case
|
||||
|
||||
// rule.allow_operator
|
||||
UnspannedToken::Operator(_) if !rule.allow_operator => return Err(err.error()),
|
||||
// rule.allow_cmp_operator
|
||||
UnspannedToken::CompareOperator(_) if !rule.allow_cmp_operator => {
|
||||
return Err(err.error())
|
||||
}
|
||||
|
||||
// rule.allow_eval_operator
|
||||
UnspannedToken::EvaluationOperator(_) if !rule.allow_eval_operator => {
|
||||
return Err(err.error())
|
||||
}
|
||||
|
||||
// rule.allow_external_command
|
||||
UnspannedToken::ExternalCommand(_) if !rule.allow_external_command => {
|
||||
return Err(ParseError::mismatch(
|
||||
@ -665,8 +724,15 @@ fn expand_atom_inner<'me, 'content>(
|
||||
UnspannedToken::Number(number) => {
|
||||
UnspannedAtomicToken::Number { number }.into_atomic_token(token_span)
|
||||
}
|
||||
UnspannedToken::Operator(_) => {
|
||||
UnspannedAtomicToken::Operator { text: token_span }.into_atomic_token(token_span)
|
||||
UnspannedToken::CompareOperator(_) => {
|
||||
UnspannedAtomicToken::CompareOperator { text: token_span }
|
||||
.into_atomic_token(token_span)
|
||||
}
|
||||
UnspannedToken::EvaluationOperator(EvaluationOperator::Dot) => {
|
||||
UnspannedAtomicToken::Dot { text: token_span }.into_atomic_token(token_span)
|
||||
}
|
||||
UnspannedToken::EvaluationOperator(EvaluationOperator::DotDot) => {
|
||||
UnspannedAtomicToken::DotDot { text: token_span }.into_atomic_token(token_span)
|
||||
}
|
||||
UnspannedToken::String(body) => {
|
||||
UnspannedAtomicToken::String { body }.into_atomic_token(token_span)
|
@@ -0,0 +1,24 @@
use crate::hir::syntax_shape::ExpandSyntax;
use crate::hir::SpannedExpression;
use crate::{hir, hir::TokensIterator};
use nu_errors::ParseError;

#[derive(Debug, Copy, Clone)]
pub struct DelimitedSquareShape;

impl ExpandSyntax for DelimitedSquareShape {
    type Output = Result<SpannedExpression, ParseError>;

    fn name(&self) -> &'static str {
        "delimited square"
    }

    fn expand<'a, 'b>(
        &self,
        token_nodes: &'b mut TokensIterator<'a>,
    ) -> Result<SpannedExpression, ParseError> {
        let exprs = token_nodes.square()?;

        Ok(hir::Expression::list(exprs.item).into_expr(exprs.span))
    }
}
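
Each of these new files follows the same pattern: one unit struct per syntax shape, implementing `ExpandSyntax` with a `name()` used in error reporting and an `expand()` that pulls tokens from a `TokensIterator`. The following is a rough, self-contained sketch of that shape-per-struct idea; the `Shape` trait and `Tokens` type here are simplified stand-ins, not the actual nu-parser definitions.

// Simplified sketch of the shape-per-struct design; the trait and types below are
// illustrative stand-ins, not the real ExpandSyntax / TokensIterator definitions.
#[derive(Debug)]
struct Tokens {
    words: Vec<String>,
    index: usize,
}

trait Shape {
    type Output;

    // A human-readable name, used in error messages.
    fn name(&self) -> &'static str;

    // Try to consume tokens and produce this shape's output.
    fn expand(&self, tokens: &mut Tokens) -> Self::Output;
}

#[derive(Debug, Copy, Clone)]
struct WordShape;

impl Shape for WordShape {
    type Output = Result<String, String>;

    fn name(&self) -> &'static str {
        "word"
    }

    fn expand(&self, tokens: &mut Tokens) -> Result<String, String> {
        if let Some(word) = tokens.words.get(tokens.index).cloned() {
            tokens.index += 1;
            Ok(word)
        } else {
            Err(format!("expected {}, found end of input", self.name()))
        }
    }
}

fn main() {
    let mut tokens = Tokens { words: vec!["ls".to_string()], index: 0 };
    println!("{:?}", WordShape.expand(&mut tokens));
}
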
@@ -0,0 +1,62 @@
use crate::hir::syntax_shape::{
    expression::expand_file_path, BarePathShape, DecimalShape, ExpandContext, ExpandSyntax,
    FlatShape, IntShape, StringShape,
};
use crate::hir::{Expression, SpannedExpression, TokensIterator};
use crate::parse::token_tree::ExternalWordType;
use nu_errors::ParseError;
use nu_source::{HasSpan, Span};

#[derive(Debug, Copy, Clone)]
pub struct FilePathShape;

impl ExpandSyntax for FilePathShape {
    type Output = Result<SpannedExpression, ParseError>;

    fn name(&self) -> &'static str {
        "file path"
    }

    fn expand<'a, 'b>(
        &self,
        token_nodes: &mut TokensIterator<'_>,
    ) -> Result<SpannedExpression, ParseError> {
        token_nodes
            .expand_syntax(BarePathShape)
            .or_else(|_| token_nodes.expand_syntax(ExternalWordShape))
            .map(|span| file_path(span, token_nodes.context()).into_expr(span))
            .or_else(|_| {
                token_nodes.expand_syntax(StringShape).map(|syntax| {
                    file_path(syntax.inner, token_nodes.context()).into_expr(syntax.span)
                })
            })
            .or_else(|_| {
                token_nodes
                    .expand_syntax(IntShape)
                    .or_else(|_| token_nodes.expand_syntax(DecimalShape))
                    .map(|number| {
                        file_path(number.span(), token_nodes.context()).into_expr(number.span())
                    })
            })
            .map_err(|_| token_nodes.err_next_token("file path"))
    }
}

fn file_path(text: Span, context: &ExpandContext) -> Expression {
    Expression::FilePath(expand_file_path(text.slice(context.source), context))
}

#[derive(Debug, Copy, Clone)]
pub struct ExternalWordShape;

impl ExpandSyntax for ExternalWordShape {
    type Output = Result<Span, ParseError>;

    fn name(&self) -> &'static str {
        "external word"
    }

    fn expand<'a, 'b>(&self, token_nodes: &'b mut TokensIterator<'a>) -> Result<Span, ParseError> {
        token_nodes.expand_token(ExternalWordType, |span| Ok((FlatShape::ExternalWord, span)))
    }
}
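
`FilePathShape::expand` above tries alternatives in priority order (bare path, external word, string, then int or decimal) by chaining `Result::or_else`, and collapses any remaining failure into a single "file path" error with `map_err`. A small, self-contained illustration of that fallback-chain idiom follows; the `parse_*` helpers are hypothetical stand-ins, not nu-parser functions.

// Illustrative fallback chain built from Result::or_else, mirroring the style of
// FilePathShape::expand above; the parse_* helpers are hypothetical stand-ins.
fn parse_bare(input: &str) -> Result<String, ()> {
    if !input.is_empty() && !input.contains(' ') && !input.starts_with('"') {
        Ok(input.to_string())
    } else {
        Err(())
    }
}

fn parse_quoted(input: &str) -> Result<String, ()> {
    let trimmed = input.strip_prefix('"').and_then(|s| s.strip_suffix('"'));
    trimmed.map(|s| s.to_string()).ok_or(())
}

fn parse_number(input: &str) -> Result<String, ()> {
    input.parse::<i64>().map(|n| n.to_string()).map_err(|_| ())
}

fn parse_file_path(input: &str) -> Result<String, String> {
    // Try each alternative in priority order; each or_else only runs if the
    // previous attempt failed. The final map_err produces one unified error.
    parse_bare(input)
        .or_else(|_| parse_quoted(input))
        .or_else(|_| parse_number(input))
        .map_err(|_| format!("expected file path, found {:?}", input))
}

fn main() {
    println!("{:?}", parse_file_path("\"my file.txt\""));
    println!("{:?}", parse_file_path(""));
}
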
crates/nu-parser/src/hir/syntax_shape/expression/list.rs (new file, 170 lines)
@ -0,0 +1,170 @@
|
||||
use crate::hir::syntax_shape::flat_shape::FlatShape;
|
||||
use crate::{
|
||||
hir,
|
||||
hir::syntax_shape::{AnyExpressionShape, ExpandSyntax, MaybeSpaceShape},
|
||||
hir::TokensIterator,
|
||||
};
|
||||
use derive_new::new;
|
||||
use nu_source::{b, DebugDocBuilder, HasSpan, PrettyDebugWithSource, Span, Spanned, SpannedItem};
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct ExpressionListSyntax {
|
||||
pub exprs: Spanned<Vec<hir::SpannedExpression>>,
|
||||
}
|
||||
|
||||
impl HasSpan for ExpressionListSyntax {
|
||||
fn span(&self) -> Span {
|
||||
self.exprs.span
|
||||
}
|
||||
}
|
||||
|
||||
impl PrettyDebugWithSource for ExpressionListSyntax {
|
||||
fn pretty_debug(&self, source: &str) -> DebugDocBuilder {
|
||||
b::intersperse(
|
||||
self.exprs.iter().map(|e| e.pretty_debug(source)),
|
||||
b::space(),
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Copy, Clone)]
|
||||
pub struct ExpressionListShape;
|
||||
|
||||
impl ExpandSyntax for ExpressionListShape {
|
||||
type Output = ExpressionListSyntax;
|
||||
|
||||
fn name(&self) -> &'static str {
|
||||
"expression list"
|
||||
}
|
||||
|
||||
fn expand<'a, 'b>(&self, token_nodes: &mut TokensIterator<'_>) -> ExpressionListSyntax {
|
||||
// We encountered a parsing error and will continue with simpler coloring ("backoff
|
||||
// coloring mode")
|
||||
let mut backoff = false;
|
||||
|
||||
let mut exprs = vec![];
|
||||
|
||||
let start = token_nodes.span_at_cursor();
|
||||
|
||||
token_nodes.expand_infallible(MaybeSpaceShape);
|
||||
|
||||
if token_nodes.at_end() {
|
||||
return ExpressionListSyntax {
|
||||
exprs: exprs.spanned(start),
|
||||
};
|
||||
}
|
||||
|
||||
let expr = token_nodes.expand_syntax(AnyExpressionShape);
|
||||
|
||||
match expr {
|
||||
Ok(expr) => exprs.push(expr),
|
||||
Err(_) => backoff = true,
|
||||
}
|
||||
|
||||
loop {
|
||||
if token_nodes.at_end() {
|
||||
let end = token_nodes.span_at_cursor();
|
||||
return ExpressionListSyntax {
|
||||
exprs: exprs.spanned(start.until(end)),
|
||||
};
|
||||
}
|
||||
|
||||
if backoff {
|
||||
let len = token_nodes.state().shapes().len();
|
||||
|
||||
// If we previously encountered a parsing error, use backoff coloring mode
|
||||
token_nodes
|
||||
.expand_infallible(SimplestExpression::new(vec!["expression".to_string()]));
|
||||
|
||||
if len == token_nodes.state().shapes().len() && !token_nodes.at_end() {
|
||||
// This should never happen, but if it does, a panic is better than an infinite loop
|
||||
panic!("Unexpected tokens left that couldn't be colored even with SimplestExpression")
|
||||
}
|
||||
} else {
|
||||
let expr = token_nodes.atomic_parse(|token_nodes| {
|
||||
token_nodes.expand_infallible(MaybeSpaceShape);
|
||||
token_nodes.expand_syntax(AnyExpressionShape)
|
||||
});
|
||||
|
||||
match expr {
|
||||
Ok(expr) => exprs.push(expr),
|
||||
Err(_) => {
|
||||
backoff = true;
|
||||
}
|
||||
}
|
||||
// Otherwise, move on to the next expression
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// BackoffColoringMode consumes all of the remaining tokens in an infallible way
|
||||
#[derive(Debug, Clone, new)]
|
||||
pub struct BackoffColoringMode {
|
||||
allowed: Vec<String>,
|
||||
}
|
||||
|
||||
impl ExpandSyntax for BackoffColoringMode {
|
||||
type Output = Option<Span>;
|
||||
|
||||
fn name(&self) -> &'static str {
|
||||
"BackoffColoringMode"
|
||||
}
|
||||
|
||||
fn expand<'a, 'b>(&self, token_nodes: &'b mut TokensIterator<'a>) -> Option<Span> {
|
||||
loop {
|
||||
if token_nodes.at_end() {
|
||||
break;
|
||||
}
|
||||
|
||||
let len = token_nodes.state().shapes().len();
|
||||
token_nodes.expand_infallible(SimplestExpression::new(self.allowed.clone()));
|
||||
|
||||
if len == token_nodes.state().shapes().len() && !token_nodes.at_end() {
|
||||
// This shouldn't happen, but if it does, a panic is better than an infinite loop
|
||||
panic!("SimplestExpression failed to consume any tokens, but it's not at the end. This is unexpected\n== token nodes==\n{:#?}\n\n== shapes ==\n{:#?}", token_nodes, token_nodes.state().shapes());
|
||||
}
|
||||
}
|
||||
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
/// The point of `SimplestExpression` is to serve as an infallible base case for coloring.
|
||||
/// As a last ditch effort, if we can't find any way to parse the head of the stream as an
|
||||
/// expression, fall back to simple coloring.
|
||||
#[derive(Debug, Clone, new)]
|
||||
pub struct SimplestExpression {
|
||||
valid_shapes: Vec<String>,
|
||||
}
|
||||
|
||||
impl ExpandSyntax for SimplestExpression {
|
||||
type Output = Span;
|
||||
|
||||
fn name(&self) -> &'static str {
|
||||
"SimplestExpression"
|
||||
}
|
||||
|
||||
fn expand<'a, 'b>(&self, token_nodes: &'b mut TokensIterator<'a>) -> Span {
|
||||
if token_nodes.at_end() {
|
||||
return Span::unknown();
|
||||
}
|
||||
|
||||
let source = token_nodes.source();
|
||||
|
||||
let peeked = token_nodes.peek();
|
||||
|
||||
match peeked.not_eof("simplest expression") {
|
||||
Err(_) => token_nodes.span_at_cursor(),
|
||||
Ok(peeked) => {
|
||||
let token = peeked.commit();
|
||||
|
||||
for shape in FlatShape::shapes(token, &source) {
|
||||
token_nodes.color_err(shape, self.valid_shapes.clone())
|
||||
}
|
||||
|
||||
token.span()
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
crates/nu-parser/src/hir/syntax_shape/expression/number.rs (new file, 109 lines)
@ -0,0 +1,109 @@
|
||||
use crate::hir::syntax_shape::{ExpandSyntax, FlatShape};
|
||||
use crate::hir::{Expression, SpannedExpression};
|
||||
use crate::hir::{RawNumber, TokensIterator};
|
||||
use crate::parse::token_tree::{DecimalType, IntType};
|
||||
use nu_errors::ParseError;
|
||||
use nu_source::HasSpan;
|
||||
|
||||
#[derive(Debug, Copy, Clone)]
|
||||
pub struct NumberExpressionShape;
|
||||
|
||||
impl ExpandSyntax for NumberExpressionShape {
|
||||
type Output = Result<SpannedExpression, ParseError>;
|
||||
|
||||
fn name(&self) -> &'static str {
|
||||
"number"
|
||||
}
|
||||
|
||||
fn expand<'a, 'b>(
|
||||
&self,
|
||||
token_nodes: &mut TokensIterator<'_>,
|
||||
) -> Result<SpannedExpression, ParseError> {
|
||||
let source = token_nodes.source();
|
||||
|
||||
token_nodes
|
||||
.expand_syntax(NumberShape)
|
||||
.map(|number| Expression::number(number.to_number(&source)).into_expr(number.span()))
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Copy, Clone)]
|
||||
pub struct IntExpressionShape;
|
||||
|
||||
impl ExpandSyntax for IntExpressionShape {
|
||||
type Output = Result<SpannedExpression, ParseError>;
|
||||
|
||||
fn name(&self) -> &'static str {
|
||||
"integer"
|
||||
}
|
||||
|
||||
fn expand<'a, 'b>(
|
||||
&self,
|
||||
token_nodes: &mut TokensIterator<'_>,
|
||||
) -> Result<SpannedExpression, ParseError> {
|
||||
let source = token_nodes.source();
|
||||
|
||||
token_nodes.expand_token(IntType, |number| {
|
||||
Ok((
|
||||
FlatShape::Int,
|
||||
Expression::number(number.to_number(&source)),
|
||||
))
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Copy, Clone)]
|
||||
pub struct IntShape;
|
||||
|
||||
impl ExpandSyntax for IntShape {
|
||||
type Output = Result<RawNumber, ParseError>;
|
||||
|
||||
fn name(&self) -> &'static str {
|
||||
"integer"
|
||||
}
|
||||
|
||||
fn expand<'a, 'b>(
|
||||
&self,
|
||||
token_nodes: &'b mut TokensIterator<'a>,
|
||||
) -> Result<RawNumber, ParseError> {
|
||||
token_nodes.expand_token(IntType, |number| Ok((FlatShape::Int, number)))
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Copy, Clone)]
|
||||
pub struct DecimalShape;
|
||||
|
||||
impl ExpandSyntax for DecimalShape {
|
||||
type Output = Result<RawNumber, ParseError>;
|
||||
|
||||
fn name(&self) -> &'static str {
|
||||
"decimal"
|
||||
}
|
||||
|
||||
fn expand<'a, 'b>(
|
||||
&self,
|
||||
token_nodes: &'b mut TokensIterator<'a>,
|
||||
) -> Result<RawNumber, ParseError> {
|
||||
token_nodes.expand_token(DecimalType, |number| Ok((FlatShape::Decimal, number)))
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Copy, Clone)]
|
||||
pub struct NumberShape;
|
||||
|
||||
impl ExpandSyntax for NumberShape {
|
||||
type Output = Result<RawNumber, ParseError>;
|
||||
|
||||
fn name(&self) -> &'static str {
|
||||
"decimal"
|
||||
}
|
||||
|
||||
fn expand<'a, 'b>(
|
||||
&self,
|
||||
token_nodes: &'b mut TokensIterator<'a>,
|
||||
) -> Result<RawNumber, ParseError> {
|
||||
token_nodes
|
||||
.expand_syntax(IntShape)
|
||||
.or_else(|_| token_nodes.expand_syntax(DecimalShape))
|
||||
}
|
||||
}
|
crates/nu-parser/src/hir/syntax_shape/expression/pattern.rs (new file, 86 lines)
@ -0,0 +1,86 @@
|
||||
use crate::hir::syntax_shape::{
|
||||
expand_bare, expression::expand_file_path, BarePathShape, ExpandContext, ExpandSyntax,
|
||||
ExternalWordShape, StringShape,
|
||||
};
|
||||
use crate::hir::{Expression, SpannedExpression};
|
||||
use crate::parse::operator::EvaluationOperator;
|
||||
use crate::{hir, hir::TokensIterator, Token};
|
||||
use nu_errors::ParseError;
|
||||
use nu_source::Span;
|
||||
|
||||
#[derive(Debug, Copy, Clone)]
|
||||
pub struct PatternShape;
|
||||
|
||||
impl ExpandSyntax for PatternShape {
|
||||
type Output = Result<SpannedExpression, ParseError>;
|
||||
|
||||
fn name(&self) -> &'static str {
|
||||
"glob pattern"
|
||||
}
|
||||
|
||||
fn expand<'a, 'b>(
|
||||
&self,
|
||||
token_nodes: &mut TokensIterator<'_>,
|
||||
) -> Result<hir::SpannedExpression, ParseError> {
|
||||
let (inner, outer) = token_nodes
|
||||
.expand_syntax(BarePatternShape)
|
||||
.or_else(|_| token_nodes.expand_syntax(BarePathShape))
|
||||
.or_else(|_| token_nodes.expand_syntax(ExternalWordShape))
|
||||
.map(|span| (span, span))
|
||||
.or_else(|_| {
|
||||
token_nodes
|
||||
.expand_syntax(StringShape)
|
||||
.map(|syntax| (syntax.inner, syntax.span))
|
||||
})
|
||||
.map_err(|_| token_nodes.err_next_token("glob pattern"))?;
|
||||
|
||||
Ok(file_pattern(inner, outer, token_nodes.context()))
|
||||
}
|
||||
}
|
||||
|
||||
fn file_pattern(body: Span, outer: Span, context: &ExpandContext) -> SpannedExpression {
|
||||
let path = expand_file_path(body.slice(context.source), context);
|
||||
Expression::pattern(path.to_string_lossy()).into_expr(outer)
|
||||
}
|
||||
|
||||
#[derive(Debug, Copy, Clone)]
|
||||
pub struct PatternExpressionShape;
|
||||
|
||||
impl ExpandSyntax for PatternExpressionShape {
|
||||
type Output = Result<SpannedExpression, ParseError>;
|
||||
|
||||
fn name(&self) -> &'static str {
|
||||
"pattern"
|
||||
}
|
||||
|
||||
fn expand<'a, 'b>(
|
||||
&self,
|
||||
token_nodes: &'b mut TokensIterator<'a>,
|
||||
) -> Result<SpannedExpression, ParseError> {
|
||||
token_nodes.expand_syntax(BarePatternShape).map(|span| {
|
||||
let path = expand_file_path(span.slice(&token_nodes.source()), token_nodes.context());
|
||||
Expression::pattern(path.to_string_lossy()).into_expr(span)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Copy, Clone)]
|
||||
pub struct BarePatternShape;
|
||||
|
||||
impl ExpandSyntax for BarePatternShape {
|
||||
type Output = Result<Span, ParseError>;
|
||||
|
||||
fn name(&self) -> &'static str {
|
||||
"bare pattern"
|
||||
}
|
||||
|
||||
fn expand<'a, 'b>(&self, token_nodes: &'b mut TokensIterator<'a>) -> Result<Span, ParseError> {
|
||||
expand_bare(token_nodes, |token| match token.unspanned() {
|
||||
Token::Bare
|
||||
| Token::EvaluationOperator(EvaluationOperator::Dot)
|
||||
| Token::GlobPattern => true,
|
||||
|
||||
_ => false,
|
||||
})
|
||||
}
|
||||
}
|
crates/nu-parser/src/hir/syntax_shape/expression/range.rs (new file, 47 lines)
@@ -0,0 +1,47 @@
use crate::hir::syntax_shape::{AnyExpressionStartShape, ExpandSyntax, FlatShape};
use crate::hir::TokensIterator;
use crate::hir::{Expression, SpannedExpression};
use crate::parse::token_tree::DotDotType;
use nu_errors::ParseError;
use nu_source::{HasSpan, Span};

#[derive(Debug, Copy, Clone)]
pub struct RangeShape;

impl ExpandSyntax for RangeShape {
    type Output = Result<SpannedExpression, ParseError>;

    fn name(&self) -> &'static str {
        "range"
    }

    fn expand<'a, 'b>(
        &self,
        token_nodes: &'b mut TokensIterator<'a>,
    ) -> Result<SpannedExpression, ParseError> {
        token_nodes.atomic_parse(|token_nodes| {
            let left = token_nodes.expand_syntax(AnyExpressionStartShape)?;
            let dotdot = token_nodes.expand_syntax(DotDotShape)?;
            let right = token_nodes.expand_syntax(AnyExpressionStartShape)?;

            let span = left.span.until(right.span);

            Ok(Expression::range(left, dotdot, right).into_expr(span))
        })
    }
}

#[derive(Debug, Copy, Clone)]
struct DotDotShape;

impl ExpandSyntax for DotDotShape {
    type Output = Result<Span, ParseError>;

    fn name(&self) -> &'static str {
        "dotdot"
    }

    fn expand<'a, 'b>(&self, token_nodes: &'b mut TokensIterator<'a>) -> Result<Span, ParseError> {
        token_nodes.expand_token(DotDotType, |token| Ok((FlatShape::DotDot, token.span())))
    }
}
crates/nu-parser/src/hir/syntax_shape/expression/string.rs (new file, 103 lines)
@ -0,0 +1,103 @@
|
||||
use crate::hir::syntax_shape::{ExpandSyntax, FlatShape, NumberShape, VariableShape};
|
||||
use crate::hir::TokensIterator;
|
||||
use crate::hir::{Expression, SpannedExpression};
|
||||
use crate::parse::token_tree::{BareType, StringType};
|
||||
use nu_errors::ParseError;
|
||||
use nu_source::{b, DebugDocBuilder, HasSpan, PrettyDebugWithSource, Span};
|
||||
|
||||
#[derive(Debug, Copy, Clone)]
|
||||
pub struct CoerceStringShape;
|
||||
|
||||
impl ExpandSyntax for CoerceStringShape {
|
||||
type Output = Result<SpannedExpression, ParseError>;
|
||||
|
||||
fn name(&self) -> &'static str {
|
||||
"StringShape"
|
||||
}
|
||||
|
||||
fn expand<'a, 'b>(
|
||||
&self,
|
||||
token_nodes: &'b mut TokensIterator<'a>,
|
||||
) -> Result<SpannedExpression, ParseError> {
|
||||
token_nodes
|
||||
.expand_token(StringType, |(inner, outer)| {
|
||||
Ok((
|
||||
FlatShape::String,
|
||||
Expression::string(inner).into_expr(outer),
|
||||
))
|
||||
})
|
||||
.or_else(|_| {
|
||||
token_nodes.expand_token(BareType, |span| {
|
||||
Ok((FlatShape::String, Expression::string(span).into_expr(span)))
|
||||
})
|
||||
})
|
||||
.or_else(|_| {
|
||||
token_nodes
|
||||
.expand_syntax(NumberShape)
|
||||
.map(|number| Expression::string(number.span()).into_expr(number.span()))
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Copy, Clone)]
|
||||
pub struct StringExpressionShape;
|
||||
|
||||
impl ExpandSyntax for StringExpressionShape {
|
||||
type Output = Result<SpannedExpression, ParseError>;
|
||||
|
||||
fn name(&self) -> &'static str {
|
||||
"string"
|
||||
}
|
||||
|
||||
fn expand<'a, 'b>(
|
||||
&self,
|
||||
token_nodes: &'b mut TokensIterator<'a>,
|
||||
) -> Result<SpannedExpression, ParseError> {
|
||||
token_nodes.expand_syntax(VariableShape).or_else(|_| {
|
||||
token_nodes.expand_token(StringType, |(inner, outer)| {
|
||||
Ok((
|
||||
FlatShape::String,
|
||||
Expression::string(inner).into_expr(outer),
|
||||
))
|
||||
})
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Copy, Clone)]
|
||||
pub struct StringSyntax {
|
||||
pub inner: Span,
|
||||
pub span: Span,
|
||||
}
|
||||
|
||||
impl HasSpan for StringSyntax {
|
||||
fn span(&self) -> Span {
|
||||
self.span
|
||||
}
|
||||
}
|
||||
|
||||
impl PrettyDebugWithSource for StringSyntax {
|
||||
fn pretty_debug(&self, source: &str) -> DebugDocBuilder {
|
||||
b::primitive(self.span.slice(source))
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Copy, Clone)]
|
||||
pub struct StringShape;
|
||||
|
||||
impl ExpandSyntax for StringShape {
|
||||
type Output = Result<StringSyntax, ParseError>;
|
||||
|
||||
fn name(&self) -> &'static str {
|
||||
"string"
|
||||
}
|
||||
|
||||
fn expand<'a, 'b>(
|
||||
&self,
|
||||
token_nodes: &'b mut TokensIterator<'a>,
|
||||
) -> Result<StringSyntax, ParseError> {
|
||||
token_nodes.expand_token(StringType, |(inner, outer)| {
|
||||
Ok((FlatShape::String, StringSyntax { inner, span: outer }))
|
||||
})
|
||||
}
|
||||
}
|
@ -1,15 +1,19 @@
|
||||
use crate::parser::hir::syntax_shape::{ExpandContext, ExpandSyntax, ParseError};
|
||||
use crate::parser::parse::tokens::RawNumber;
|
||||
use crate::parser::parse::tokens::Token;
|
||||
use crate::parser::parse::unit::Unit;
|
||||
use crate::parser::{hir::TokensIterator, TokenNode, UnspannedToken};
|
||||
use crate::prelude::*;
|
||||
use crate::hir::syntax_shape::flat_shape::FlatShape;
|
||||
use crate::hir::syntax_shape::ExpandSyntax;
|
||||
use crate::hir::TokensIterator;
|
||||
use crate::hir::{Expression, SpannedExpression};
|
||||
use crate::parse::number::RawNumber;
|
||||
use crate::parse::token_tree::BareType;
|
||||
use crate::parse::unit::Unit;
|
||||
use nom::branch::alt;
|
||||
use nom::bytes::complete::tag;
|
||||
use nom::character::complete::digit1;
|
||||
use nom::combinator::{all_consuming, opt, value};
|
||||
use nom::IResult;
|
||||
use nu_source::{Span, Spanned};
|
||||
use nu_errors::ParseError;
|
||||
use nu_source::{
|
||||
b, DebugDocBuilder, HasSpan, PrettyDebugWithSource, Span, Spanned, SpannedItem, Text,
|
||||
};
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct UnitSyntax {
|
||||
@ -17,6 +21,17 @@ pub struct UnitSyntax {
|
||||
pub span: Span,
|
||||
}
|
||||
|
||||
impl UnitSyntax {
|
||||
pub fn into_expr(self, source: &Text) -> SpannedExpression {
|
||||
let UnitSyntax {
|
||||
unit: (number, unit),
|
||||
span,
|
||||
} = self;
|
||||
|
||||
Expression::size(number.to_number(source), *unit).into_expr(span)
|
||||
}
|
||||
}
|
||||
|
||||
impl PrettyDebugWithSource for UnitSyntax {
|
||||
fn pretty_debug(&self, source: &str) -> DebugDocBuilder {
|
||||
b::typed(
|
||||
@ -32,42 +47,60 @@ impl HasSpan for UnitSyntax {
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Copy, Clone)]
|
||||
pub struct UnitExpressionShape;
|
||||
|
||||
impl ExpandSyntax for UnitExpressionShape {
|
||||
type Output = Result<SpannedExpression, ParseError>;
|
||||
|
||||
fn name(&self) -> &'static str {
|
||||
"unit expression"
|
||||
}
|
||||
|
||||
fn expand<'a, 'b>(
|
||||
&self,
|
||||
token_nodes: &'b mut TokensIterator<'a>,
|
||||
) -> Result<SpannedExpression, ParseError> {
|
||||
token_nodes
|
||||
.expand_syntax(UnitShape)
|
||||
.map(|unit| unit.into_expr(&token_nodes.source()))
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Copy, Clone)]
|
||||
pub struct UnitShape;
|
||||
|
||||
impl ExpandSyntax for UnitShape {
|
||||
type Output = UnitSyntax;
|
||||
type Output = Result<UnitSyntax, ParseError>;
|
||||
|
||||
fn name(&self) -> &'static str {
|
||||
"unit"
|
||||
}
|
||||
|
||||
fn expand_syntax<'a, 'b>(
|
||||
fn expand<'a, 'b>(
|
||||
&self,
|
||||
token_nodes: &'b mut TokensIterator<'a>,
|
||||
context: &ExpandContext,
|
||||
) -> Result<UnitSyntax, ParseError> {
|
||||
let peeked = token_nodes.peek_any().not_eof("unit")?;
|
||||
let source = token_nodes.source();
|
||||
|
||||
let span = match peeked.node {
|
||||
TokenNode::Token(Token {
|
||||
unspanned: UnspannedToken::Bare,
|
||||
span,
|
||||
}) => *span,
|
||||
_ => return Err(peeked.type_error("unit")),
|
||||
};
|
||||
|
||||
let unit = unit_size(span.slice(context.source), span);
|
||||
token_nodes.expand_token(BareType, |span| {
|
||||
let unit = unit_size(span.slice(&source), span);
|
||||
|
||||
let (_, (number, unit)) = match unit {
|
||||
Err(_) => return Err(ParseError::mismatch("unit", "word".spanned(span))),
|
||||
Ok((number, unit)) => (number, unit),
|
||||
};
|
||||
|
||||
peeked.commit();
|
||||
Ok(UnitSyntax {
|
||||
Ok((
|
||||
FlatShape::Size {
|
||||
number: number.span(),
|
||||
unit: unit.span,
|
||||
},
|
||||
UnitSyntax {
|
||||
unit: (number, unit),
|
||||
span,
|
||||
},
|
||||
))
|
||||
})
|
||||
}
|
||||
}
|
@ -0,0 +1,644 @@
|
||||
use crate::hir::syntax_shape::{
|
||||
AnyExpressionShape, BareShape, ExpandSyntax, FlatShape, IntShape, ParseError, StringShape,
|
||||
WhitespaceShape,
|
||||
};
|
||||
use crate::hir::{Expression, SpannedExpression, TokensIterator};
|
||||
use crate::parse::token_tree::{CompareOperatorType, DotDotType, DotType, ItVarType, VarType};
|
||||
use crate::{hir, CompareOperator};
|
||||
use nu_protocol::{PathMember, ShellTypeName};
|
||||
use nu_source::{
|
||||
b, DebugDocBuilder, HasSpan, PrettyDebug, PrettyDebugWithSource, Span, Spanned, SpannedItem,
|
||||
Tag, Tagged, TaggedItem, Text,
|
||||
};
|
||||
use num_bigint::BigInt;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::str::FromStr;
|
||||
|
||||
#[derive(Debug, Copy, Clone)]
|
||||
pub struct VariablePathShape;
|
||||
|
||||
impl ExpandSyntax for VariablePathShape {
|
||||
type Output = Result<SpannedExpression, ParseError>;
|
||||
|
||||
fn name(&self) -> &'static str {
|
||||
"variable path"
|
||||
}
|
||||
|
||||
fn expand<'a, 'b>(
|
||||
&self,
|
||||
token_nodes: &mut TokensIterator<'_>,
|
||||
) -> Result<SpannedExpression, ParseError> {
|
||||
// 1. let the head be the first token, expecting a variable
|
||||
// 2. let the tail be an empty list of members
|
||||
// 2. while the next token (excluding ws) is a dot:
|
||||
// 1. consume the dot
|
||||
// 2. consume the next token as a member and push it onto tail
|
||||
|
||||
let head = token_nodes.expand_syntax(VariableShape)?;
|
||||
let start = head.span;
|
||||
let mut end = start;
|
||||
let mut tail: Vec<PathMember> = vec![];
|
||||
|
||||
loop {
|
||||
if token_nodes.expand_syntax(DotShape).is_err() {
|
||||
break;
|
||||
}
|
||||
|
||||
let member = token_nodes.expand_syntax(MemberShape)?;
|
||||
let member = member.to_path_member(&token_nodes.source());
|
||||
|
||||
end = member.span;
|
||||
tail.push(member);
|
||||
}
|
||||
|
||||
Ok(Expression::path(head, tail).into_expr(start.until(end)))
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Copy, Clone)]
|
||||
pub struct PathTailShape;
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct PathTailSyntax {
|
||||
pub tail: Vec<PathMember>,
|
||||
pub span: Span,
|
||||
}
|
||||
|
||||
impl HasSpan for PathTailSyntax {
|
||||
fn span(&self) -> Span {
|
||||
self.span
|
||||
}
|
||||
}
|
||||
|
||||
impl PrettyDebug for PathTailSyntax {
|
||||
fn pretty(&self) -> DebugDocBuilder {
|
||||
b::typed("tail", b::intersperse(self.tail.iter(), b::space()))
|
||||
}
|
||||
}
|
||||
|
||||
impl ExpandSyntax for PathTailShape {
|
||||
type Output = Result<PathTailSyntax, ParseError>;
|
||||
|
||||
fn name(&self) -> &'static str {
|
||||
"path continuation"
|
||||
}
|
||||
|
||||
fn expand<'a, 'b>(
|
||||
&self,
|
||||
token_nodes: &'b mut TokensIterator<'a>,
|
||||
) -> Result<PathTailSyntax, ParseError> {
|
||||
let mut end: Option<Span> = None;
|
||||
let mut tail: Vec<PathMember> = vec![];
|
||||
|
||||
loop {
|
||||
if token_nodes.expand_syntax(DotShape).is_err() {
|
||||
break;
|
||||
}
|
||||
|
||||
let member = token_nodes.expand_syntax(MemberShape)?;
|
||||
let member = member.to_path_member(&token_nodes.source());
|
||||
end = Some(member.span);
|
||||
tail.push(member);
|
||||
}
|
||||
|
||||
match end {
|
||||
None => Err(token_nodes.err_next_token("path continuation")),
|
||||
|
||||
Some(end) => Ok(PathTailSyntax { tail, span: end }),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct ContinuationSyntax {
|
||||
kind: ContinuationSyntaxKind,
|
||||
span: Span,
|
||||
}
|
||||
|
||||
impl ContinuationSyntax {
|
||||
pub fn append_to(self, expr: SpannedExpression) -> SpannedExpression {
|
||||
match self.kind {
|
||||
ContinuationSyntaxKind::Infix(op, right) => {
|
||||
let span = expr.span.until(right.span);
|
||||
Expression::infix(expr, op, right).into_expr(span)
|
||||
}
|
||||
ContinuationSyntaxKind::Dot(_, member) => {
|
||||
let span = expr.span.until(member.span);
|
||||
Expression::dot_member(expr, member).into_expr(span)
|
||||
}
|
||||
ContinuationSyntaxKind::DotDot(_, right) => {
|
||||
let span = expr.span.until(right.span);
|
||||
Expression::range(expr, span, right).into_expr(span)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl HasSpan for ContinuationSyntax {
|
||||
fn span(&self) -> Span {
|
||||
self.span
|
||||
}
|
||||
}
|
||||
|
||||
impl PrettyDebugWithSource for ContinuationSyntax {
|
||||
fn pretty_debug(&self, source: &str) -> DebugDocBuilder {
|
||||
b::typed("continuation", self.kind.pretty_debug(source))
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub enum ContinuationSyntaxKind {
|
||||
Infix(Spanned<CompareOperator>, SpannedExpression),
|
||||
Dot(Span, PathMember),
|
||||
DotDot(Span, SpannedExpression),
|
||||
}
|
||||
|
||||
impl PrettyDebugWithSource for ContinuationSyntaxKind {
|
||||
fn pretty_debug(&self, source: &str) -> DebugDocBuilder {
|
||||
match self {
|
||||
ContinuationSyntaxKind::Infix(op, expr) => {
|
||||
b::operator(op.span.slice(source)) + expr.pretty_debug(source)
|
||||
}
|
||||
ContinuationSyntaxKind::Dot(span, member) => {
|
||||
b::operator(span.slice(source)) + member.pretty_debug(source)
|
||||
}
|
||||
ContinuationSyntaxKind::DotDot(span, expr) => {
|
||||
b::operator(span.slice(source)) + expr.pretty_debug(source)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// An expression continuation
|
||||
#[derive(Debug, Copy, Clone)]
|
||||
pub struct ExpressionContinuationShape;
|
||||
|
||||
impl ExpandSyntax for ExpressionContinuationShape {
|
||||
type Output = Result<ContinuationSyntax, ParseError>;
|
||||
|
||||
fn name(&self) -> &'static str {
|
||||
"expression continuation"
|
||||
}
|
||||
|
||||
fn expand<'a, 'b>(
|
||||
&self,
|
||||
token_nodes: &mut TokensIterator<'_>,
|
||||
) -> Result<ContinuationSyntax, ParseError> {
|
||||
token_nodes.atomic_parse(|token_nodes| {
|
||||
// Try to expand a `.`
|
||||
let dot = token_nodes.expand_syntax(DotShape);
|
||||
|
||||
if let Ok(dot) = dot {
|
||||
// If a `.` was matched, it's a `Path`, and we expect a `Member` next
|
||||
let syntax = token_nodes.expand_syntax(MemberShape)?;
|
||||
let member = syntax.to_path_member(&token_nodes.source());
|
||||
let member_span = member.span;
|
||||
|
||||
return Ok(ContinuationSyntax {
|
||||
kind: ContinuationSyntaxKind::Dot(dot, member),
|
||||
span: dot.until(member_span),
|
||||
});
|
||||
}
|
||||
|
||||
// Try to expand a `..`
|
||||
let dot = token_nodes.expand_syntax(DotDotShape);
|
||||
|
||||
if let Ok(dotdot) = dot {
|
||||
// If a `..` was matched, it's a `Range`, and we expect an `Expression` next
|
||||
let expr = token_nodes.expand_syntax(AnyExpressionShape)?;
|
||||
let expr_span = expr.span;
|
||||
|
||||
return Ok(ContinuationSyntax {
|
||||
kind: ContinuationSyntaxKind::DotDot(dotdot, expr),
|
||||
span: dotdot.until(expr_span),
|
||||
});
|
||||
}
|
||||
|
||||
// Otherwise, we expect an infix operator and an expression next
|
||||
let (_, op, _) = token_nodes.expand_syntax(InfixShape)?.infix.item;
|
||||
let next = token_nodes.expand_syntax(AnyExpressionShape)?;
|
||||
let next_span = next.span;
|
||||
|
||||
Ok(ContinuationSyntax {
|
||||
kind: ContinuationSyntaxKind::Infix(op.operator, next),
|
||||
span: op.operator.span.until(next_span),
|
||||
})
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Copy, Clone)]
|
||||
pub struct VariableShape;
|
||||
|
||||
impl ExpandSyntax for VariableShape {
|
||||
type Output = Result<SpannedExpression, ParseError>;
|
||||
|
||||
fn name(&self) -> &'static str {
|
||||
"variable"
|
||||
}
|
||||
|
||||
fn expand<'a, 'b>(
|
||||
&self,
|
||||
token_nodes: &mut TokensIterator<'_>,
|
||||
) -> Result<SpannedExpression, ParseError> {
|
||||
token_nodes
|
||||
.expand_token(ItVarType, |(inner, outer)| {
|
||||
Ok((
|
||||
FlatShape::ItVariable,
|
||||
Expression::it_variable(inner).into_expr(outer),
|
||||
))
|
||||
})
|
||||
.or_else(|_| {
|
||||
token_nodes.expand_token(VarType, |(inner, outer)| {
|
||||
Ok((
|
||||
FlatShape::Variable,
|
||||
Expression::variable(inner).into_expr(outer),
|
||||
))
|
||||
})
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)]
|
||||
pub enum Member {
|
||||
String(/* outer */ Span, /* inner */ Span),
|
||||
Int(BigInt, Span),
|
||||
Bare(Span),
|
||||
}
|
||||
|
||||
impl ShellTypeName for Member {
|
||||
fn type_name(&self) -> &'static str {
|
||||
match self {
|
||||
Member::String(_, _) => "string",
|
||||
Member::Int(_, _) => "integer",
|
||||
Member::Bare(_) => "word",
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Member {
|
||||
pub fn int(span: Span, source: &Text) -> Member {
|
||||
if let Ok(big_int) = BigInt::from_str(span.slice(source)) {
|
||||
Member::Int(big_int, span)
|
||||
} else {
|
||||
unreachable!("Internal error: could not convert text to BigInt as expected")
|
||||
}
|
||||
}
|
||||
|
||||
pub fn to_path_member(&self, source: &Text) -> PathMember {
|
||||
match self {
|
||||
Member::String(outer, inner) => PathMember::string(inner.slice(source), *outer),
|
||||
Member::Int(int, span) => PathMember::int(int.clone(), *span),
|
||||
Member::Bare(span) => PathMember::string(span.slice(source), *span),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl PrettyDebugWithSource for Member {
|
||||
fn pretty_debug(&self, source: &str) -> DebugDocBuilder {
|
||||
match self {
|
||||
Member::String(outer, _) => b::value(outer.slice(source)),
|
||||
Member::Int(int, _) => b::value(format!("{}", int)),
|
||||
Member::Bare(span) => b::value(span.slice(source)),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl HasSpan for Member {
|
||||
fn span(&self) -> Span {
|
||||
match self {
|
||||
Member::String(outer, ..) => *outer,
|
||||
Member::Int(_, int) => *int,
|
||||
Member::Bare(name) => *name,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Member {
|
||||
pub fn to_expr(&self) -> hir::SpannedExpression {
|
||||
match self {
|
||||
Member::String(outer, inner) => Expression::string(*inner).into_expr(outer),
|
||||
Member::Int(number, span) => Expression::number(number.clone()).into_expr(span),
|
||||
Member::Bare(span) => Expression::string(*span).into_expr(span),
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn span(&self) -> Span {
|
||||
match self {
|
||||
Member::String(outer, _inner) => *outer,
|
||||
Member::Int(_, span) => *span,
|
||||
Member::Bare(span) => *span,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
enum ColumnPathState {
|
||||
Initial,
|
||||
LeadingDot(Span),
|
||||
Dot(Span, Vec<Member>, Span),
|
||||
Member(Span, Vec<Member>),
|
||||
Error(ParseError),
|
||||
}
|
||||
|
||||
impl ColumnPathState {
|
||||
pub fn dot(self, dot: Span) -> ColumnPathState {
|
||||
match self {
|
||||
ColumnPathState::Initial => ColumnPathState::LeadingDot(dot),
|
||||
ColumnPathState::LeadingDot(_) => {
|
||||
ColumnPathState::Error(ParseError::mismatch("column", "dot".spanned(dot)))
|
||||
}
|
||||
ColumnPathState::Dot(..) => {
|
||||
ColumnPathState::Error(ParseError::mismatch("column", "dot".spanned(dot)))
|
||||
}
|
||||
ColumnPathState::Member(tag, members) => ColumnPathState::Dot(tag, members, dot),
|
||||
ColumnPathState::Error(err) => ColumnPathState::Error(err),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn member(self, member: Member) -> ColumnPathState {
|
||||
match self {
|
||||
ColumnPathState::Initial => ColumnPathState::Member(member.span(), vec![member]),
|
||||
ColumnPathState::LeadingDot(tag) => {
|
||||
ColumnPathState::Member(tag.until(member.span()), vec![member])
|
||||
}
|
||||
|
||||
ColumnPathState::Dot(tag, mut tags, _) => {
|
||||
ColumnPathState::Member(tag.until(member.span()), {
|
||||
tags.push(member);
|
||||
tags
|
||||
})
|
||||
}
|
||||
ColumnPathState::Member(..) => ColumnPathState::Error(ParseError::mismatch(
|
||||
"column",
|
||||
member.type_name().spanned(member.span()),
|
||||
)),
|
||||
ColumnPathState::Error(err) => ColumnPathState::Error(err),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn into_path(self, err: ParseError) -> Result<Tagged<Vec<Member>>, ParseError> {
|
||||
match self {
|
||||
ColumnPathState::Initial => Err(err),
|
||||
ColumnPathState::LeadingDot(dot) => {
|
||||
Err(ParseError::mismatch("column", "dot".spanned(dot)))
|
||||
}
|
||||
ColumnPathState::Dot(_tag, _members, dot) => {
|
||||
Err(ParseError::mismatch("column", "dot".spanned(dot)))
|
||||
}
|
||||
ColumnPathState::Member(tag, tags) => Ok(tags.tagged(tag)),
|
||||
ColumnPathState::Error(err) => Err(err),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Copy, Clone)]
|
||||
pub struct ColumnPathShape;
|
||||
|
||||
impl ExpandSyntax for ColumnPathShape {
|
||||
type Output = Result<ColumnPathSyntax, ParseError>;
|
||||
|
||||
fn name(&self) -> &'static str {
|
||||
"column path"
|
||||
}
|
||||
|
||||
fn expand<'a, 'b>(
|
||||
&self,
|
||||
token_nodes: &'b mut TokensIterator<'a>,
|
||||
) -> Result<ColumnPathSyntax, ParseError> {
|
||||
let mut state = ColumnPathState::Initial;
|
||||
|
||||
loop {
|
||||
let member = token_nodes.expand_syntax(MemberShape);
|
||||
|
||||
match member {
|
||||
Err(_) => break,
|
||||
Ok(member) => state = state.member(member),
|
||||
}
|
||||
|
||||
let dot = token_nodes.expand_syntax(DotShape);
|
||||
|
||||
match dot {
|
||||
Err(_) => break,
|
||||
Ok(dot) => state = state.dot(dot),
|
||||
}
|
||||
}
|
||||
|
||||
let path = state.into_path(token_nodes.err_next_token("column path"))?;
|
||||
|
||||
Ok(ColumnPathSyntax {
|
||||
path: path.item,
|
||||
tag: path.tag,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct ColumnPathSyntax {
|
||||
pub path: Vec<Member>,
|
||||
pub tag: Tag,
|
||||
}
|
||||
|
||||
impl HasSpan for ColumnPathSyntax {
|
||||
fn span(&self) -> Span {
|
||||
self.tag.span
|
||||
}
|
||||
}
|
||||
|
||||
impl PrettyDebugWithSource for ColumnPathSyntax {
|
||||
fn pretty_debug(&self, source: &str) -> DebugDocBuilder {
|
||||
b::typed(
|
||||
"column path",
|
||||
b::intersperse(
|
||||
self.path.iter().map(|member| member.pretty_debug(source)),
|
||||
b::space(),
|
||||
),
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Copy, Clone)]
|
||||
pub struct MemberShape;
|
||||
|
||||
impl ExpandSyntax for MemberShape {
|
||||
type Output = Result<Member, ParseError>;
|
||||
|
||||
fn name(&self) -> &'static str {
|
||||
"column"
|
||||
}
|
||||
|
||||
fn expand<'a, 'b>(&self, token_nodes: &mut TokensIterator<'_>) -> Result<Member, ParseError> {
|
||||
if let Ok(int) = token_nodes.expand_syntax(IntMemberShape) {
|
||||
return Ok(int);
|
||||
}
|
||||
|
||||
let bare = token_nodes.expand_syntax(BareShape);
|
||||
|
||||
if let Ok(bare) = bare {
|
||||
return Ok(Member::Bare(bare.span()));
|
||||
}
|
||||
|
||||
/* KATZ */
|
||||
/* let number = NumberShape.test(token_nodes, context);
|
||||
|
||||
if let Some(peeked) = number {
|
||||
let node = peeked.not_eof("column")?.commit();
|
||||
let (n, span) = node.as_number().ok_or_else(|| {
|
||||
ParseError::internal_error("can't convert node to number".spanned(node.span()))
|
||||
})?;
|
||||
|
||||
return Ok(Member::Number(n, span))
|
||||
}*/
|
||||
|
||||
let string = token_nodes.expand_syntax(StringShape);
|
||||
|
||||
if let Ok(syntax) = string {
|
||||
return Ok(Member::String(syntax.span, syntax.inner));
|
||||
}
|
||||
|
||||
Err(token_nodes.peek().type_error("column"))
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Copy, Clone)]
|
||||
struct IntMemberShape;
|
||||
|
||||
impl ExpandSyntax for IntMemberShape {
|
||||
type Output = Result<Member, ParseError>;
|
||||
|
||||
fn name(&self) -> &'static str {
|
||||
"integer member"
|
||||
}
|
||||
|
||||
fn expand<'a, 'b>(
|
||||
&self,
|
||||
token_nodes: &'b mut TokensIterator<'a>,
|
||||
) -> Result<Member, ParseError> {
|
||||
token_nodes
|
||||
.expand_syntax(IntShape)
|
||||
.map(|int| Member::int(int.span(), &token_nodes.source()))
|
||||
.or_else(|_| Err(token_nodes.err_next_token("integer member")))
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Copy, Clone)]
|
||||
pub struct DotShape;
|
||||
|
||||
#[derive(Debug, Copy, Clone)]
|
||||
pub struct ColorableDotShape;
|
||||
|
||||
impl ExpandSyntax for DotShape {
|
||||
type Output = Result<Span, ParseError>;
|
||||
|
||||
fn name(&self) -> &'static str {
|
||||
"dot"
|
||||
}
|
||||
|
||||
fn expand<'a, 'b>(&self, token_nodes: &'b mut TokensIterator<'a>) -> Result<Span, ParseError> {
|
||||
token_nodes.expand_token(DotType, |token| Ok((FlatShape::Dot, token.span())))
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Copy, Clone)]
|
||||
struct DotDotShape;
|
||||
|
||||
impl ExpandSyntax for DotDotShape {
|
||||
type Output = Result<Span, ParseError>;
|
||||
|
||||
fn name(&self) -> &'static str {
|
||||
"dotdot"
|
||||
}
|
||||
|
||||
fn expand<'a, 'b>(&self, token_nodes: &'b mut TokensIterator<'a>) -> Result<Span, ParseError> {
|
||||
token_nodes.expand_token(DotDotType, |token| Ok((FlatShape::DotDot, token.span())))
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Copy, Clone)]
|
||||
pub struct InfixShape;
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct InfixSyntax {
|
||||
infix: Spanned<(Span, InfixInnerSyntax, Span)>,
|
||||
}
|
||||
|
||||
impl HasSpan for InfixSyntax {
|
||||
fn span(&self) -> Span {
|
||||
self.infix.span
|
||||
}
|
||||
}
|
||||
|
||||
impl PrettyDebugWithSource for InfixSyntax {
|
||||
fn pretty_debug(&self, source: &str) -> DebugDocBuilder {
|
||||
self.infix.1.pretty_debug(source)
|
||||
}
|
||||
}
|
||||
|
||||
impl ExpandSyntax for InfixShape {
|
||||
type Output = Result<InfixSyntax, ParseError>;
|
||||
|
||||
fn name(&self) -> &'static str {
|
||||
"infix operator"
|
||||
}
|
||||
|
||||
fn expand<'a, 'b>(
|
||||
&self,
|
||||
token_nodes: &'b mut TokensIterator<'a>,
|
||||
) -> Result<InfixSyntax, ParseError> {
|
||||
token_nodes.atomic_parse(|token_nodes| {
|
||||
// An infix operator must be prefixed by whitespace
|
||||
let start = token_nodes.expand_syntax(WhitespaceShape)?;
|
||||
|
||||
// Parse the next TokenNode after the whitespace
|
||||
let operator = token_nodes.expand_syntax(InfixInnerShape)?;
|
||||
|
||||
// An infix operator must be followed by whitespace
|
||||
let end = token_nodes.expand_syntax(WhitespaceShape)?;
|
||||
|
||||
Ok(InfixSyntax {
|
||||
infix: (start, operator, end).spanned(start.until(end)),
|
||||
})
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct InfixInnerSyntax {
|
||||
pub operator: Spanned<CompareOperator>,
|
||||
}
|
||||
|
||||
impl HasSpan for InfixInnerSyntax {
|
||||
fn span(&self) -> Span {
|
||||
self.operator.span
|
||||
}
|
||||
}
|
||||
|
||||
impl PrettyDebug for InfixInnerSyntax {
|
||||
fn pretty(&self) -> DebugDocBuilder {
|
||||
self.operator.pretty()
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Copy, Clone)]
|
||||
pub struct InfixInnerShape;
|
||||
|
||||
impl ExpandSyntax for InfixInnerShape {
|
||||
type Output = Result<InfixInnerSyntax, ParseError>;
|
||||
|
||||
fn name(&self) -> &'static str {
|
||||
"infix inner"
|
||||
}
|
||||
|
||||
fn expand<'a, 'b>(
|
||||
&self,
|
||||
token_nodes: &'b mut TokensIterator<'a>,
|
||||
) -> Result<InfixInnerSyntax, ParseError> {
|
||||
token_nodes.expand_token(CompareOperatorType, |(span, operator)| {
|
||||
Ok((
|
||||
FlatShape::CompareOperator,
|
||||
InfixInnerSyntax {
|
||||
operator: operator.spanned(span),
|
||||
},
|
||||
))
|
||||
})
|
||||
}
|
||||
}
|
crates/nu-parser/src/hir/syntax_shape/flat_shape.rs (new file, 193 lines)
@ -0,0 +1,193 @@
|
||||
use crate::parse::flag::{Flag, FlagKind};
|
||||
use crate::parse::number::RawNumber;
|
||||
use crate::parse::operator::EvaluationOperator;
|
||||
use crate::parse::token_tree::{Delimiter, SpannedToken, Token};
|
||||
use nu_protocol::ShellTypeName;
|
||||
use nu_source::{DebugDocBuilder, HasSpan, PrettyDebug, Span, Spanned, SpannedItem, Text};
|
||||
|
||||
#[derive(Debug, Copy, Clone)]
|
||||
pub enum FlatShape {
|
||||
OpenDelimiter(Delimiter),
|
||||
CloseDelimiter(Delimiter),
|
||||
Type,
|
||||
Identifier,
|
||||
ItVariable,
|
||||
Variable,
|
||||
CompareOperator,
|
||||
Dot,
|
||||
DotDot,
|
||||
InternalCommand,
|
||||
ExternalCommand,
|
||||
ExternalWord,
|
||||
BareMember,
|
||||
StringMember,
|
||||
String,
|
||||
Path,
|
||||
Word,
|
||||
Keyword,
|
||||
Pipe,
|
||||
GlobPattern,
|
||||
Flag,
|
||||
ShorthandFlag,
|
||||
Int,
|
||||
Decimal,
|
||||
Garbage,
|
||||
Whitespace,
|
||||
Separator,
|
||||
Comment,
|
||||
Size { number: Span, unit: Span },
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub enum ShapeResult {
|
||||
Success(Spanned<FlatShape>),
|
||||
Fallback {
|
||||
shape: Spanned<FlatShape>,
|
||||
allowed: Vec<String>,
|
||||
},
|
||||
}
|
||||
|
||||
impl HasSpan for ShapeResult {
|
||||
fn span(&self) -> Span {
|
||||
match self {
|
||||
ShapeResult::Success(shape) => shape.span,
|
||||
ShapeResult::Fallback { shape, .. } => shape.span,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl PrettyDebug for FlatShape {
|
||||
fn pretty(&self) -> DebugDocBuilder {
|
||||
unimplemented!()
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Copy, Clone)]
|
||||
pub struct TraceShape {
|
||||
shape: FlatShape,
|
||||
span: Span,
|
||||
}
|
||||
|
||||
impl ShellTypeName for TraceShape {
|
||||
fn type_name(&self) -> &'static str {
|
||||
self.shape.type_name()
|
||||
}
|
||||
}
|
||||
|
||||
impl PrettyDebug for TraceShape {
|
||||
fn pretty(&self) -> DebugDocBuilder {
|
||||
self.shape.pretty()
|
||||
}
|
||||
}
|
||||
|
||||
impl HasSpan for TraceShape {
|
||||
fn span(&self) -> Span {
|
||||
self.span
|
||||
}
|
||||
}
|
||||
|
||||
impl ShellTypeName for FlatShape {
|
||||
fn type_name(&self) -> &'static str {
|
||||
match self {
|
||||
FlatShape::OpenDelimiter(Delimiter::Brace) => "open brace",
|
||||
FlatShape::OpenDelimiter(Delimiter::Paren) => "open paren",
|
||||
FlatShape::OpenDelimiter(Delimiter::Square) => "open square",
|
||||
FlatShape::CloseDelimiter(Delimiter::Brace) => "close brace",
|
||||
FlatShape::CloseDelimiter(Delimiter::Paren) => "close paren",
|
||||
FlatShape::CloseDelimiter(Delimiter::Square) => "close square",
|
||||
FlatShape::Type => "type",
|
||||
FlatShape::Identifier => "identifier",
|
||||
FlatShape::ItVariable => "$it",
|
||||
FlatShape::Variable => "variable",
|
||||
FlatShape::CompareOperator => "comparison",
|
||||
FlatShape::Dot => "dot",
|
||||
FlatShape::DotDot => "dotdot",
|
||||
FlatShape::InternalCommand => "internal command",
|
||||
FlatShape::ExternalCommand => "external command",
|
||||
FlatShape::ExternalWord => "external word",
|
||||
FlatShape::BareMember => "bare member",
|
||||
FlatShape::StringMember => "string member",
|
||||
FlatShape::String => "string",
|
||||
FlatShape::Path => "path",
|
||||
FlatShape::Word => "word",
|
||||
FlatShape::Keyword => "keyword",
|
||||
FlatShape::Pipe => "pipe",
|
||||
FlatShape::GlobPattern => "glob",
|
||||
FlatShape::Flag => "flag",
|
||||
FlatShape::ShorthandFlag => "shorthand flag",
|
||||
FlatShape::Int => "int",
|
||||
FlatShape::Decimal => "decimal",
|
||||
FlatShape::Garbage => "garbage",
|
||||
FlatShape::Whitespace => "whitespace",
|
||||
FlatShape::Separator => "separator",
|
||||
FlatShape::Comment => "comment",
|
||||
FlatShape::Size { .. } => "size",
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl FlatShape {
|
||||
pub fn into_trace_shape(self, span: Span) -> TraceShape {
|
||||
TraceShape { shape: self, span }
|
||||
}
|
||||
|
||||
pub fn shapes(token: &SpannedToken, source: &Text) -> Vec<Spanned<FlatShape>> {
|
||||
let mut shapes = vec![];
|
||||
|
||||
FlatShape::from(token, source, &mut shapes);
|
||||
shapes
|
||||
}
|
||||
|
||||
fn from(token: &SpannedToken, source: &Text, shapes: &mut Vec<Spanned<FlatShape>>) {
|
||||
let span = token.span();
|
||||
|
||||
match token.unspanned() {
|
||||
Token::Number(RawNumber::Int(_)) => shapes.push(FlatShape::Int.spanned(span)),
|
||||
Token::Number(RawNumber::Decimal(_)) => shapes.push(FlatShape::Decimal.spanned(span)),
|
||||
Token::EvaluationOperator(EvaluationOperator::Dot) => {
|
||||
shapes.push(FlatShape::Dot.spanned(span))
|
||||
}
|
||||
Token::EvaluationOperator(EvaluationOperator::DotDot) => {
|
||||
shapes.push(FlatShape::DotDot.spanned(span))
|
||||
}
|
||||
Token::CompareOperator(_) => shapes.push(FlatShape::CompareOperator.spanned(span)),
|
||||
Token::String(_) => shapes.push(FlatShape::String.spanned(span)),
|
||||
Token::Variable(v) if v.slice(source) == "it" => {
|
||||
shapes.push(FlatShape::ItVariable.spanned(span))
|
||||
}
|
||||
Token::Variable(_) => shapes.push(FlatShape::Variable.spanned(span)),
|
||||
Token::ItVariable(_) => shapes.push(FlatShape::ItVariable.spanned(span)),
|
||||
Token::ExternalCommand(_) => shapes.push(FlatShape::ExternalCommand.spanned(span)),
|
||||
Token::ExternalWord => shapes.push(FlatShape::ExternalWord.spanned(span)),
|
||||
Token::GlobPattern => shapes.push(FlatShape::GlobPattern.spanned(span)),
|
||||
Token::Bare => shapes.push(FlatShape::Word.spanned(span)),
|
||||
Token::Call(_) => unimplemented!(),
|
||||
Token::Delimited(v) => {
|
||||
shapes.push(FlatShape::OpenDelimiter(v.delimiter).spanned(v.spans.0));
|
||||
for token in &v.children {
|
||||
FlatShape::from(token, source, shapes);
|
||||
}
|
||||
shapes.push(FlatShape::CloseDelimiter(v.delimiter).spanned(v.spans.1));
|
||||
}
|
||||
Token::Pipeline(pipeline) => {
|
||||
for part in &pipeline.parts {
|
||||
if part.pipe.is_some() {
|
||||
shapes.push(FlatShape::Pipe.spanned(part.span()));
|
||||
}
|
||||
}
|
||||
}
|
||||
Token::Flag(Flag {
|
||||
kind: FlagKind::Longhand,
|
||||
..
|
||||
}) => shapes.push(FlatShape::Flag.spanned(span)),
|
||||
Token::Flag(Flag {
|
||||
kind: FlagKind::Shorthand,
|
||||
..
|
||||
}) => shapes.push(FlatShape::ShorthandFlag.spanned(span)),
|
||||
Token::Garbage => shapes.push(FlatShape::Garbage.spanned(span)),
|
||||
Token::Whitespace => shapes.push(FlatShape::Whitespace.spanned(span)),
|
||||
Token::Separator => shapes.push(FlatShape::Separator.spanned(span)),
|
||||
Token::Comment(_) => shapes.push(FlatShape::Comment.spanned(span)),
|
||||
}
|
||||
}
|
||||
}
|
crates/nu-parser/src/hir/tokens_iterator.rs (new file, 602 lines)
@ -0,0 +1,602 @@
|
||||
pub(crate) mod debug;
|
||||
pub(crate) mod into_shapes;
|
||||
pub(crate) mod pattern;
|
||||
pub(crate) mod state;
|
||||
|
||||
use self::debug::ExpandTracer;
|
||||
use self::into_shapes::IntoShapes;
|
||||
use self::state::{Peeked, TokensIteratorState};
|
||||
|
||||
use crate::hir::syntax_shape::flat_shape::{FlatShape, ShapeResult};
|
||||
use crate::hir::syntax_shape::{ExpandContext, ExpandSyntax, ExpressionListShape};
|
||||
use crate::hir::SpannedExpression;
|
||||
use crate::parse::token_tree::{BlockType, DelimitedNode, SpannedToken, SquareType, TokenType};
|
||||
|
||||
use getset::{Getters, MutGetters};
|
||||
use nu_errors::ParseError;
|
||||
use nu_protocol::SpannedTypeName;
|
||||
use nu_source::{
|
||||
HasFallibleSpan, HasSpan, IntoSpanned, PrettyDebugWithSource, Span, Spanned, SpannedItem, Text,
|
||||
};
|
||||
use std::borrow::Borrow;
|
||||
use std::sync::Arc;
|
||||
|
||||
#[derive(Getters, MutGetters, Clone, Debug)]
|
||||
pub struct TokensIterator<'content> {
|
||||
#[get = "pub"]
|
||||
#[get_mut = "pub"]
|
||||
state: TokensIteratorState<'content>,
|
||||
#[get = "pub"]
|
||||
#[get_mut = "pub"]
|
||||
expand_tracer: ExpandTracer<SpannedExpression>,
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct Checkpoint<'content, 'me> {
|
||||
pub(crate) iterator: &'me mut TokensIterator<'content>,
|
||||
index: usize,
|
||||
seen: indexmap::IndexSet<usize>,
|
||||
|
||||
shape_start: usize,
|
||||
committed: bool,
|
||||
}
|
||||
|
||||
impl<'content, 'me> Checkpoint<'content, 'me> {
|
||||
pub(crate) fn commit(mut self) {
|
||||
self.committed = true;
|
||||
}
|
||||
}
|
||||
|
||||
impl<'content, 'me> std::ops::Drop for Checkpoint<'content, 'me> {
|
||||
fn drop(&mut self) {
|
||||
if !self.committed {
|
||||
let state = &mut self.iterator.state;
|
||||
|
||||
state.index = self.index;
|
||||
state.seen = self.seen.clone();
|
||||
|
||||
state.shapes.truncate(self.shape_start);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// For parse_command
|
||||
impl<'content> TokensIterator<'content> {
|
||||
pub fn sort_shapes(&mut self) {
|
||||
// This is pretty dubious, but it works. We should look into a better algorithm that doesn't end up requiring
|
||||
// this solution.
|
||||
|
||||
self.state
|
||||
.shapes
|
||||
.sort_by(|a, b| a.span().start().cmp(&b.span().start()));
|
||||
}
|
||||
|
||||
/// Run a block of code, retrieving the shapes that were created during the block. This is
|
||||
/// used by `parse_command` to associate shapes with a particular flag.
|
||||
pub fn shapes_for<'me, T>(
|
||||
&'me mut self,
|
||||
block: impl FnOnce(&mut TokensIterator<'content>) -> Result<T, ParseError>,
|
||||
) -> (Result<T, ParseError>, Vec<ShapeResult>) {
|
||||
let index = self.state.index;
|
||||
let mut shapes = vec![];
|
||||
let mut errors = self.state.errors.clone();
|
||||
|
||||
let seen = self.state.seen.clone();
|
||||
std::mem::swap(&mut self.state.shapes, &mut shapes);
|
||||
std::mem::swap(&mut self.state.errors, &mut errors);
|
||||
|
||||
let checkpoint = Checkpoint {
|
||||
iterator: self,
|
||||
index,
|
||||
seen,
|
||||
committed: false,
|
||||
shape_start: 0,
|
||||
};
|
||||
|
||||
let value = block(checkpoint.iterator);
|
||||
|
||||
let value = match value {
|
||||
Err(err) => {
|
||||
drop(checkpoint);
|
||||
std::mem::swap(&mut self.state.shapes, &mut shapes);
|
||||
std::mem::swap(&mut self.state.errors, &mut errors);
|
||||
return (Err(err), vec![]);
|
||||
}
|
||||
|
||||
Ok(value) => value,
|
||||
};
|
||||
|
||||
checkpoint.commit();
|
||||
std::mem::swap(&mut self.state.shapes, &mut shapes);
|
||||
|
||||
(Ok(value), shapes)
|
||||
}
|
||||
|
||||
pub fn extract<T>(&mut self, f: impl Fn(&SpannedToken) -> Option<T>) -> Option<(usize, T)> {
|
||||
let state = &mut self.state;
|
||||
|
||||
for (i, item) in state.tokens.iter().enumerate() {
|
||||
if state.seen.contains(&i) {
|
||||
continue;
|
||||
}
|
||||
|
||||
match f(item) {
|
||||
None => {
|
||||
continue;
|
||||
}
|
||||
Some(value) => {
|
||||
state.seen.insert(i);
|
||||
return Some((i, value));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
self.move_to(0);
|
||||
|
||||
None
|
||||
}
|
||||
|
||||
pub fn remove(&mut self, position: usize) {
|
||||
self.state.seen.insert(position);
|
||||
}
|
||||
}
|
||||
|
||||
// Delimited
|
||||
impl<'content> TokensIterator<'content> {
|
||||
pub fn block(&mut self) -> Result<Spanned<Vec<SpannedExpression>>, ParseError> {
|
||||
self.expand_token_with_token_nodes(BlockType, |node, token_nodes| {
|
||||
token_nodes.delimited(node)
|
||||
})
|
||||
}
|
||||
|
||||
pub fn square(&mut self) -> Result<Spanned<Vec<SpannedExpression>>, ParseError> {
|
||||
self.expand_token_with_token_nodes(SquareType, |node, token_nodes| {
|
||||
token_nodes.delimited(node)
|
||||
})
|
||||
}
|
||||
|
||||
fn delimited(
|
||||
&mut self,
|
||||
DelimitedNode {
|
||||
delimiter,
|
||||
spans,
|
||||
children,
|
||||
}: DelimitedNode,
|
||||
) -> Result<(Vec<ShapeResult>, Spanned<Vec<SpannedExpression>>), ParseError> {
|
||||
let span = spans.0.until(spans.1);
|
||||
let (child_shapes, expr) = self.child(children[..].spanned(span), |token_nodes| {
|
||||
token_nodes.expand_infallible(ExpressionListShape).exprs
|
||||
});
|
||||
|
||||
let mut shapes = vec![ShapeResult::Success(
|
||||
FlatShape::OpenDelimiter(delimiter).spanned(spans.0),
|
||||
)];
|
||||
shapes.extend(child_shapes);
|
||||
shapes.push(ShapeResult::Success(
|
||||
FlatShape::CloseDelimiter(delimiter).spanned(spans.1),
|
||||
));
|
||||
|
||||
Ok((shapes, expr))
|
||||
}
|
||||
}
|
||||
|
||||
impl<'content> TokensIterator<'content> {
|
||||
pub fn new(
|
||||
items: &'content [SpannedToken],
|
||||
context: ExpandContext<'content>,
|
||||
span: Span,
|
||||
) -> TokensIterator<'content> {
|
||||
let source = context.source();
|
||||
|
||||
TokensIterator {
|
||||
state: TokensIteratorState {
|
||||
tokens: items,
|
||||
span,
|
||||
index: 0,
|
||||
seen: indexmap::IndexSet::new(),
|
||||
shapes: vec![],
|
||||
errors: indexmap::IndexMap::new(),
|
||||
context: Arc::new(context),
|
||||
},
|
||||
expand_tracer: ExpandTracer::new("Expand Trace", source.clone()),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn len(&self) -> usize {
|
||||
self.state.tokens.len()
|
||||
}
|
||||
|
||||
pub fn is_empty(&self) -> bool {
|
||||
self.state.tokens.is_empty()
|
||||
}
|
||||
|
||||
pub fn source(&self) -> Text {
|
||||
self.state.context.source().clone()
|
||||
}
|
||||
|
||||
pub fn context(&self) -> &ExpandContext {
|
||||
&self.state.context
|
||||
}
|
||||
|
||||
pub fn color_result(&mut self, shape: ShapeResult) {
|
||||
match shape {
|
||||
ShapeResult::Success(shape) => self.color_shape(shape),
|
||||
ShapeResult::Fallback { shape, allowed } => self.color_err(shape, allowed),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn color_shape(&mut self, shape: Spanned<FlatShape>) {
|
||||
self.with_tracer(|_, tracer| tracer.add_shape(shape.into_trace_shape(shape.span)));
|
||||
self.state.shapes.push(ShapeResult::Success(shape));
|
||||
}
|
||||
|
||||
pub fn color_err(&mut self, shape: Spanned<FlatShape>, valid_shapes: Vec<String>) {
|
||||
self.with_tracer(|_, tracer| tracer.add_err_shape(shape.into_trace_shape(shape.span)));
|
||||
self.state.errors.insert(shape.span, valid_shapes.clone());
|
||||
self.state.shapes.push(ShapeResult::Fallback {
|
||||
shape,
|
||||
allowed: valid_shapes,
|
||||
});
|
||||
}
|
||||
|
||||
pub fn color_shapes(&mut self, shapes: Vec<Spanned<FlatShape>>) {
|
||||
self.with_tracer(|_, tracer| {
|
||||
for shape in &shapes {
|
||||
tracer.add_shape(shape.into_trace_shape(shape.span))
|
||||
}
|
||||
});
|
||||
|
||||
for shape in &shapes {
|
||||
self.state.shapes.push(ShapeResult::Success(*shape));
|
||||
}
|
||||
}
|
||||
|
||||
pub fn child<'me, T>(
|
||||
&'me mut self,
|
||||
tokens: Spanned<&'me [SpannedToken]>,
|
||||
block: impl FnOnce(&mut TokensIterator<'me>) -> T,
|
||||
) -> (Vec<ShapeResult>, T) {
|
||||
let mut shapes = vec![];
|
||||
std::mem::swap(&mut shapes, &mut self.state.shapes);
|
||||
|
||||
let mut errors = self.state.errors.clone();
|
||||
std::mem::swap(&mut errors, &mut self.state.errors);
|
||||
|
||||
let mut expand_tracer = ExpandTracer::new("Expand Trace", self.source());
|
||||
std::mem::swap(&mut expand_tracer, &mut self.expand_tracer);
|
||||
|
||||
let mut iterator = TokensIterator {
|
||||
state: TokensIteratorState {
|
||||
tokens: tokens.item,
|
||||
span: tokens.span,
|
||||
index: 0,
|
||||
seen: indexmap::IndexSet::new(),
|
||||
shapes,
|
||||
errors,
|
||||
context: self.state.context.clone(),
|
||||
},
|
||||
expand_tracer,
|
||||
};
|
||||
|
||||
let result = block(&mut iterator);
|
||||
|
||||
std::mem::swap(&mut iterator.state.shapes, &mut self.state.shapes);
|
||||
std::mem::swap(&mut iterator.state.errors, &mut self.state.errors);
|
||||
std::mem::swap(&mut iterator.expand_tracer, &mut self.expand_tracer);
|
||||
|
||||
(iterator.state.shapes, result)
|
||||
}
|
||||
|
||||
fn with_tracer(
|
||||
&mut self,
|
||||
block: impl FnOnce(&mut TokensIteratorState, &mut ExpandTracer<SpannedExpression>),
|
||||
) {
|
||||
let state = &mut self.state;
|
||||
let tracer = &mut self.expand_tracer;
|
||||
|
||||
block(state, tracer)
|
||||
}
|
||||
|
||||
pub fn finish_tracer(&mut self) {
|
||||
self.with_tracer(|_, tracer| tracer.finish())
|
||||
}
|
||||
|
||||
pub fn atomic_parse<'me, T, E>(
|
||||
&'me mut self,
|
||||
block: impl FnOnce(&mut TokensIterator<'content>) -> Result<T, E>,
|
||||
) -> Result<T, E> {
|
||||
let state = &mut self.state;
|
||||
|
||||
let index = state.index;
|
||||
|
||||
let shape_start = state.shapes.len();
|
||||
let seen = state.seen.clone();
|
||||
|
||||
let checkpoint = Checkpoint {
|
||||
iterator: self,
|
||||
index,
|
||||
seen,
|
||||
committed: false,
|
||||
|
||||
shape_start,
|
||||
};
|
||||
|
||||
let value = block(checkpoint.iterator)?;
|
||||
|
||||
checkpoint.commit();
|
||||
Ok(value)
|
||||
}
|
||||
|
||||
fn eof_span(&self) -> Span {
|
||||
Span::new(self.state.span.end(), self.state.span.end())
|
||||
}
|
||||
|
||||
pub fn span_at_cursor(&mut self) -> Span {
|
||||
let next = self.peek();
|
||||
|
||||
match next.node {
|
||||
None => self.eof_span(),
|
||||
Some(node) => node.span(),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn at_end(&self) -> bool {
|
||||
next_index(&self.state).is_none()
|
||||
}
|
||||
|
||||
pub fn move_to(&mut self, pos: usize) {
|
||||
self.state.index = pos;
|
||||
}
|
||||
|
||||
/// Peek the next token in the token stream and return a `Peeked`.
|
||||
///
|
||||
/// # Example
|
||||
///
|
||||
/// ```ignore
|
||||
/// let peeked = token_nodes.peek().not_eof();
|
||||
/// let node = peeked.node;
|
||||
/// match node.unspanned() {
|
||||
/// Token::Whitespace => {
|
||||
/// let node = peeked.commit();
|
||||
/// return Ok(node.span)
|
||||
/// }
|
||||
/// other => return Err(ParseError::mismatch("whitespace", node.spanned_type_name()))
|
||||
/// }
|
||||
/// ```
|
||||
pub fn peek<'me>(&'me mut self) -> Peeked<'content, 'me> {
|
||||
let state = self.state();
|
||||
let len = state.tokens.len();
|
||||
let from = state.index;
|
||||
|
||||
let index = next_index(state);
|
||||
|
||||
let (node, to) = match index {
|
||||
None => (None, len),
|
||||
|
||||
Some(to) => (Some(&state.tokens[to]), to + 1),
|
||||
};
|
||||
|
||||
Peeked {
|
||||
node,
|
||||
iterator: self,
|
||||
from,
|
||||
to,
|
||||
}
|
||||
}
|
||||
|
||||
/// Produce an error corresponding to the next token.
|
||||
///
|
||||
/// If the next token is EOF, produce an `UnexpectedEof`. Otherwise, produce a `Mismatch`.
|
||||
pub fn err_next_token(&mut self, expected: &'static str) -> ParseError {
|
||||
match next_index(&self.state) {
|
||||
None => ParseError::unexpected_eof(expected, self.eof_span()),
|
||||
Some(index) => {
|
||||
ParseError::mismatch(expected, self.state.tokens[index].spanned_type_name())
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn expand_token_with_token_nodes<
|
||||
'me,
|
||||
T: 'me,
|
||||
U: IntoSpanned<Output = V>,
|
||||
V: HasFallibleSpan,
|
||||
F: IntoShapes,
|
||||
>(
|
||||
&'me mut self,
|
||||
expected: impl TokenType<Output = T>,
|
||||
block: impl FnOnce(T, &mut Self) -> Result<(F, U), ParseError>,
|
||||
) -> Result<V, ParseError> {
|
||||
let desc = expected.desc();
|
||||
|
||||
let peeked = self.peek().not_eof(desc.borrow())?;
|
||||
|
||||
let (shapes, val) = {
|
||||
let node = peeked.node;
|
||||
let type_name = node.spanned_type_name();
|
||||
|
||||
let func = Box::new(|| Err(ParseError::mismatch(desc.clone().into_owned(), type_name)));
|
||||
|
||||
match expected.extract_token_value(node, &func) {
|
||||
Err(err) => return Err(err),
|
||||
Ok(value) => match block(value, peeked.iterator) {
|
||||
Err(err) => return Err(err),
|
||||
Ok((shape, val)) => {
|
||||
let span = peeked.node.span();
|
||||
peeked.commit();
|
||||
(shape.into_shapes(span), val.into_spanned(span))
|
||||
}
|
||||
},
|
||||
}
|
||||
};
|
||||
|
||||
for shape in &shapes {
|
||||
self.color_result(shape.clone());
|
||||
}
|
||||
|
||||
Ok(val)
|
||||
}
|
||||
|
||||
/// Expand and color a single token. Takes an `impl TokenType` and produces
|
||||
/// (() | FlatShape | Vec<Spanned<FlatShape>>, Output) (or an error).
|
||||
///
|
||||
/// If a single FlatShape is produced, it is annotated with the span of the
|
||||
/// original token. Otherwise, each FlatShape in the list must already be
|
||||
/// annotated.
|
||||
pub fn expand_token<'me, T, U, V, F>(
|
||||
&'me mut self,
|
||||
expected: impl TokenType<Output = T>,
|
||||
block: impl FnOnce(T) -> Result<(F, U), ParseError>,
|
||||
) -> Result<V, ParseError>
|
||||
where
|
||||
T: 'me,
|
||||
U: IntoSpanned<Output = V>,
|
||||
V: HasFallibleSpan,
|
||||
F: IntoShapes,
|
||||
{
|
||||
self.expand_token_with_token_nodes(expected, |value, _| block(value))
|
||||
}
|
||||
|
||||
fn commit(&mut self, from: usize, to: usize) {
|
||||
for index in from..to {
|
||||
self.state.seen.insert(index);
|
||||
}
|
||||
|
||||
self.state.index = to;
|
||||
}
|
||||
|
||||
pub fn debug_remaining(&self) -> Vec<SpannedToken> {
|
||||
let mut tokens: TokensIterator = self.clone();
|
||||
tokens.move_to(0);
|
||||
tokens.cloned().collect()
|
||||
}
|
||||
|
||||
/// Expand an `ExpandSyntax` whose output is a `Result`, producing either the shape's output
|
||||
/// or a `ParseError`. If the token stream is at EOF, this method produces a ParseError
|
||||
/// (`UnexpectedEof`).
|
||||
///
|
||||
/// You must use `expand_syntax` if the `Output` of the `ExpandSyntax` is a `Result`, but
|
||||
/// it's difficult to model this in the Rust type system.
|
||||
pub fn expand_syntax<U>(
|
||||
&mut self,
|
||||
shape: impl ExpandSyntax<Output = Result<U, ParseError>>,
|
||||
) -> Result<U, ParseError>
|
||||
where
|
||||
U: std::fmt::Debug + HasFallibleSpan + PrettyDebugWithSource + Clone + 'static,
|
||||
{
|
||||
if self.at_end() {
|
||||
self.with_tracer(|_, tracer| tracer.start(shape.name(), None));
|
||||
self.with_tracer(|_, tracer| tracer.eof_frame());
|
||||
return Err(ParseError::unexpected_eof(shape.name(), self.eof_span()));
|
||||
}
|
||||
|
||||
let (result, added_shapes) = self.expand(shape);
|
||||
|
||||
match &result {
|
||||
Ok(val) => self.finish_expand(val, added_shapes),
|
||||
Err(err) => self.with_tracer(|_, tracer| tracer.failed(err)),
|
||||
}
|
||||
|
||||
result
|
||||
}
|
||||
|
||||
/// Expand an `impl ExpandSyntax` and produce its Output. Use `expand_infallible` if the
|
||||
/// `ExpandSyntax` cannot produce a `Result`. You must use `expand_syntax` if EOF
|
||||
/// is an error.
|
||||
///
|
||||
/// The purpose of `expand_infallible` is to clearly mark the infallible path through
|
||||
/// an entire list of tokens that produces a fully colored version of the source.
|
||||
///
|
||||
/// If the `ExpandSyntax` can produce a `Result`, make sure to use `expand_syntax`,
|
||||
/// which will correctly show the error in the trace.
|
||||
pub fn expand_infallible<U>(&mut self, shape: impl ExpandSyntax<Output = U>) -> U
|
||||
where
|
||||
U: std::fmt::Debug + PrettyDebugWithSource + HasFallibleSpan + Clone + 'static,
|
||||
{
|
||||
let (result, added_shapes) = self.expand(shape);
|
||||
|
||||
self.finish_expand(&result, added_shapes);
|
||||
|
||||
result
|
||||
}
|
||||
|
||||
fn finish_expand<V>(&mut self, val: &V, added_shapes: usize)
|
||||
where
|
||||
V: PrettyDebugWithSource + HasFallibleSpan + Clone,
|
||||
{
|
||||
self.with_tracer(|_, tracer| {
|
||||
if val.maybe_span().is_some() || added_shapes > 0 {
|
||||
tracer.add_result(val.clone());
|
||||
}
|
||||
|
||||
tracer.success();
|
||||
})
|
||||
}
|
||||
|
||||
fn expand<U>(&mut self, shape: impl ExpandSyntax<Output = U>) -> (U, usize)
|
||||
where
|
||||
U: std::fmt::Debug + Clone + 'static,
|
||||
{
|
||||
let desc = shape.name();
|
||||
self.with_tracer(|state, tracer| {
|
||||
tracer.start(
|
||||
desc,
|
||||
next_index(state).map(|index| state.tokens[index].clone()),
|
||||
)
|
||||
});
|
||||
|
||||
let start_shapes = self.state.shapes.len();
|
||||
let result = shape.expand(self);
|
||||
let added_shapes = self.state.shapes.len() - start_shapes;
|
||||
|
||||
(result, added_shapes)
|
||||
}
|
||||
}
|
||||
|
||||
impl<'content> Iterator for TokensIterator<'content> {
|
||||
type Item = &'content SpannedToken;
|
||||
|
||||
fn next(&mut self) -> Option<Self::Item> {
|
||||
next(self)
|
||||
}
|
||||
}
|
||||
|
||||
fn next_index(state: &TokensIteratorState) -> Option<usize> {
|
||||
let mut to = state.index;
|
||||
|
||||
loop {
|
||||
if to >= state.tokens.len() {
|
||||
return None;
|
||||
}
|
||||
|
||||
if state.seen.contains(&to) {
|
||||
to += 1;
|
||||
continue;
|
||||
}
|
||||
|
||||
if to >= state.tokens.len() {
|
||||
return None;
|
||||
}
|
||||
|
||||
return Some(to);
|
||||
}
|
||||
}
|
||||
|
||||
fn next<'me, 'content>(
|
||||
iterator: &'me mut TokensIterator<'content>,
|
||||
) -> Option<&'content SpannedToken> {
|
||||
let next = next_index(&iterator.state);
|
||||
let len = iterator.len();
|
||||
|
||||
match next {
|
||||
None => {
|
||||
iterator.move_to(len);
|
||||
None
|
||||
}
|
||||
|
||||
Some(index) => {
|
||||
iterator.move_to(index + 1);
|
||||
Some(&iterator.state.tokens[index])
|
||||
}
|
||||
}
|
||||
}
|
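A minimal sketch (not part of the change set above) of driving the new TokensIterator from outside: expand_syntax handles fallible shapes (including EOF errors), atomic_parse rolls the cursor, seen-set, and colored shapes back through the Checkpoint Drop impl if the block fails, and finish_tracer closes the expansion trace. The re-exports are assumed to match the new lib.rs.

use nu_errors::ParseError;
use nu_parser::{ExpandContext, ExpandSyntax, SpannedToken, TokensIterator};
use nu_source::{HasFallibleSpan, PrettyDebugWithSource, Span};

fn expand_atomically<'a, U>(
    tokens: &'a [SpannedToken],
    context: ExpandContext<'a>,
    span: Span,
    shape: impl ExpandSyntax<Output = Result<U, ParseError>>,
) -> Result<U, ParseError>
where
    U: std::fmt::Debug + HasFallibleSpan + PrettyDebugWithSource + Clone + 'static,
{
    let mut iterator = TokensIterator::new(tokens, context, span);

    // On Err, the Checkpoint created by atomic_parse is dropped uncommitted,
    // restoring the index, the seen indices, and the emitted shapes.
    let result = iterator.atomic_parse(|it| it.expand_syntax(shape));

    // Fold any still-open trace frames back into the root frame.
    iterator.finish_tracer();
    result
}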
@ -6,7 +6,7 @@ pub(crate) mod expand_trace;
|
||||
pub(crate) use self::color_trace::*;
|
||||
pub(crate) use self::expand_trace::*;
|
||||
|
||||
use crate::parser::hir::tokens_iterator::TokensIteratorState;
|
||||
use crate::hir::tokens_iterator::TokensIteratorState;
|
||||
use nu_source::{PrettyDebug, PrettyDebugWithSource, Text};
|
||||
|
||||
#[derive(Debug)]
|
||||
@ -24,13 +24,11 @@ pub(crate) fn debug_tokens(state: &TokensIteratorState, source: &str) -> Vec<Deb
|
||||
out.push(DebugIteratorToken::Cursor);
|
||||
}
|
||||
|
||||
let msg = token.debug(source).to_string();
|
||||
if state.seen.contains(&i) {
|
||||
out.push(DebugIteratorToken::Seen(format!("{}", token.debug(source))));
|
||||
out.push(DebugIteratorToken::Seen(msg));
|
||||
} else {
|
||||
out.push(DebugIteratorToken::Unseen(format!(
|
||||
"{}",
|
||||
token.debug(source)
|
||||
)));
|
||||
out.push(DebugIteratorToken::Unseen(msg));
|
||||
}
|
||||
}
|
||||
|
@ -1,9 +1,8 @@
|
||||
use crate::errors::ShellError;
|
||||
use crate::parser::hir::syntax_shape::FlatShape;
|
||||
use crate::prelude::*;
|
||||
use crate::hir::syntax_shape::flat_shape::{FlatShape, ShapeResult};
|
||||
use ansi_term::Color;
|
||||
use log::trace;
|
||||
use nu_source::Spanned;
|
||||
use nu_errors::{ParseError, ShellError};
|
||||
use nu_source::{Spanned, Text};
|
||||
use ptree::*;
|
||||
use std::borrow::Cow;
|
||||
use std::io;
|
||||
@ -11,14 +10,24 @@ use std::io;
|
||||
#[derive(Debug, Clone)]
|
||||
pub enum FrameChild {
|
||||
#[allow(unused)]
|
||||
Shape(Spanned<FlatShape>),
|
||||
Shape(ShapeResult),
|
||||
Frame(ColorFrame),
|
||||
}
|
||||
|
||||
impl FrameChild {
|
||||
fn colored_leaf_description(&self, text: &Text, f: &mut impl io::Write) -> io::Result<()> {
|
||||
match self {
|
||||
FrameChild::Shape(shape) => write!(
|
||||
FrameChild::Shape(ShapeResult::Success(shape)) => write!(
|
||||
f,
|
||||
"{} {:?}",
|
||||
Color::White
|
||||
.bold()
|
||||
.on(Color::Green)
|
||||
.paint(format!("{:?}", shape.item)),
|
||||
shape.span.slice(text)
|
||||
),
|
||||
|
||||
FrameChild::Shape(ShapeResult::Fallback { shape, .. }) => write!(
|
||||
f,
|
||||
"{} {:?}",
|
||||
Color::White
|
||||
@ -44,13 +53,13 @@ impl FrameChild {
|
||||
pub struct ColorFrame {
|
||||
description: &'static str,
|
||||
children: Vec<FrameChild>,
|
||||
error: Option<ShellError>,
|
||||
error: Option<ParseError>,
|
||||
}
|
||||
|
||||
impl ColorFrame {
|
||||
fn colored_leaf_description(&self, f: &mut impl io::Write) -> io::Result<()> {
|
||||
if self.has_only_error_descendents() {
|
||||
if self.children.len() == 0 {
|
||||
if self.children.is_empty() {
|
||||
write!(
|
||||
f,
|
||||
"{}",
|
||||
@ -99,8 +108,7 @@ impl ColorFrame {
|
||||
.collect()
|
||||
}
|
||||
|
||||
#[allow(unused)]
|
||||
fn add_shape(&mut self, shape: Spanned<FlatShape>) {
|
||||
fn add_shape(&mut self, shape: ShapeResult) {
|
||||
self.children.push(FrameChild::Shape(shape))
|
||||
}
|
||||
|
||||
@ -108,17 +116,13 @@ impl ColorFrame {
|
||||
self.any_child_shape(|_| true)
|
||||
}
|
||||
|
||||
fn any_child_shape(&self, predicate: impl Fn(Spanned<FlatShape>) -> bool) -> bool {
|
||||
fn any_child_shape(&self, predicate: impl Fn(&ShapeResult) -> bool) -> bool {
|
||||
for item in &self.children {
|
||||
match item {
|
||||
FrameChild::Shape(shape) => {
|
||||
if predicate(*shape) {
|
||||
if let FrameChild::Shape(shape) = item {
|
||||
if predicate(shape) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
|
||||
false
|
||||
@ -126,15 +130,11 @@ impl ColorFrame {
|
||||
|
||||
fn any_child_frame(&self, predicate: impl Fn(&ColorFrame) -> bool) -> bool {
|
||||
for item in &self.children {
|
||||
match item {
|
||||
FrameChild::Frame(frame) => {
|
||||
if let FrameChild::Frame(frame) = item {
|
||||
if predicate(frame) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
|
||||
false
|
||||
@ -149,7 +149,7 @@ impl ColorFrame {
|
||||
}
|
||||
|
||||
fn has_only_error_descendents(&self) -> bool {
|
||||
if self.children.len() == 0 {
|
||||
if self.children.is_empty() {
|
||||
// if this frame has no children at all, it has only error descendents if this frame
|
||||
// is an error
|
||||
self.error.is_some()
|
||||
@ -181,14 +181,24 @@ impl ColorFrame {
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub enum TreeChild {
|
||||
Shape(Spanned<FlatShape>, Text),
|
||||
Shape(ShapeResult, Text),
|
||||
Frame(ColorFrame, Text),
|
||||
}
|
||||
|
||||
impl TreeChild {
|
||||
fn colored_leaf_description(&self, f: &mut impl io::Write) -> io::Result<()> {
|
||||
match self {
|
||||
TreeChild::Shape(shape, text) => write!(
|
||||
TreeChild::Shape(ShapeResult::Success(shape), text) => write!(
|
||||
f,
|
||||
"{} {:?}",
|
||||
Color::White
|
||||
.bold()
|
||||
.on(Color::Green)
|
||||
.paint(format!("{:?}", shape.item)),
|
||||
shape.span.slice(text)
|
||||
),
|
||||
|
||||
TreeChild::Shape(ShapeResult::Fallback { shape, .. }, text) => write!(
|
||||
f,
|
||||
"{} {:?}",
|
||||
Color::White
|
||||
@ -260,7 +270,7 @@ impl ColorTracer {
|
||||
|
||||
let result = self.frame_stack.pop().expect("Can't pop root tracer frame");
|
||||
|
||||
if self.frame_stack.len() == 0 {
|
||||
if self.frame_stack.is_empty() {
|
||||
panic!("Can't pop root tracer frame {:#?}", self);
|
||||
}
|
||||
|
||||
@ -299,8 +309,7 @@ impl ColorTracer {
|
||||
}
|
||||
}
|
||||
|
||||
#[allow(unused)]
|
||||
pub fn add_shape(&mut self, shape: Spanned<FlatShape>) {
|
||||
pub fn add_shape(&mut self, shape: ShapeResult) {
|
||||
self.current_frame().add_shape(shape);
|
||||
}
|
||||
|
||||
@ -311,7 +320,7 @@ impl ColorTracer {
|
||||
.push(FrameChild::Frame(current));
|
||||
}
|
||||
|
||||
pub fn failed(&mut self, error: &ShellError) {
|
||||
pub fn failed(&mut self, error: &ParseError) {
|
||||
let mut current = self.pop_frame();
|
||||
current.error = Some(error.clone());
|
||||
self.current_frame()
|
@ -1,25 +1,44 @@
|
||||
use crate::parser::hir::Expression;
|
||||
use crate::prelude::*;
|
||||
use crate::hir::syntax_shape::flat_shape::TraceShape;
|
||||
use crate::hir::SpannedExpression;
|
||||
use crate::parse::token_tree::SpannedToken;
|
||||
use ansi_term::Color;
|
||||
use log::trace;
|
||||
use nu_source::DebugDoc;
|
||||
use nu_errors::{ParseError, ParseErrorReason};
|
||||
use nu_protocol::{ShellTypeName, SpannedTypeName};
|
||||
use nu_source::{DebugDoc, PrettyDebug, PrettyDebugWithSource, Span, Spanned, Text};
|
||||
use ptree::*;
|
||||
use std::borrow::Cow;
|
||||
use std::fmt::Debug;
|
||||
use std::io;
|
||||
|
||||
#[derive(Debug)]
|
||||
pub enum FrameChild {
|
||||
Expr(Expression),
|
||||
Frame(ExprFrame),
|
||||
#[derive(Debug, Clone)]
|
||||
pub enum FrameChild<T: SpannedTypeName> {
|
||||
Expr(T),
|
||||
Shape(Result<TraceShape, TraceShape>),
|
||||
Frame(Box<ExprFrame<T>>),
|
||||
Result(DebugDoc),
|
||||
}
|
||||
|
||||
impl FrameChild {
|
||||
fn get_error_leaf(&self) -> Option<&'static str> {
|
||||
fn err_desc(error: &ParseError) -> &'static str {
|
||||
match error.reason() {
|
||||
ParseErrorReason::ExtraTokens { .. } => "extra tokens",
|
||||
ParseErrorReason::Mismatch { .. } => "mismatch",
|
||||
ParseErrorReason::ArgumentError { .. } => "argument error",
|
||||
ParseErrorReason::Eof { .. } => "eof",
|
||||
ParseErrorReason::InternalError { .. } => "internal error",
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: SpannedTypeName> FrameChild<T> {
|
||||
fn get_error_leaf(&self) -> Option<(&'static str, &'static str)> {
|
||||
match self {
|
||||
FrameChild::Frame(frame) if frame.error.is_some() => {
|
||||
if frame.children.len() == 0 {
|
||||
Some(frame.description)
|
||||
FrameChild::Frame(frame) => {
|
||||
if let Some(error) = &frame.error {
|
||||
if frame.children.is_empty() {
|
||||
Some((frame.description, err_desc(error)))
|
||||
} else {
|
||||
None
|
||||
}
|
||||
} else {
|
||||
None
|
||||
}
|
||||
@ -30,15 +49,34 @@ impl FrameChild {
|
||||
|
||||
fn to_tree_child(&self, text: &Text) -> TreeChild {
|
||||
match self {
|
||||
FrameChild::Expr(expr) => TreeChild::OkExpr(expr.clone(), text.clone()),
|
||||
FrameChild::Expr(expr) => TreeChild::OkExpr {
|
||||
source: expr.spanned_type_name().span,
|
||||
desc: expr.spanned_type_name().item,
|
||||
text: text.clone(),
|
||||
},
|
||||
FrameChild::Shape(Ok(shape)) => TreeChild::OkShape {
|
||||
source: shape.spanned_type_name().span,
|
||||
desc: shape.spanned_type_name().item,
|
||||
text: text.clone(),
|
||||
fallback: false,
|
||||
},
|
||||
FrameChild::Shape(Err(shape)) => TreeChild::OkShape {
|
||||
source: shape.spanned_type_name().span,
|
||||
desc: shape.spanned_type_name().item,
|
||||
text: text.clone(),
|
||||
fallback: true,
|
||||
},
|
||||
FrameChild::Result(result) => {
|
||||
let result = format!("{}", result.display());
|
||||
let result = result.display();
|
||||
TreeChild::OkNonExpr(result)
|
||||
}
|
||||
FrameChild::Frame(frame) => {
|
||||
if frame.error.is_some() {
|
||||
if frame.children.len() == 0 {
|
||||
TreeChild::ErrorLeaf(vec![frame.description])
|
||||
if let Some(err) = &frame.error {
|
||||
if frame.children.is_empty() {
|
||||
TreeChild::ErrorLeaf(
|
||||
vec![(frame.description, err_desc(err))],
|
||||
frame.token_desc(),
|
||||
)
|
||||
} else {
|
||||
TreeChild::ErrorFrame(frame.to_tree_frame(text), text.clone())
|
||||
}
|
||||
@ -50,14 +88,22 @@ impl FrameChild {
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct ExprFrame {
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct ExprFrame<T: SpannedTypeName> {
|
||||
description: &'static str,
|
||||
children: Vec<FrameChild>,
|
||||
token: Option<SpannedToken>,
|
||||
children: Vec<FrameChild<T>>,
|
||||
error: Option<ParseError>,
|
||||
}
|
||||
|
||||
impl ExprFrame {
|
||||
impl<T: SpannedTypeName> ExprFrame<T> {
|
||||
fn token_desc(&self) -> &'static str {
|
||||
match &self.token {
|
||||
None => "EOF",
|
||||
Some(token) => token.type_name(),
|
||||
}
|
||||
}
|
||||
|
||||
fn to_tree_frame(&self, text: &Text) -> TreeFrame {
|
||||
let mut children = vec![];
|
||||
let mut errors = vec![];
|
||||
@ -66,27 +112,36 @@ impl ExprFrame {
|
||||
if let Some(error_leaf) = child.get_error_leaf() {
|
||||
errors.push(error_leaf);
|
||||
continue;
|
||||
} else if errors.len() > 0 {
|
||||
children.push(TreeChild::ErrorLeaf(errors));
|
||||
} else if !errors.is_empty() {
|
||||
children.push(TreeChild::ErrorLeaf(errors, self.token_desc()));
|
||||
errors = vec![];
|
||||
}
|
||||
|
||||
children.push(child.to_tree_child(text));
|
||||
}
|
||||
|
||||
if errors.len() > 0 {
|
||||
children.push(TreeChild::ErrorLeaf(errors));
|
||||
if !errors.is_empty() {
|
||||
children.push(TreeChild::ErrorLeaf(errors, self.token_desc()));
|
||||
}
|
||||
|
||||
TreeFrame {
|
||||
description: self.description,
|
||||
token_desc: self.token_desc(),
|
||||
children,
|
||||
error: self.error.clone(),
|
||||
}
|
||||
}
|
||||
|
||||
fn add_expr(&mut self, expr: Expression) {
|
||||
self.children.push(FrameChild::Expr(expr))
|
||||
fn add_return(&mut self, value: T) {
|
||||
self.children.push(FrameChild::Expr(value))
|
||||
}
|
||||
|
||||
fn add_shape(&mut self, shape: TraceShape) {
|
||||
self.children.push(FrameChild::Shape(Ok(shape)))
|
||||
}
|
||||
|
||||
fn add_err_shape(&mut self, shape: TraceShape) {
|
||||
self.children.push(FrameChild::Shape(Err(shape)))
|
||||
}
|
||||
|
||||
fn add_result(&mut self, result: impl PrettyDebug) {
|
||||
@ -97,6 +152,7 @@ impl ExprFrame {
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct TreeFrame {
|
||||
description: &'static str,
|
||||
token_desc: &'static str,
|
||||
children: Vec<TreeChild>,
|
||||
error: Option<ParseError>,
|
||||
}
|
||||
@ -112,45 +168,53 @@ impl TreeFrame {
|
||||
write!(f, "{}", Color::Yellow.bold().paint(self.description))?;
|
||||
}
|
||||
|
||||
write!(
|
||||
f,
|
||||
"{}",
|
||||
Color::White.bold().paint(&format!("({})", self.token_desc))
|
||||
)?;
|
||||
|
||||
write!(f, " -> ")?;
|
||||
self.children[0].leaf_description(f)
|
||||
} else {
|
||||
if self.error.is_some() {
|
||||
if self.children.len() == 0 {
|
||||
if self.children.is_empty() {
|
||||
write!(
|
||||
f,
|
||||
"{}",
|
||||
Color::White.bold().on(Color::Red).paint(self.description)
|
||||
)
|
||||
)?
|
||||
} else {
|
||||
write!(f, "{}", Color::Red.normal().paint(self.description))
|
||||
write!(f, "{}", Color::Red.normal().paint(self.description))?
|
||||
}
|
||||
} else if self.has_descendent_green() {
|
||||
write!(f, "{}", Color::Green.normal().paint(self.description))
|
||||
write!(f, "{}", Color::Green.normal().paint(self.description))?
|
||||
} else {
|
||||
write!(f, "{}", Color::Yellow.bold().paint(self.description))
|
||||
write!(f, "{}", Color::Yellow.bold().paint(self.description))?
|
||||
}
|
||||
|
||||
write!(
|
||||
f,
|
||||
"{}",
|
||||
Color::White.bold().paint(&format!("({})", self.token_desc))
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
fn has_child_green(&self) -> bool {
|
||||
self.children.iter().any(|item| match item {
|
||||
TreeChild::OkFrame(..) | TreeChild::ErrorFrame(..) | TreeChild::ErrorLeaf(..) => false,
|
||||
TreeChild::OkExpr(..) | TreeChild::OkNonExpr(..) => true,
|
||||
TreeChild::OkExpr { .. } | TreeChild::OkShape { .. } | TreeChild::OkNonExpr(..) => true,
|
||||
})
|
||||
}
|
||||
|
||||
fn any_child_frame(&self, predicate: impl Fn(&TreeFrame) -> bool) -> bool {
|
||||
for item in &self.children {
|
||||
match item {
|
||||
TreeChild::OkFrame(frame, ..) => {
|
||||
if let TreeChild::OkFrame(frame, ..) = item {
|
||||
if predicate(frame) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
|
||||
false
|
||||
@ -168,9 +232,10 @@ impl TreeFrame {
|
||||
if self.children.len() == 1 {
|
||||
let child: &TreeChild = &self.children[0];
|
||||
match child {
|
||||
TreeChild::OkExpr(..) | TreeChild::OkNonExpr(..) | TreeChild::ErrorLeaf(..) => {
|
||||
vec![]
|
||||
}
|
||||
TreeChild::OkExpr { .. }
|
||||
| TreeChild::OkShape { .. }
|
||||
| TreeChild::OkNonExpr(..)
|
||||
| TreeChild::ErrorLeaf(..) => vec![],
|
||||
TreeChild::OkFrame(frame, _) | TreeChild::ErrorFrame(frame, _) => {
|
||||
frame.children_for_formatting(text)
|
||||
}
|
||||
@ -184,21 +249,44 @@ impl TreeFrame {
|
||||
#[derive(Debug, Clone)]
|
||||
pub enum TreeChild {
|
||||
OkNonExpr(String),
|
||||
OkExpr(Expression, Text),
|
||||
OkExpr {
|
||||
source: Span,
|
||||
desc: &'static str,
|
||||
text: Text,
|
||||
},
|
||||
OkShape {
|
||||
source: Span,
|
||||
desc: &'static str,
|
||||
text: Text,
|
||||
fallback: bool,
|
||||
},
|
||||
OkFrame(TreeFrame, Text),
|
||||
ErrorFrame(TreeFrame, Text),
|
||||
ErrorLeaf(Vec<&'static str>),
|
||||
ErrorLeaf(Vec<(&'static str, &'static str)>, &'static str),
|
||||
}
|
||||
|
||||
impl TreeChild {
|
||||
fn leaf_description(&self, f: &mut impl io::Write) -> io::Result<()> {
|
||||
match self {
|
||||
TreeChild::OkExpr(expr, text) => write!(
|
||||
TreeChild::OkExpr { source, desc, text } => write!(
|
||||
f,
|
||||
"{} {} {}",
|
||||
Color::Cyan.normal().paint("returns"),
|
||||
Color::White.bold().on(Color::Green).paint(expr.type_name()),
|
||||
expr.span.slice(text)
|
||||
Color::White.bold().on(Color::Green).paint(*desc),
|
||||
source.slice(text)
|
||||
),
|
||||
|
||||
TreeChild::OkShape {
|
||||
source,
|
||||
desc,
|
||||
text,
|
||||
fallback,
|
||||
} => write!(
|
||||
f,
|
||||
"{} {} {}",
|
||||
Color::Purple.normal().paint("paints"),
|
||||
Color::White.bold().on(Color::Green).paint(*desc),
|
||||
source.slice(text)
|
||||
),
|
||||
|
||||
TreeChild::OkNonExpr(result) => write!(
|
||||
@ -208,20 +296,24 @@ impl TreeChild {
|
||||
Color::White
|
||||
.bold()
|
||||
.on(Color::Green)
|
||||
.paint(format!("{}", result))
|
||||
.paint(result.to_string())
|
||||
),
|
||||
|
||||
TreeChild::ErrorLeaf(desc) => {
|
||||
TreeChild::ErrorLeaf(desc, token_desc) => {
|
||||
let last = desc.len() - 1;
|
||||
|
||||
for (i, item) in desc.iter().enumerate() {
|
||||
write!(f, "{}", Color::White.bold().on(Color::Red).paint(*item))?;
|
||||
for (i, (desc, err_desc)) in desc.iter().enumerate() {
|
||||
write!(f, "{}", Color::White.bold().on(Color::Red).paint(*desc))?;
|
||||
|
||||
write!(f, " {}", Color::White.bold().paint(*err_desc))?;
|
||||
|
||||
if i != last {
|
||||
write!(f, "{}", Color::White.normal().paint(", "))?;
|
||||
}
|
||||
}
|
||||
|
||||
// write!(f, " {}", Color::Black.bold().paint(*token_desc))?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
@ -241,9 +333,10 @@ impl TreeItem for TreeChild {
|
||||
|
||||
fn children(&self) -> Cow<[Self::Child]> {
|
||||
match self {
|
||||
TreeChild::OkExpr(..) | TreeChild::OkNonExpr(..) | TreeChild::ErrorLeaf(..) => {
|
||||
Cow::Borrowed(&[])
|
||||
}
|
||||
TreeChild::OkExpr { .. }
|
||||
| TreeChild::OkShape { .. }
|
||||
| TreeChild::OkNonExpr(..)
|
||||
| TreeChild::ErrorLeaf(..) => Cow::Borrowed(&[]),
|
||||
TreeChild::OkFrame(frame, text) | TreeChild::ErrorFrame(frame, text) => {
|
||||
Cow::Owned(frame.children_for_formatting(text))
|
||||
}
|
||||
@ -251,47 +344,49 @@ impl TreeItem for TreeChild {
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct ExpandTracer {
|
||||
frame_stack: Vec<ExprFrame>,
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct ExpandTracer<T: SpannedTypeName> {
|
||||
desc: &'static str,
|
||||
frame_stack: Vec<ExprFrame<T>>,
|
||||
source: Text,
|
||||
}
|
||||
|
||||
impl ExpandTracer {
|
||||
impl<T: SpannedTypeName + Debug> ExpandTracer<T> {
|
||||
pub fn print(&self, source: Text) -> PrintTracer {
|
||||
let root = self
|
||||
.frame_stack
|
||||
.iter()
|
||||
.nth(0)
|
||||
.unwrap()
|
||||
.to_tree_frame(&source);
|
||||
let root = self.frame_stack[0].to_tree_frame(&source);
|
||||
|
||||
PrintTracer { root, source }
|
||||
PrintTracer {
|
||||
root,
|
||||
desc: self.desc,
|
||||
source,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn new(source: Text) -> ExpandTracer {
|
||||
pub fn new(desc: &'static str, source: Text) -> ExpandTracer<T> {
|
||||
let root = ExprFrame {
|
||||
description: "Trace",
|
||||
children: vec![],
|
||||
token: None,
|
||||
error: None,
|
||||
};
|
||||
|
||||
ExpandTracer {
|
||||
desc,
|
||||
frame_stack: vec![root],
|
||||
source,
|
||||
}
|
||||
}
|
||||
|
||||
fn current_frame(&mut self) -> &mut ExprFrame {
|
||||
fn current_frame(&mut self) -> &mut ExprFrame<T> {
|
||||
let frames = &mut self.frame_stack;
|
||||
let last = frames.len() - 1;
|
||||
&mut frames[last]
|
||||
}
|
||||
|
||||
fn pop_frame(&mut self) -> ExprFrame {
|
||||
fn pop_frame(&mut self) -> ExprFrame<T> {
|
||||
let result = self.frame_stack.pop().expect("Can't pop root tracer frame");
|
||||
|
||||
if self.frame_stack.len() == 0 {
|
||||
if self.frame_stack.is_empty() {
|
||||
panic!("Can't pop root tracer frame");
|
||||
}
|
||||
|
||||
@ -300,10 +395,11 @@ impl ExpandTracer {
|
||||
result
|
||||
}
|
||||
|
||||
pub fn start(&mut self, description: &'static str) {
|
||||
pub fn start(&mut self, description: &'static str, token: Option<SpannedToken>) {
|
||||
let frame = ExprFrame {
|
||||
description,
|
||||
children: vec![],
|
||||
token,
|
||||
error: None,
|
||||
};
|
||||
|
||||
@ -311,8 +407,36 @@ impl ExpandTracer {
|
||||
self.debug();
|
||||
}
|
||||
|
||||
pub fn add_expr(&mut self, shape: Expression) {
|
||||
self.current_frame().add_expr(shape);
|
||||
pub fn add_return(&mut self, value: T) {
|
||||
self.current_frame().add_return(value);
|
||||
}
|
||||
|
||||
pub fn add_shape(&mut self, shape: TraceShape) {
|
||||
self.current_frame().add_shape(shape);
|
||||
}
|
||||
|
||||
pub fn add_err_shape(&mut self, shape: TraceShape) {
|
||||
self.current_frame().add_err_shape(shape);
|
||||
}
|
||||
|
||||
pub fn finish(&mut self) {
|
||||
loop {
|
||||
if self.frame_stack.len() == 1 {
|
||||
break;
|
||||
}
|
||||
|
||||
let frame = self.pop_frame();
|
||||
self.current_frame()
|
||||
.children
|
||||
.push(FrameChild::Frame(Box::new(frame)));
|
||||
}
|
||||
}
|
||||
|
||||
pub fn eof_frame(&mut self) {
|
||||
let current = self.pop_frame();
|
||||
self.current_frame()
|
||||
.children
|
||||
.push(FrameChild::Frame(Box::new(current)));
|
||||
}
|
||||
|
||||
pub fn add_result(&mut self, result: impl PrettyDebugWithSource) {
|
||||
@ -326,7 +450,7 @@ impl ExpandTracer {
|
||||
let current = self.pop_frame();
|
||||
self.current_frame()
|
||||
.children
|
||||
.push(FrameChild::Frame(current));
|
||||
.push(FrameChild::Frame(Box::new(current)));
|
||||
}
|
||||
|
||||
pub fn failed(&mut self, error: &ParseError) {
|
||||
@ -334,7 +458,7 @@ impl ExpandTracer {
|
||||
current.error = Some(error.clone());
|
||||
self.current_frame()
|
||||
.children
|
||||
.push(FrameChild::Frame(current));
|
||||
.push(FrameChild::Frame(Box::new(current)));
|
||||
}
|
||||
|
||||
fn debug(&self) {
|
||||
@ -352,6 +476,7 @@ impl ExpandTracer {
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct PrintTracer {
|
||||
desc: &'static str,
|
||||
root: TreeFrame,
|
||||
source: Text,
|
||||
}
|
||||
@ -360,7 +485,7 @@ impl TreeItem for PrintTracer {
|
||||
type Child = TreeChild;
|
||||
|
||||
fn write_self<W: io::Write>(&self, f: &mut W, style: &Style) -> io::Result<()> {
|
||||
write!(f, "{}", style.paint("Expansion Trace"))
|
||||
write!(f, "{}", style.paint(self.desc))
|
||||
}
|
||||
|
||||
fn children(&self) -> Cow<[Self::Child]> {
|
56
crates/nu-parser/src/hir/tokens_iterator/into_shapes.rs
Normal file
@ -0,0 +1,56 @@
|
||||
use crate::hir::syntax_shape::flat_shape::{FlatShape, ShapeResult};
|
||||
use nu_source::{Span, Spanned, SpannedItem};
|
||||
|
||||
pub struct FlatShapes {
|
||||
shapes: Vec<ShapeResult>,
|
||||
}
|
||||
|
||||
impl<'a> IntoIterator for &'a FlatShapes {
|
||||
type Item = &'a ShapeResult;
|
||||
type IntoIter = std::slice::Iter<'a, ShapeResult>;
|
||||
|
||||
fn into_iter(self) -> Self::IntoIter {
|
||||
self.shapes.iter()
|
||||
}
|
||||
}
|
||||
|
||||
pub trait IntoShapes: 'static {
|
||||
fn into_shapes(self, span: Span) -> FlatShapes;
|
||||
}
|
||||
|
||||
impl IntoShapes for FlatShape {
|
||||
fn into_shapes(self, span: Span) -> FlatShapes {
|
||||
FlatShapes {
|
||||
shapes: vec![ShapeResult::Success(self.spanned(span))],
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl IntoShapes for Vec<Spanned<FlatShape>> {
|
||||
fn into_shapes(self, _span: Span) -> FlatShapes {
|
||||
FlatShapes {
|
||||
shapes: self.into_iter().map(ShapeResult::Success).collect(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl IntoShapes for Vec<ShapeResult> {
|
||||
fn into_shapes(self, _span: Span) -> FlatShapes {
|
||||
FlatShapes { shapes: self }
|
||||
}
|
||||
}
|
||||
|
||||
impl IntoShapes for () {
|
||||
fn into_shapes(self, _span: Span) -> FlatShapes {
|
||||
FlatShapes { shapes: vec![] }
|
||||
}
|
||||
}
|
||||
|
||||
impl IntoShapes for Option<FlatShape> {
|
||||
fn into_shapes(self, span: Span) -> FlatShapes {
|
||||
match self {
|
||||
Option::None => ().into_shapes(span),
|
||||
Option::Some(shape) => shape.into_shapes(span),
|
||||
}
|
||||
}
|
||||
}
|
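A minimal sketch (not part of the change set above), written as crate-internal code, of what these IntoShapes impls buy: an expand_token block can return (), a bare FlatShape, an Option<FlatShape>, or an already-spanned list, and each converts into the same FlatShapes collection.

use crate::hir::syntax_shape::flat_shape::FlatShape;
use crate::hir::tokens_iterator::into_shapes::IntoShapes;
use nu_source::Span;

fn demo(span: Span) {
    let _one = FlatShape::Int.into_shapes(span); // one Success shape, spanned with `span`
    let _none = ().into_shapes(span); // contributes no shapes
    let _maybe = Some(FlatShape::Word).into_shapes(span); // zero or one shape
}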
30
crates/nu-parser/src/hir/tokens_iterator/pattern.rs
Normal file
@ -0,0 +1,30 @@
|
||||
use crate::parse::token_tree::{ParseErrorFn, SpannedToken, TokenType};
|
||||
use nu_errors::ParseError;
|
||||
use std::borrow::Cow;
|
||||
|
||||
pub struct Pattern<T> {
|
||||
parts: Vec<Box<dyn TokenType<Output = T>>>,
|
||||
}
|
||||
|
||||
impl<T> TokenType for Pattern<T> {
|
||||
type Output = T;
|
||||
|
||||
fn desc(&self) -> Cow<'static, str> {
|
||||
Cow::Borrowed("pattern")
|
||||
}
|
||||
|
||||
fn extract_token_value(
|
||||
&self,
|
||||
token: &SpannedToken,
|
||||
err: ParseErrorFn<Self::Output>,
|
||||
) -> Result<Self::Output, ParseError> {
|
||||
for part in &self.parts {
|
||||
match part.extract_token_value(token, err) {
|
||||
Err(_) => {}
|
||||
Ok(result) => return Ok(result),
|
||||
}
|
||||
}
|
||||
|
||||
err()
|
||||
}
|
||||
}
|
105
crates/nu-parser/src/hir/tokens_iterator/state.rs
Normal file
@ -0,0 +1,105 @@
|
||||
use crate::hir::syntax_shape::flat_shape::ShapeResult;
|
||||
use crate::hir::syntax_shape::ExpandContext;
|
||||
use crate::hir::tokens_iterator::TokensIterator;
|
||||
use crate::parse::token_tree::SpannedToken;
|
||||
|
||||
use getset::Getters;
|
||||
use nu_errors::ParseError;
|
||||
use nu_protocol::SpannedTypeName;
|
||||
use nu_source::Span;
|
||||
use std::sync::Arc;
|
||||
|
||||
#[derive(Getters, Debug, Clone)]
|
||||
pub struct TokensIteratorState<'content> {
|
||||
pub(crate) tokens: &'content [SpannedToken],
|
||||
pub(crate) span: Span,
|
||||
pub(crate) index: usize,
|
||||
pub(crate) seen: indexmap::IndexSet<usize>,
|
||||
#[get = "pub"]
|
||||
pub(crate) shapes: Vec<ShapeResult>,
|
||||
pub(crate) errors: indexmap::IndexMap<Span, Vec<String>>,
|
||||
pub(crate) context: Arc<ExpandContext<'content>>,
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct Peeked<'content, 'me> {
|
||||
pub(crate) node: Option<&'content SpannedToken>,
|
||||
pub(crate) iterator: &'me mut TokensIterator<'content>,
|
||||
pub(crate) from: usize,
|
||||
pub(crate) to: usize,
|
||||
}
|
||||
|
||||
impl<'content, 'me> Peeked<'content, 'me> {
|
||||
pub fn commit(&mut self) -> Option<&'content SpannedToken> {
|
||||
let Peeked {
|
||||
node,
|
||||
iterator,
|
||||
from,
|
||||
to,
|
||||
} = self;
|
||||
|
||||
let node = (*node)?;
|
||||
iterator.commit(*from, *to);
|
||||
Some(node)
|
||||
}
|
||||
|
||||
pub fn rollback(self) {}
|
||||
|
||||
pub fn not_eof(self, expected: &str) -> Result<PeekedNode<'content, 'me>, ParseError> {
|
||||
match self.node {
|
||||
None => Err(ParseError::unexpected_eof(
|
||||
expected.to_string(),
|
||||
self.iterator.eof_span(),
|
||||
)),
|
||||
Some(node) => Ok(PeekedNode {
|
||||
node,
|
||||
iterator: self.iterator,
|
||||
from: self.from,
|
||||
to: self.to,
|
||||
}),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn type_error(&self, expected: &'static str) -> ParseError {
|
||||
peek_error(self.node, self.iterator.eof_span(), expected)
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct PeekedNode<'content, 'me> {
|
||||
pub(crate) node: &'content SpannedToken,
|
||||
pub(crate) iterator: &'me mut TokensIterator<'content>,
|
||||
from: usize,
|
||||
to: usize,
|
||||
}
|
||||
|
||||
impl<'content, 'me> PeekedNode<'content, 'me> {
|
||||
pub fn commit(self) -> &'content SpannedToken {
|
||||
let PeekedNode {
|
||||
node,
|
||||
iterator,
|
||||
from,
|
||||
to,
|
||||
} = self;
|
||||
|
||||
iterator.commit(from, to);
|
||||
node
|
||||
}
|
||||
|
||||
pub fn rollback(self) {}
|
||||
|
||||
pub fn type_error(&self, expected: &'static str) -> ParseError {
|
||||
peek_error(Some(self.node), self.iterator.eof_span(), expected)
|
||||
}
|
||||
}
|
||||
|
||||
pub fn peek_error(
|
||||
node: Option<&SpannedToken>,
|
||||
eof_span: Span,
|
||||
expected: &'static str,
|
||||
) -> ParseError {
|
||||
match node {
|
||||
None => ParseError::unexpected_eof(expected, eof_span),
|
||||
Some(node) => ParseError::mismatch(expected, node.spanned_type_name()),
|
||||
}
|
||||
}
|
21
crates/nu-parser/src/hir/tokens_iterator/tests.rs
Normal file
@ -0,0 +1,21 @@
|
||||
use crate::hir::TokensIterator;
|
||||
use crate::parse::token_tree_builder::TokenTreeBuilder as b;
|
||||
use crate::Span;
|
||||
|
||||
#[test]
|
||||
<<<<<<< HEAD
|
||||
fn supplies_tokens() {
|
||||
let tokens = b::token_list(vec![b::it_var(), b::op("."), b::bare("cpu")]);
|
||||
=======
|
||||
fn supplies_tokens() -> Result<(), Box<dyn std::error::Error>> {
|
||||
let tokens = b::token_list(vec![b::var("it"), b::op("."), b::bare("cpu")]);
|
||||
>>>>>>> master
|
||||
let (tokens, _) = b::build(tokens);
|
||||
|
||||
let tokens = tokens.expect_list();
|
||||
let mut iterator = TokensIterator::new(tokens, Span::unknown());
|
||||
|
||||
iterator.next()?.expect_var();
|
||||
iterator.next()?.expect_dot();
|
||||
iterator.next()?.expect_bare();
|
||||
}
|
87
crates/nu-parser/src/lib.rs
Normal file
@ -0,0 +1,87 @@
|
||||
#[macro_use]
|
||||
pub mod macros;
|
||||
|
||||
pub mod commands;
|
||||
pub mod hir;
|
||||
pub mod parse;
|
||||
pub mod parse_command;
|
||||
|
||||
pub use crate::commands::classified::{
|
||||
external::ExternalCommand, internal::InternalCommand, ClassifiedCommand, ClassifiedPipeline,
|
||||
};
|
||||
pub use crate::hir::syntax_shape::flat_shape::{FlatShape, ShapeResult};
|
||||
pub use crate::hir::syntax_shape::{ExpandContext, ExpandSyntax, PipelineShape, SignatureRegistry};
|
||||
pub use crate::hir::tokens_iterator::TokensIterator;
|
||||
pub use crate::parse::files::Files;
|
||||
pub use crate::parse::flag::Flag;
|
||||
pub use crate::parse::operator::{CompareOperator, EvaluationOperator};
|
||||
pub use crate::parse::parser::Number;
|
||||
pub use crate::parse::parser::{module, pipeline};
|
||||
pub use crate::parse::token_tree::{Delimiter, SpannedToken, Token};
|
||||
pub use crate::parse::token_tree_builder::TokenTreeBuilder;
|
||||
|
||||
use log::log_enabled;
|
||||
use nu_errors::ShellError;
|
||||
use nu_protocol::{errln, outln};
|
||||
use nu_source::{nom_input, HasSpan, Text};
|
||||
|
||||
pub fn pipeline_shapes(line: &str, expand_context: ExpandContext) -> Vec<ShapeResult> {
|
||||
let tokens = parse_pipeline(line);
|
||||
|
||||
match tokens {
|
||||
Err(_) => vec![],
|
||||
Ok(v) => {
|
||||
let pipeline = match v.as_pipeline() {
|
||||
Err(_) => return vec![],
|
||||
Ok(v) => v,
|
||||
};
|
||||
|
||||
let tokens = vec![Token::Pipeline(pipeline).into_spanned(v.span())];
|
||||
let mut tokens = TokensIterator::new(&tokens[..], expand_context, v.span());
|
||||
|
||||
let shapes = {
|
||||
// We just constructed a token list that only contains a pipeline, so it can't fail
|
||||
let result = tokens.expand_infallible(PipelineShape);
|
||||
|
||||
if let Some(failure) = result.failed {
|
||||
errln!(
|
||||
"BUG: PipelineShape didn't find a pipeline :: {:#?}",
|
||||
failure
|
||||
);
|
||||
}
|
||||
|
||||
tokens.finish_tracer();
|
||||
|
||||
tokens.state().shapes()
|
||||
};
|
||||
|
||||
if log_enabled!(target: "nu::expand_syntax", log::Level::Debug) {
|
||||
outln!("");
|
||||
let _ = ptree::print_tree(&tokens.expand_tracer().clone().print(Text::from(line)));
|
||||
outln!("");
|
||||
}
|
||||
|
||||
shapes.clone()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn parse_pipeline(input: &str) -> Result<SpannedToken, ShellError> {
|
||||
let _ = pretty_env_logger::try_init();
|
||||
|
||||
match pipeline(nom_input(input)) {
|
||||
Ok((_rest, val)) => Ok(val),
|
||||
Err(err) => Err(ShellError::parse_error(err)),
|
||||
}
|
||||
}
|
||||
|
||||
pub use parse_pipeline as parse;
|
||||
|
||||
pub fn parse_script(input: &str) -> Result<SpannedToken, ShellError> {
|
||||
let _ = pretty_env_logger::try_init();
|
||||
|
||||
match module(nom_input(input)) {
|
||||
Ok((_rest, val)) => Ok(val),
|
||||
Err(err) => Err(ShellError::parse_error(err)),
|
||||
}
|
||||
}
|
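A minimal sketch (not part of the change set above) of the top-level entry point; the pipeline string is only an example.

use nu_parser::parse_pipeline;
use nu_source::HasSpan;

fn demo() {
    match parse_pipeline("ls | where size > 10") {
        Ok(token) => println!("parsed a pipeline spanning {:?}", token.span()),
        Err(err) => eprintln!("parse error: {:?}", err),
    }
}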
9
crates/nu-parser/src/macros.rs
Normal file
@ -0,0 +1,9 @@
|
||||
#[macro_export]
|
||||
macro_rules! return_ok {
|
||||
($expr:expr) => {
|
||||
match $expr {
|
||||
Ok(val) => return Ok(val),
|
||||
Err(_) => {}
|
||||
}
|
||||
};
|
||||
}
|
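A minimal sketch (not part of the change set above) of the return_ok! macro: it early-returns the first Ok value and otherwise falls through, which is the usual pattern when trying alternative parsers in order.

use nu_parser::return_ok;

fn first_success() -> Result<i32, &'static str> {
    return_ok!(Err::<i32, &'static str>("first attempt failed")); // falls through
    return_ok!(Ok::<i32, &'static str>(42)); // returns Ok(42) here
    Err("nothing matched")
}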
@ -1,11 +1,12 @@
|
||||
pub(crate) mod call_node;
|
||||
pub(crate) mod comment;
|
||||
pub(crate) mod files;
|
||||
pub(crate) mod flag;
|
||||
pub(crate) mod number;
|
||||
pub(crate) mod operator;
|
||||
pub(crate) mod parser;
|
||||
pub(crate) mod pipeline;
|
||||
pub(crate) mod token_tree;
|
||||
pub(crate) mod token_tree_builder;
|
||||
pub(crate) mod tokens;
|
||||
pub(crate) mod unit;
|
||||
pub(crate) mod util;
|
@ -1,13 +1,13 @@
|
||||
use crate::parser::TokenNode;
|
||||
use crate::prelude::*;
|
||||
use crate::parse::token_tree::SpannedToken;
|
||||
use getset::Getters;
|
||||
use nu_source::{b, DebugDocBuilder, PrettyDebugWithSource};
|
||||
|
||||
#[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Getters)]
|
||||
pub struct CallNode {
|
||||
#[get = "pub(crate)"]
|
||||
head: Box<TokenNode>,
|
||||
head: Box<SpannedToken>,
|
||||
#[get = "pub(crate)"]
|
||||
children: Option<Vec<TokenNode>>,
|
||||
children: Option<Vec<SpannedToken>>,
|
||||
}
|
||||
|
||||
impl PrettyDebugWithSource for CallNode {
|
||||
@ -29,8 +29,8 @@ impl PrettyDebugWithSource for CallNode {
|
||||
}
|
||||
|
||||
impl CallNode {
|
||||
pub fn new(head: Box<TokenNode>, children: Vec<TokenNode>) -> CallNode {
|
||||
if children.len() == 0 {
|
||||
pub fn new(head: Box<SpannedToken>, children: Vec<SpannedToken>) -> CallNode {
|
||||
if children.is_empty() {
|
||||
CallNode {
|
||||
head,
|
||||
children: None,
|
34
crates/nu-parser/src/parse/comment.rs
Normal file
@ -0,0 +1,34 @@
|
||||
use derive_new::new;
|
||||
use getset::Getters;
|
||||
use nu_source::{b, DebugDocBuilder, PrettyDebugWithSource, Span};
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Deserialize, Serialize)]
|
||||
pub enum CommentKind {
|
||||
Line,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Getters, new)]
|
||||
pub struct Comment {
|
||||
pub(crate) kind: CommentKind,
|
||||
pub(crate) text: Span,
|
||||
}
|
||||
|
||||
impl Comment {
|
||||
pub fn line(text: impl Into<Span>) -> Comment {
|
||||
Comment {
|
||||
kind: CommentKind::Line,
|
||||
text: text.into(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl PrettyDebugWithSource for Comment {
|
||||
fn pretty_debug(&self, source: &str) -> DebugDocBuilder {
|
||||
let prefix = match self.kind {
|
||||
CommentKind::Line => b::description("#"),
|
||||
};
|
||||
|
||||
prefix + b::description(self.text.slice(source))
|
||||
}
|
||||
}
|
151
crates/nu-parser/src/parse/files.rs
Normal file
@ -0,0 +1,151 @@
|
||||
use derive_new::new;
|
||||
use language_reporting::{FileName, Location};
|
||||
use log::trace;
|
||||
use nu_source::Span;
|
||||
|
||||
#[derive(new, Debug, Clone)]
|
||||
pub struct Files {
|
||||
snippet: String,
|
||||
}
|
||||
|
||||
impl language_reporting::ReportingFiles for Files {
|
||||
type Span = Span;
|
||||
type FileId = usize;
|
||||
|
||||
fn byte_span(
|
||||
&self,
|
||||
_file: Self::FileId,
|
||||
from_index: usize,
|
||||
to_index: usize,
|
||||
) -> Option<Self::Span> {
|
||||
Some(Span::new(from_index, to_index))
|
||||
}
|
||||
|
||||
fn file_id(&self, _tag: Self::Span) -> Self::FileId {
|
||||
0
|
||||
}
|
||||
|
||||
fn file_name(&self, _file: Self::FileId) -> FileName {
|
||||
FileName::Verbatim("shell".to_string())
|
||||
}
|
||||
|
||||
fn byte_index(&self, _file: Self::FileId, _line: usize, _column: usize) -> Option<usize> {
|
||||
unimplemented!("byte_index")
|
||||
}
|
||||
|
||||
fn location(&self, _file: Self::FileId, byte_index: usize) -> Option<Location> {
|
||||
trace!("finding location for {}", byte_index);
|
||||
|
||||
let source = &self.snippet;
|
||||
let mut seen_lines = 0;
|
||||
let mut seen_bytes = 0;
|
||||
|
||||
for (pos, slice) in source.match_indices('\n') {
|
||||
trace!(
|
||||
"searching byte_index={} seen_bytes={} pos={} slice={:?} slice.len={} source={:?}",
|
||||
byte_index,
|
||||
seen_bytes,
|
||||
pos,
|
||||
slice,
|
||||
source.len(),
|
||||
source
|
||||
);
|
||||
|
||||
if pos >= byte_index {
|
||||
trace!(
|
||||
"returning {}:{} seen_lines={} byte_index={} pos={} seen_bytes={}",
|
||||
seen_lines,
|
||||
byte_index,
|
||||
pos,
|
||||
seen_lines,
|
||||
byte_index,
|
||||
seen_bytes
|
||||
);
|
||||
|
||||
return Some(language_reporting::Location::new(
|
||||
seen_lines,
|
||||
byte_index - pos,
|
||||
));
|
||||
} else {
|
||||
seen_lines += 1;
|
||||
seen_bytes = pos;
|
||||
}
|
||||
}
|
||||
|
||||
if seen_lines == 0 {
|
||||
trace!("seen_lines=0 end={}", source.len() - 1);
|
||||
|
||||
// if we got here, there were no newlines in the source
|
||||
Some(language_reporting::Location::new(0, source.len() - 1))
|
||||
} else {
|
||||
trace!(
|
||||
"last line seen_lines={} end={}",
|
||||
seen_lines,
|
||||
source.len() - 1 - byte_index
|
||||
);
|
||||
|
||||
// if we got here and we didn't return, it should mean that we're talking about
|
||||
// the last line
|
||||
Some(language_reporting::Location::new(
|
||||
seen_lines,
|
||||
source.len() - 1 - byte_index,
|
||||
))
|
||||
}
|
||||
}
|
||||
|
||||
fn line_span(&self, _file: Self::FileId, lineno: usize) -> Option<Self::Span> {
|
||||
trace!("finding line_span for {}", lineno);
|
||||
|
||||
let source = &self.snippet;
|
||||
let mut seen_lines = 0;
|
||||
let mut seen_bytes = 0;
|
||||
|
||||
for (pos, _) in source.match_indices('\n') {
|
||||
trace!(
|
||||
"lineno={} seen_lines={} seen_bytes={} pos={}",
|
||||
lineno,
|
||||
seen_lines,
|
||||
seen_bytes,
|
||||
pos
|
||||
);
|
||||
|
||||
if seen_lines == lineno {
|
||||
trace!("returning start={} end={}", seen_bytes, pos);
|
||||
// If the number of seen lines is the lineno, seen_bytes is the start of the
|
||||
// line and pos is the end of the line
|
||||
return Some(Span::new(seen_bytes, pos));
|
||||
} else {
|
||||
// If it's not, increment seen_lines, and move seen_bytes to the beginning of
|
||||
// the next line
|
||||
seen_lines += 1;
|
||||
seen_bytes = pos + 1;
|
||||
}
|
||||
}
|
||||
|
||||
if seen_lines == 0 {
|
||||
trace!("returning start={} end={}", 0, self.snippet.len() - 1);
|
||||
|
||||
// if we got here, there were no newlines in the source
|
||||
Some(Span::new(0, self.snippet.len() - 1))
|
||||
} else {
|
||||
trace!(
|
||||
"returning start={} end={}",
|
||||
seen_bytes,
|
||||
self.snippet.len() - 1
|
||||
);
|
||||
|
||||
// if we got here and we didn't return, it should mean that we're talking about
|
||||
// the last line
|
||||
Some(Span::new(seen_bytes, self.snippet.len() - 1))
|
||||
}
|
||||
}
|
||||
|
||||
fn source(&self, span: Self::Span) -> Option<String> {
|
||||
trace!("source(tag={:?}) snippet={:?}", span, self.snippet);
|
||||
|
||||
if span.start() > span.end() || span.end() > self.snippet.len() {
|
||||
return None;
|
||||
}
|
||||
Some(span.slice(&self.snippet).to_string())
|
||||
}
|
||||
}
@@ -1,8 +1,7 @@
-use crate::parser::hir::syntax_shape::flat_shape::FlatShape;
-use crate::prelude::*;
+use crate::hir::syntax_shape::flat_shape::FlatShape;
 use derive_new::new;
 use getset::Getters;
-use nu_source::{Span, Spanned, SpannedItem};
+use nu_source::{b, DebugDocBuilder, PrettyDebugWithSource, Span, Spanned, SpannedItem};
 use serde::{Deserialize, Serialize};
 
 #[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Deserialize, Serialize)]
@@ -16,7 +15,6 @@ pub enum FlagKind {
 pub struct Flag {
     pub(crate) kind: FlagKind,
     pub(crate) name: Span,
-    pub(crate) span: Span,
 }
 
 impl PrettyDebugWithSource for Flag {
@@ -31,10 +29,10 @@ impl PrettyDebugWithSource for Flag {
 }
 
 impl Flag {
-    pub fn color(&self) -> Spanned<FlatShape> {
+    pub fn color(&self, span: impl Into<Span>) -> Spanned<FlatShape> {
         match self.kind {
-            FlagKind::Longhand => FlatShape::Flag.spanned(self.span),
-            FlagKind::Shorthand => FlatShape::ShorthandFlag.spanned(self.span),
+            FlagKind::Longhand => FlatShape::Flag.spanned(span.into()),
+            FlagKind::Shorthand => FlatShape::ShorthandFlag.spanned(span.into()),
         }
     }
 }
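
Note the new Flag::color signature: the span no longer lives on Flag itself, so callers pass it in, typically from the Spanned<Flag> wrapper that carries the flag. A hedged in-crate sketch of the call pattern (the helper name is illustrative):

// Sketch only: `flag.span` is the wrapper's span, `flag.item` the Flag value.
fn color_flag(flag: Spanned<Flag>) -> Spanned<FlatShape> {
    flag.item.color(flag.span)
}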
crates/nu-parser/src/parse/number.rs (new file, 70 lines)
@@ -0,0 +1,70 @@
use crate::hir::syntax_shape::FlatShape;
use crate::parse::parser::Number;
use bigdecimal::BigDecimal;
use nu_source::{b, DebugDocBuilder, HasSpan, PrettyDebugWithSource, Span, Text};
use num_bigint::BigInt;
use std::str::FromStr;

#[derive(Debug, Clone, Copy, Eq, PartialEq, Ord, PartialOrd, Hash)]
pub enum RawNumber {
    Int(Span),
    Decimal(Span),
}

impl HasSpan for RawNumber {
    fn span(&self) -> Span {
        match self {
            RawNumber::Int(span) => *span,
            RawNumber::Decimal(span) => *span,
        }
    }
}

impl PrettyDebugWithSource for RawNumber {
    fn pretty_debug(&self, source: &str) -> DebugDocBuilder {
        match self {
            RawNumber::Int(span) => b::primitive(span.slice(source)),
            RawNumber::Decimal(span) => b::primitive(span.slice(source)),
        }
    }
}

impl RawNumber {
    pub fn as_flat_shape(&self) -> FlatShape {
        match self {
            RawNumber::Int(_) => FlatShape::Int,
            RawNumber::Decimal(_) => FlatShape::Decimal,
        }
    }

    pub fn int(span: impl Into<Span>) -> RawNumber {
        let span = span.into();

        RawNumber::Int(span)
    }

    pub fn decimal(span: impl Into<Span>) -> RawNumber {
        let span = span.into();

        RawNumber::Decimal(span)
    }

    pub(crate) fn to_number(self, source: &Text) -> Number {
        match self {
            RawNumber::Int(tag) => {
                if let Ok(big_int) = BigInt::from_str(tag.slice(source)) {
                    Number::Int(big_int)
                } else {
                    unreachable!("Internal error: could not parse text as BigInt as expected")
                }
            }
            RawNumber::Decimal(tag) => {
                if let Ok(big_decimal) = BigDecimal::from_str(tag.slice(source)) {
                    Number::Decimal(big_decimal)
                } else {
                    unreachable!("Internal error: could not parse text as BigDecimal as expected")
                }
            }
        }
    }
}
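
A rough in-crate usage sketch for RawNumber, assuming Text offers a From<&str> conversion as used elsewhere in the workspace; the span offsets are illustrative:

#[cfg(test)]
mod sketch {
    use super::*;
    use nu_source::{Span, Text};
    use num_bigint::BigInt;

    #[test]
    fn raw_int_resolves_to_bigint() {
        let source = Text::from("42");
        let raw = RawNumber::int(Span::new(0, 2));

        // to_number re-reads the original text slice and parses it
        match raw.to_number(&source) {
            Number::Int(i) => assert_eq!(i, BigInt::from(42)),
            _ => panic!("expected an integer"),
        }
    }
}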
crates/nu-parser/src/parse/operator.rs (new file, 114 lines)
@@ -0,0 +1,114 @@
use nu_source::{b, DebugDocBuilder, PrettyDebug};
use serde::{Deserialize, Serialize};

use std::str::FromStr;

#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Deserialize, Serialize)]
pub enum CompareOperator {
    Equal,
    NotEqual,
    LessThan,
    GreaterThan,
    LessThanOrEqual,
    GreaterThanOrEqual,
    Contains,
    NotContains,
}

impl PrettyDebug for CompareOperator {
    fn pretty(&self) -> DebugDocBuilder {
        b::operator(self.as_str())
    }
}

impl CompareOperator {
    pub fn print(self) -> String {
        self.as_str().to_string()
    }

    pub fn as_str(self) -> &'static str {
        match self {
            CompareOperator::Equal => "==",
            CompareOperator::NotEqual => "!=",
            CompareOperator::LessThan => "<",
            CompareOperator::GreaterThan => ">",
            CompareOperator::LessThanOrEqual => "<=",
            CompareOperator::GreaterThanOrEqual => ">=",
            CompareOperator::Contains => "=~",
            CompareOperator::NotContains => "!~",
        }
    }
}

impl From<&str> for CompareOperator {
    fn from(input: &str) -> CompareOperator {
        if let Ok(output) = CompareOperator::from_str(input) {
            output
        } else {
            unreachable!("Internal error: CompareOperator from failed")
        }
    }
}

impl FromStr for CompareOperator {
    type Err = ();
    fn from_str(input: &str) -> Result<Self, <Self as std::str::FromStr>::Err> {
        match input {
            "==" => Ok(CompareOperator::Equal),
            "!=" => Ok(CompareOperator::NotEqual),
            "<" => Ok(CompareOperator::LessThan),
            ">" => Ok(CompareOperator::GreaterThan),
            "<=" => Ok(CompareOperator::LessThanOrEqual),
            ">=" => Ok(CompareOperator::GreaterThanOrEqual),
            "=~" => Ok(CompareOperator::Contains),
            "!~" => Ok(CompareOperator::NotContains),
            _ => Err(()),
        }
    }
}

#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Deserialize, Serialize)]
pub enum EvaluationOperator {
    Dot,
    DotDot,
}

impl PrettyDebug for EvaluationOperator {
    fn pretty(&self) -> DebugDocBuilder {
        b::operator(self.as_str())
    }
}

impl EvaluationOperator {
    pub fn print(self) -> String {
        self.as_str().to_string()
    }

    pub fn as_str(self) -> &'static str {
        match self {
            EvaluationOperator::Dot => ".",
            EvaluationOperator::DotDot => "..",
        }
    }
}

impl From<&str> for EvaluationOperator {
    fn from(input: &str) -> EvaluationOperator {
        if let Ok(output) = EvaluationOperator::from_str(input) {
            output
        } else {
            unreachable!("Internal error: EvaluationOperator 'from' failed")
        }
    }
}

impl FromStr for EvaluationOperator {
    type Err = ();
    fn from_str(input: &str) -> Result<Self, <Self as std::str::FromStr>::Err> {
        match input {
            "." => Ok(EvaluationOperator::Dot),
            ".." => Ok(EvaluationOperator::DotDot),
            _ => Err(()),
        }
    }
}
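
Because as_str and from_str are mirror images, a textual operator should round-trip. A small in-crate sanity sketch (the helper is illustrative, not part of the file):

// Sketch only: "==", "=~", "<=", etc. should survive a parse/print round-trip.
fn compare_op_roundtrips(op_text: &str) -> bool {
    match CompareOperator::from_str(op_text) {
        Ok(op) => op.as_str() == op_text,
        Err(()) => false,
    }
}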
File diff suppressed because it is too large
@ -1,24 +1,32 @@
|
||||
use crate::parser::TokenNode;
|
||||
use crate::prelude::*;
|
||||
use crate::{SpannedToken, Token};
|
||||
use derive_new::new;
|
||||
use getset::Getters;
|
||||
use nu_source::{DebugDocBuilder, PrettyDebugWithSource, Span, Spanned};
|
||||
use nu_source::{
|
||||
b, DebugDocBuilder, HasSpan, IntoSpanned, PrettyDebugWithSource, Span, Spanned, SpannedItem,
|
||||
};
|
||||
|
||||
#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Getters, new)]
|
||||
pub struct Pipeline {
|
||||
#[get = "pub"]
|
||||
pub(crate) parts: Vec<PipelineElement>,
|
||||
pub(crate) span: Span,
|
||||
}
|
||||
|
||||
impl IntoSpanned for Pipeline {
|
||||
type Output = Spanned<Pipeline>;
|
||||
|
||||
fn into_spanned(self, span: impl Into<Span>) -> Self::Output {
|
||||
self.spanned(span.into())
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Getters, new)]
|
||||
pub struct Tokens {
|
||||
pub(crate) tokens: Vec<TokenNode>,
|
||||
pub(crate) tokens: Vec<SpannedToken>,
|
||||
pub(crate) span: Span,
|
||||
}
|
||||
|
||||
impl Tokens {
|
||||
pub fn iter(&self) -> impl Iterator<Item = &TokenNode> {
|
||||
pub fn iter(&self) -> impl Iterator<Item = &SpannedToken> {
|
||||
self.tokens.iter()
|
||||
}
|
||||
}
|
||||
@ -39,7 +47,7 @@ impl HasSpan for PipelineElement {
|
||||
}
|
||||
|
||||
impl PipelineElement {
|
||||
pub fn new(pipe: Option<Span>, tokens: Spanned<Vec<TokenNode>>) -> PipelineElement {
|
||||
pub fn new(pipe: Option<Span>, tokens: Spanned<Vec<SpannedToken>>) -> PipelineElement {
|
||||
PipelineElement {
|
||||
pipe,
|
||||
tokens: Tokens {
|
||||
@ -49,7 +57,7 @@ impl PipelineElement {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn tokens(&self) -> &[TokenNode] {
|
||||
pub fn tokens(&self) -> &[SpannedToken] {
|
||||
&self.tokens.tokens
|
||||
}
|
||||
}
|
||||
@ -66,9 +74,9 @@ impl PrettyDebugWithSource for Pipeline {
|
||||
impl PrettyDebugWithSource for PipelineElement {
|
||||
fn pretty_debug(&self, source: &str) -> DebugDocBuilder {
|
||||
b::intersperse(
|
||||
self.tokens.iter().map(|token| match token {
|
||||
TokenNode::Whitespace(_) => b::blank(),
|
||||
token => token.pretty_debug(source),
|
||||
self.tokens.iter().map(|token| match token.unspanned() {
|
||||
Token::Whitespace => b::blank(),
|
||||
_ => token.pretty_debug(source),
|
||||
}),
|
||||
b::space(),
|
||||
)
|
crates/nu-parser/src/parse/token_tree.rs (new file, 515 lines)
@@ -0,0 +1,515 @@
|
||||
#![allow(clippy::type_complexity)]
|
||||
use crate::parse::{call_node::*, comment::*, flag::*, number::*, operator::*, pipeline::*};
|
||||
use derive_new::new;
|
||||
use getset::Getters;
|
||||
use nu_errors::{ParseError, ShellError};
|
||||
use nu_protocol::{ShellTypeName, SpannedTypeName};
|
||||
use nu_source::{
|
||||
b, DebugDocBuilder, HasSpan, PrettyDebugWithSource, Span, Spanned, SpannedItem, Text,
|
||||
};
|
||||
use std::borrow::Cow;
|
||||
use std::ops::Deref;
|
||||
|
||||
#[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd)]
|
||||
pub enum Token {
|
||||
Number(RawNumber),
|
||||
CompareOperator(CompareOperator),
|
||||
EvaluationOperator(EvaluationOperator),
|
||||
String(Span),
|
||||
Variable(Span),
|
||||
ItVariable(Span),
|
||||
ExternalCommand(Span),
|
||||
ExternalWord,
|
||||
GlobPattern,
|
||||
Bare,
|
||||
Garbage,
|
||||
|
||||
Call(CallNode),
|
||||
Delimited(DelimitedNode),
|
||||
Pipeline(Pipeline),
|
||||
Flag(Flag),
|
||||
Comment(Comment),
|
||||
Whitespace,
|
||||
Separator,
|
||||
}
|
||||
|
||||
macro_rules! token_type {
|
||||
(struct $name:tt (desc: $desc:tt) -> $out:ty { |$span:ident, $pat:pat| => $do:expr }) => {
|
||||
pub struct $name;
|
||||
|
||||
impl TokenType for $name {
|
||||
type Output = $out;
|
||||
|
||||
fn desc(&self) -> Cow<'static, str> {
|
||||
Cow::Borrowed($desc)
|
||||
}
|
||||
|
||||
fn extract_token_value(
|
||||
&self,
|
||||
token: &SpannedToken,
|
||||
err: ParseErrorFn<$out>,
|
||||
) -> Result<$out, ParseError> {
|
||||
let $span = token.span();
|
||||
|
||||
match *token.unspanned() {
|
||||
$pat => Ok($do),
|
||||
_ => err(),
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
(struct $name:tt (desc: $desc:tt) -> $out:ty { $pat:pat => $do:expr }) => {
|
||||
pub struct $name;
|
||||
|
||||
impl TokenType for $name {
|
||||
type Output = $out;
|
||||
|
||||
fn desc(&self) -> Cow<'static, str> {
|
||||
Cow::Borrowed($desc)
|
||||
}
|
||||
|
||||
fn extract_token_value(
|
||||
&self,
|
||||
token: &SpannedToken,
|
||||
err: ParseErrorFn<$out>,
|
||||
) -> Result<$out, ParseError> {
|
||||
match token.unspanned().clone() {
|
||||
$pat => Ok($do),
|
||||
_ => err(),
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
pub type ParseErrorFn<'a, T> = &'a dyn Fn() -> Result<T, ParseError>;
|
||||
|
||||
token_type!(struct IntType (desc: "integer") -> RawNumber {
|
||||
Token::Number(number @ RawNumber::Int(_)) => number
|
||||
});
|
||||
|
||||
token_type!(struct DecimalType (desc: "decimal") -> RawNumber {
|
||||
Token::Number(number @ RawNumber::Decimal(_)) => number
|
||||
});
|
||||
|
||||
token_type!(struct StringType (desc: "string") -> (Span, Span) {
|
||||
|outer, Token::String(inner)| => (inner, outer)
|
||||
});
|
||||
|
||||
token_type!(struct BareType (desc: "word") -> Span {
|
||||
|span, Token::Bare| => span
|
||||
});
|
||||
|
||||
token_type!(struct DotType (desc: "dot") -> Span {
|
||||
|span, Token::EvaluationOperator(EvaluationOperator::Dot)| => span
|
||||
});
|
||||
|
||||
token_type!(struct DotDotType (desc: "dotdot") -> Span {
|
||||
|span, Token::EvaluationOperator(EvaluationOperator::DotDot)| => span
|
||||
});
|
||||
|
||||
token_type!(struct CompareOperatorType (desc: "compare operator") -> (Span, CompareOperator) {
|
||||
|span, Token::CompareOperator(operator)| => (span, operator)
|
||||
});
|
||||
|
||||
token_type!(struct ExternalWordType (desc: "external word") -> Span {
|
||||
|span, Token::ExternalWord| => span
|
||||
});
|
||||
|
||||
token_type!(struct ExternalCommandType (desc: "external command") -> (Span, Span) {
|
||||
|outer, Token::ExternalCommand(inner)| => (inner, outer)
|
||||
});
|
||||
|
||||
token_type!(struct CommentType (desc: "comment") -> (Comment, Span) {
|
||||
|outer, Token::Comment(comment)| => (comment, outer)
|
||||
});
|
||||
|
||||
token_type!(struct SeparatorType (desc: "separator") -> Span {
|
||||
|span, Token::Separator| => span
|
||||
});
|
||||
|
||||
token_type!(struct WhitespaceType (desc: "whitespace") -> Span {
|
||||
|span, Token::Whitespace| => span
|
||||
});
|
||||
|
||||
token_type!(struct WordType (desc: "word") -> Span {
|
||||
|span, Token::Bare| => span
|
||||
});
|
||||
|
||||
token_type!(struct ItVarType (desc: "$it") -> (Span, Span) {
|
||||
|outer, Token::ItVariable(inner)| => (inner, outer)
|
||||
});
|
||||
|
||||
token_type!(struct VarType (desc: "variable") -> (Span, Span) {
|
||||
|outer, Token::Variable(inner)| => (inner, outer)
|
||||
});
|
||||
|
||||
token_type!(struct PipelineType (desc: "pipeline") -> Pipeline {
|
||||
Token::Pipeline(pipeline) => pipeline
|
||||
});
|
||||
|
||||
token_type!(struct BlockType (desc: "block") -> DelimitedNode {
|
||||
Token::Delimited(block @ DelimitedNode { delimiter: Delimiter::Brace, .. }) => block
|
||||
});
|
||||
|
||||
token_type!(struct SquareType (desc: "square") -> DelimitedNode {
|
||||
Token::Delimited(square @ DelimitedNode { delimiter: Delimiter::Square, .. }) => square
|
||||
});
|
||||
|
||||
pub trait TokenType {
|
||||
type Output;
|
||||
|
||||
fn desc(&self) -> Cow<'static, str>;
|
||||
|
||||
fn extract_token_value(
|
||||
&self,
|
||||
token: &SpannedToken,
|
||||
err: ParseErrorFn<Self::Output>,
|
||||
) -> Result<Self::Output, ParseError>;
|
||||
}
|
||||
|
||||
impl Token {
|
||||
pub fn into_spanned(self, span: impl Into<Span>) -> SpannedToken {
|
||||
SpannedToken {
|
||||
unspanned: self,
|
||||
span: span.into(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Getters)]
|
||||
pub struct SpannedToken {
|
||||
#[get = "pub"]
|
||||
unspanned: Token,
|
||||
span: Span,
|
||||
}
|
||||
|
||||
impl Deref for SpannedToken {
|
||||
type Target = Token;
|
||||
fn deref(&self) -> &Self::Target {
|
||||
&self.unspanned
|
||||
}
|
||||
}
|
||||
|
||||
impl HasSpan for SpannedToken {
|
||||
fn span(&self) -> Span {
|
||||
self.span
|
||||
}
|
||||
}
|
||||
|
||||
impl ShellTypeName for SpannedToken {
|
||||
fn type_name(&self) -> &'static str {
|
||||
self.unspanned.type_name()
|
||||
}
|
||||
}
|
||||
|
||||
impl PrettyDebugWithSource for SpannedToken {
|
||||
fn pretty_debug(&self, source: &str) -> DebugDocBuilder {
|
||||
match self.unspanned() {
|
||||
Token::Number(number) => number.pretty_debug(source),
|
||||
Token::CompareOperator(operator) => operator.pretty_debug(source),
|
||||
Token::EvaluationOperator(operator) => operator.pretty_debug(source),
|
||||
Token::String(_) | Token::GlobPattern | Token::Bare => {
|
||||
b::primitive(self.span.slice(source))
|
||||
}
|
||||
Token::Variable(_) => b::var(self.span.slice(source)),
|
||||
Token::ItVariable(_) => b::keyword(self.span.slice(source)),
|
||||
Token::ExternalCommand(_) => b::description(self.span.slice(source)),
|
||||
Token::ExternalWord => b::description(self.span.slice(source)),
|
||||
Token::Call(call) => call.pretty_debug(source),
|
||||
Token::Delimited(delimited) => delimited.pretty_debug(source),
|
||||
Token::Pipeline(pipeline) => pipeline.pretty_debug(source),
|
||||
Token::Flag(flag) => flag.pretty_debug(source),
|
||||
Token::Garbage => b::error(self.span.slice(source)),
|
||||
Token::Whitespace => b::typed(
|
||||
"whitespace",
|
||||
b::description(format!("{:?}", self.span.slice(source))),
|
||||
),
|
||||
Token::Separator => b::typed(
|
||||
"separator",
|
||||
b::description(format!("{:?}", self.span.slice(source))),
|
||||
),
|
||||
Token::Comment(comment) => {
|
||||
b::typed("comment", b::description(comment.text.slice(source)))
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl ShellTypeName for Token {
|
||||
fn type_name(&self) -> &'static str {
|
||||
match self {
|
||||
Token::Number(_) => "number",
|
||||
Token::CompareOperator(_) => "comparison operator",
|
||||
Token::EvaluationOperator(EvaluationOperator::Dot) => "dot",
|
||||
Token::EvaluationOperator(EvaluationOperator::DotDot) => "dot dot",
|
||||
Token::String(_) => "string",
|
||||
Token::Variable(_) => "variable",
|
||||
Token::ItVariable(_) => "it variable",
|
||||
Token::ExternalCommand(_) => "external command",
|
||||
Token::ExternalWord => "external word",
|
||||
Token::GlobPattern => "glob pattern",
|
||||
Token::Bare => "word",
|
||||
Token::Call(_) => "command",
|
||||
Token::Delimited(d) => d.type_name(),
|
||||
Token::Pipeline(_) => "pipeline",
|
||||
Token::Flag(_) => "flag",
|
||||
Token::Garbage => "garbage",
|
||||
Token::Whitespace => "whitespace",
|
||||
Token::Separator => "separator",
|
||||
Token::Comment(_) => "comment",
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl From<&SpannedToken> for Span {
|
||||
fn from(token: &SpannedToken) -> Span {
|
||||
token.span
|
||||
}
|
||||
}
|
||||
|
||||
impl SpannedToken {
|
||||
pub fn as_external_arg(&self, source: &Text) -> String {
|
||||
self.span().slice(source).to_string()
|
||||
}
|
||||
|
||||
pub fn source<'a>(&self, source: &'a Text) -> &'a str {
|
||||
self.span().slice(source)
|
||||
}
|
||||
|
||||
pub fn get_variable(&self) -> Result<(Span, Span), ShellError> {
|
||||
match self.unspanned() {
|
||||
Token::Variable(inner_span) => Ok((self.span(), *inner_span)),
|
||||
_ => Err(ShellError::type_error("variable", self.spanned_type_name())),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn is_bare(&self) -> bool {
|
||||
match self.unspanned() {
|
||||
Token::Bare => true,
|
||||
_ => false,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn is_string(&self) -> bool {
|
||||
match self.unspanned() {
|
||||
Token::String(_) => true,
|
||||
_ => false,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn is_number(&self) -> bool {
|
||||
match self.unspanned() {
|
||||
Token::Number(_) => true,
|
||||
_ => false,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn as_string(&self) -> Option<(Span, Span)> {
|
||||
match self.unspanned() {
|
||||
Token::String(inner_span) => Some((self.span(), *inner_span)),
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn is_pattern(&self) -> bool {
|
||||
match self.unspanned() {
|
||||
Token::GlobPattern => true,
|
||||
_ => false,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn is_word(&self) -> bool {
|
||||
match self.unspanned() {
|
||||
Token::Bare => true,
|
||||
_ => false,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn is_int(&self) -> bool {
|
||||
match self.unspanned() {
|
||||
Token::Number(RawNumber::Int(_)) => true,
|
||||
_ => false,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn is_dot(&self) -> bool {
|
||||
match self.unspanned() {
|
||||
Token::EvaluationOperator(EvaluationOperator::Dot) => true,
|
||||
_ => false,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn as_block(&self) -> Option<(Spanned<&[SpannedToken]>, (Span, Span))> {
|
||||
match self.unspanned() {
|
||||
Token::Delimited(DelimitedNode {
|
||||
delimiter,
|
||||
children,
|
||||
spans,
|
||||
}) if *delimiter == Delimiter::Brace => {
|
||||
Some(((&children[..]).spanned(self.span()), *spans))
|
||||
}
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn is_external(&self) -> bool {
|
||||
match self.unspanned() {
|
||||
Token::ExternalCommand(..) => true,
|
||||
_ => false,
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn as_flag(&self, value: &str, source: &Text) -> Option<Flag> {
|
||||
match self.unspanned() {
|
||||
Token::Flag(flag @ Flag { .. }) if value == flag.name().slice(source) => Some(*flag),
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn as_pipeline(&self) -> Result<Pipeline, ParseError> {
|
||||
match self.unspanned() {
|
||||
Token::Pipeline(pipeline) => Ok(pipeline.clone()),
|
||||
_ => Err(ParseError::mismatch("pipeline", self.spanned_type_name())),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn is_whitespace(&self) -> bool {
|
||||
match self.unspanned() {
|
||||
Token::Whitespace => true,
|
||||
_ => false,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Getters, new)]
|
||||
#[get = "pub(crate)"]
|
||||
pub struct DelimitedNode {
|
||||
pub(crate) delimiter: Delimiter,
|
||||
pub(crate) spans: (Span, Span),
|
||||
pub(crate) children: Vec<SpannedToken>,
|
||||
}
|
||||
|
||||
impl HasSpan for DelimitedNode {
|
||||
fn span(&self) -> Span {
|
||||
self.spans.0.until(self.spans.1)
|
||||
}
|
||||
}
|
||||
|
||||
impl PrettyDebugWithSource for DelimitedNode {
|
||||
fn pretty_debug(&self, source: &str) -> DebugDocBuilder {
|
||||
b::delimit(
|
||||
self.delimiter.open(),
|
||||
b::intersperse(
|
||||
self.children.iter().map(|child| child.pretty_debug(source)),
|
||||
b::space(),
|
||||
),
|
||||
self.delimiter.close(),
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
impl DelimitedNode {
|
||||
pub fn type_name(&self) -> &'static str {
|
||||
match self.delimiter {
|
||||
Delimiter::Brace => "braced expression",
|
||||
Delimiter::Paren => "parenthesized expression",
|
||||
Delimiter::Square => "array literal or index operator",
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Copy, Clone, Eq, PartialEq, Ord, PartialOrd)]
|
||||
pub enum Delimiter {
|
||||
Paren,
|
||||
Brace,
|
||||
Square,
|
||||
}
|
||||
|
||||
impl Delimiter {
|
||||
pub(crate) fn open(self) -> &'static str {
|
||||
match self {
|
||||
Delimiter::Paren => "(",
|
||||
Delimiter::Brace => "{",
|
||||
Delimiter::Square => "[",
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn close(self) -> &'static str {
|
||||
match self {
|
||||
Delimiter::Paren => ")",
|
||||
Delimiter::Brace => "}",
|
||||
Delimiter::Square => "]",
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Getters, new)]
|
||||
#[get = "pub(crate)"]
|
||||
pub struct PathNode {
|
||||
head: Box<SpannedToken>,
|
||||
tail: Vec<SpannedToken>,
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
impl SpannedToken {
|
||||
pub fn expect_external(&self) -> Span {
|
||||
match self.unspanned() {
|
||||
Token::ExternalCommand(span) => *span,
|
||||
_ => panic!(
|
||||
"Only call expect_external if you checked is_external first, found {:?}",
|
||||
self
|
||||
),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn expect_string(&self) -> (Span, Span) {
|
||||
match self.unspanned() {
|
||||
Token::String(inner_span) => (self.span(), *inner_span),
|
||||
other => panic!("Expected string, found {:?}", other),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn expect_list(&self) -> Spanned<Vec<SpannedToken>> {
|
||||
match self.unspanned() {
|
||||
Token::Pipeline(pipeline) => pipeline
|
||||
.parts()
|
||||
.iter()
|
||||
.flat_map(|part| part.tokens())
|
||||
.cloned()
|
||||
.collect::<Vec<SpannedToken>>()
|
||||
.spanned(self.span()),
|
||||
_ => panic!("Expected list, found {:?}", self),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn expect_pattern(&self) -> Span {
|
||||
match self.unspanned() {
|
||||
Token::GlobPattern => self.span(),
|
||||
_ => panic!("Expected pattern, found {:?}", self),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn expect_var(&self) -> (Span, Span) {
|
||||
match self.unspanned() {
|
||||
Token::Variable(inner_span) => (self.span(), *inner_span),
|
||||
Token::ItVariable(inner_span) => (self.span(), *inner_span),
|
||||
other => panic!("Expected var, found {:?}", other),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn expect_dot(&self) -> Span {
|
||||
match self.unspanned() {
|
||||
Token::EvaluationOperator(EvaluationOperator::Dot) => self.span(),
|
||||
other => panic!("Expected dot, found {:?}", other),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn expect_bare(&self) -> Span {
|
||||
match self.unspanned() {
|
||||
Token::Bare => self.span(),
|
||||
_ => panic!("Expected bare, found {:?}", self),
|
||||
}
|
||||
}
|
||||
}
|
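
For readers following the token_type! macro above: this is roughly what the invocation token_type!(struct BareType (desc: "word") -> Span { |span, Token::Bare| => span }) generates. The struct is renamed here so the sketch does not collide with the real generated type:

pub struct BareTypeExpanded;

impl TokenType for BareTypeExpanded {
    type Output = Span;

    fn desc(&self) -> Cow<'static, str> {
        Cow::Borrowed("word")
    }

    fn extract_token_value(
        &self,
        token: &SpannedToken,
        err: ParseErrorFn<Span>,
    ) -> Result<Span, ParseError> {
        // bind the token's span, then match on the unspanned Token
        let span = token.span();

        match *token.unspanned() {
            Token::Bare => Ok(span),
            _ => err(),
        }
    }
}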
@ -1,32 +1,31 @@
|
||||
use crate::prelude::*;
|
||||
|
||||
use crate::parser::parse::flag::{Flag, FlagKind};
|
||||
use crate::parser::parse::operator::Operator;
|
||||
use crate::parser::parse::pipeline::{Pipeline, PipelineElement};
|
||||
use crate::parser::parse::token_tree::{DelimitedNode, Delimiter, TokenNode};
|
||||
use crate::parser::parse::tokens::{RawNumber, UnspannedToken};
|
||||
use crate::parser::CallNode;
|
||||
use nu_source::Spanned;
|
||||
use crate::parse::call_node::CallNode;
|
||||
use crate::parse::comment::Comment;
|
||||
use crate::parse::flag::{Flag, FlagKind};
|
||||
use crate::parse::number::RawNumber;
|
||||
use crate::parse::operator::{CompareOperator, EvaluationOperator};
|
||||
use crate::parse::pipeline::{Pipeline, PipelineElement};
|
||||
use crate::parse::token_tree::{DelimitedNode, Delimiter, SpannedToken, Token};
|
||||
use bigdecimal::BigDecimal;
|
||||
use nu_source::{Span, Spanned, SpannedItem};
|
||||
use num_bigint::BigInt;
|
||||
|
||||
#[derive(Default)]
|
||||
pub struct TokenTreeBuilder {
|
||||
pos: usize,
|
||||
output: String,
|
||||
}
|
||||
|
||||
impl TokenTreeBuilder {
|
||||
pub fn new() -> TokenTreeBuilder {
|
||||
TokenTreeBuilder {
|
||||
pos: 0,
|
||||
output: String::new(),
|
||||
}
|
||||
pub fn new() -> Self {
|
||||
Default::default()
|
||||
}
|
||||
}
|
||||
|
||||
pub type CurriedToken = Box<dyn FnOnce(&mut TokenTreeBuilder) -> TokenNode + 'static>;
|
||||
pub type CurriedToken = Box<dyn FnOnce(&mut TokenTreeBuilder) -> SpannedToken + 'static>;
|
||||
pub type CurriedCall = Box<dyn FnOnce(&mut TokenTreeBuilder) -> Spanned<CallNode> + 'static>;
|
||||
|
||||
impl TokenTreeBuilder {
|
||||
pub fn build(block: impl FnOnce(&mut Self) -> TokenNode) -> (TokenNode, String) {
|
||||
pub fn build(block: impl FnOnce(&mut Self) -> SpannedToken) -> (SpannedToken, String) {
|
||||
let mut builder = TokenTreeBuilder::new();
|
||||
let node = block(&mut builder);
|
||||
(node, builder.output)
|
||||
@ -78,8 +77,8 @@ impl TokenTreeBuilder {
|
||||
})
|
||||
}
|
||||
|
||||
pub fn spanned_pipeline(input: Vec<PipelineElement>, span: impl Into<Span>) -> TokenNode {
|
||||
TokenNode::Pipeline(Pipeline::new(input, span.into()))
|
||||
pub fn spanned_pipeline(input: Vec<PipelineElement>, span: impl Into<Span>) -> SpannedToken {
|
||||
Token::Pipeline(Pipeline::new(input)).into_spanned(span)
|
||||
}
|
||||
|
||||
pub fn token_list(input: Vec<CurriedToken>) -> CurriedToken {
|
||||
@ -92,11 +91,31 @@ impl TokenTreeBuilder {
|
||||
})
|
||||
}
|
||||
|
||||
pub fn spanned_token_list(input: Vec<TokenNode>, span: impl Into<Span>) -> TokenNode {
|
||||
TokenNode::Nodes(input.spanned(span.into()))
|
||||
pub fn spanned_token_list(input: Vec<SpannedToken>, span: impl Into<Span>) -> SpannedToken {
|
||||
let span = span.into();
|
||||
Token::Pipeline(Pipeline::new(vec![PipelineElement::new(
|
||||
None,
|
||||
input.spanned(span),
|
||||
)]))
|
||||
.into_spanned(span)
|
||||
}
|
||||
|
||||
pub fn op(input: impl Into<Operator>) -> CurriedToken {
|
||||
pub fn garbage(input: impl Into<String>) -> CurriedToken {
|
||||
let input = input.into();
|
||||
|
||||
Box::new(move |b| {
|
||||
let (start, end) = b.consume(&input);
|
||||
b.pos = end;
|
||||
|
||||
TokenTreeBuilder::spanned_garbage(Span::new(start, end))
|
||||
})
|
||||
}
|
||||
|
||||
pub fn spanned_garbage(span: impl Into<Span>) -> SpannedToken {
|
||||
Token::Garbage.into_spanned(span)
|
||||
}
|
||||
|
||||
pub fn op(input: impl Into<CompareOperator>) -> CurriedToken {
|
||||
let input = input.into();
|
||||
|
||||
Box::new(move |b| {
|
||||
@ -104,12 +123,42 @@ impl TokenTreeBuilder {
|
||||
|
||||
b.pos = end;
|
||||
|
||||
TokenTreeBuilder::spanned_op(input, Span::new(start, end))
|
||||
TokenTreeBuilder::spanned_cmp_op(input, Span::new(start, end))
|
||||
})
|
||||
}
|
||||
|
||||
pub fn spanned_op(input: impl Into<Operator>, span: impl Into<Span>) -> TokenNode {
|
||||
TokenNode::Token(UnspannedToken::Operator(input.into()).into_token(span))
|
||||
pub fn spanned_cmp_op(
|
||||
input: impl Into<CompareOperator>,
|
||||
span: impl Into<Span>,
|
||||
) -> SpannedToken {
|
||||
Token::CompareOperator(input.into()).into_spanned(span)
|
||||
}
|
||||
|
||||
pub fn dot() -> CurriedToken {
|
||||
Box::new(move |b| {
|
||||
let (start, end) = b.consume(".");
|
||||
|
||||
b.pos = end;
|
||||
|
||||
TokenTreeBuilder::spanned_eval_op(".", Span::new(start, end))
|
||||
})
|
||||
}
|
||||
|
||||
pub fn dotdot() -> CurriedToken {
|
||||
Box::new(move |b| {
|
||||
let (start, end) = b.consume("..");
|
||||
|
||||
b.pos = end;
|
||||
|
||||
TokenTreeBuilder::spanned_eval_op("..", Span::new(start, end))
|
||||
})
|
||||
}
|
||||
|
||||
pub fn spanned_eval_op(
|
||||
input: impl Into<EvaluationOperator>,
|
||||
span: impl Into<Span>,
|
||||
) -> SpannedToken {
|
||||
Token::EvaluationOperator(input.into()).into_spanned(span)
|
||||
}
|
||||
|
||||
pub fn string(input: impl Into<String>) -> CurriedToken {
|
||||
@ -128,8 +177,8 @@ impl TokenTreeBuilder {
|
||||
})
|
||||
}
|
||||
|
||||
pub fn spanned_string(input: impl Into<Span>, span: impl Into<Span>) -> TokenNode {
|
||||
TokenNode::Token(UnspannedToken::String(input.into()).into_token(span))
|
||||
pub fn spanned_string(input: impl Into<Span>, span: impl Into<Span>) -> SpannedToken {
|
||||
Token::String(input.into()).into_spanned(span)
|
||||
}
|
||||
|
||||
pub fn bare(input: impl Into<String>) -> CurriedToken {
|
||||
@ -143,8 +192,8 @@ impl TokenTreeBuilder {
|
||||
})
|
||||
}
|
||||
|
||||
pub fn spanned_bare(span: impl Into<Span>) -> TokenNode {
|
||||
TokenNode::Token(UnspannedToken::Bare.into_token(span))
|
||||
pub fn spanned_bare(span: impl Into<Span>) -> SpannedToken {
|
||||
Token::Bare.into_spanned(span)
|
||||
}
|
||||
|
||||
pub fn pattern(input: impl Into<String>) -> CurriedToken {
|
||||
@ -158,8 +207,8 @@ impl TokenTreeBuilder {
|
||||
})
|
||||
}
|
||||
|
||||
pub fn spanned_pattern(input: impl Into<Span>) -> TokenNode {
|
||||
TokenNode::Token(UnspannedToken::GlobPattern.into_token(input))
|
||||
pub fn spanned_pattern(input: impl Into<Span>) -> SpannedToken {
|
||||
Token::GlobPattern.into_spanned(input)
|
||||
}
|
||||
|
||||
pub fn external_word(input: impl Into<String>) -> CurriedToken {
|
||||
@ -173,8 +222,8 @@ impl TokenTreeBuilder {
|
||||
})
|
||||
}
|
||||
|
||||
pub fn spanned_external_word(input: impl Into<Span>) -> TokenNode {
|
||||
TokenNode::Token(UnspannedToken::ExternalWord.into_token(input))
|
||||
pub fn spanned_external_word(input: impl Into<Span>) -> SpannedToken {
|
||||
Token::ExternalWord.into_spanned(input)
|
||||
}
|
||||
|
||||
pub fn external_command(input: impl Into<String>) -> CurriedToken {
|
||||
@ -192,8 +241,11 @@ impl TokenTreeBuilder {
|
||||
})
|
||||
}
|
||||
|
||||
pub fn spanned_external_command(inner: impl Into<Span>, outer: impl Into<Span>) -> TokenNode {
|
||||
TokenNode::Token(UnspannedToken::ExternalCommand(inner.into()).into_token(outer))
|
||||
pub fn spanned_external_command(
|
||||
inner: impl Into<Span>,
|
||||
outer: impl Into<Span>,
|
||||
) -> SpannedToken {
|
||||
Token::ExternalCommand(inner.into()).into_spanned(outer)
|
||||
}
|
||||
|
||||
pub fn int(input: impl Into<BigInt>) -> CurriedToken {
|
||||
@ -224,8 +276,8 @@ impl TokenTreeBuilder {
|
||||
})
|
||||
}
|
||||
|
||||
pub fn spanned_number(input: impl Into<RawNumber>, span: impl Into<Span>) -> TokenNode {
|
||||
TokenNode::Token(UnspannedToken::Number(input.into()).into_token(span))
|
||||
pub fn spanned_number(input: impl Into<RawNumber>, span: impl Into<Span>) -> SpannedToken {
|
||||
Token::Number(input.into()).into_spanned(span)
|
||||
}
|
||||
|
||||
pub fn var(input: impl Into<String>) -> CurriedToken {
|
||||
@ -239,8 +291,21 @@ impl TokenTreeBuilder {
|
||||
})
|
||||
}
|
||||
|
||||
pub fn spanned_var(input: impl Into<Span>, span: impl Into<Span>) -> TokenNode {
|
||||
TokenNode::Token(UnspannedToken::Variable(input.into()).into_token(span))
|
||||
pub fn spanned_var(input: impl Into<Span>, span: impl Into<Span>) -> SpannedToken {
|
||||
Token::Variable(input.into()).into_spanned(span)
|
||||
}
|
||||
|
||||
pub fn it_var() -> CurriedToken {
|
||||
Box::new(move |b| {
|
||||
let (start, _) = b.consume("$");
|
||||
let (inner_start, end) = b.consume("it");
|
||||
|
||||
TokenTreeBuilder::spanned_it_var(Span::new(inner_start, end), Span::new(start, end))
|
||||
})
|
||||
}
|
||||
|
||||
pub fn spanned_it_var(input: impl Into<Span>, span: impl Into<Span>) -> SpannedToken {
|
||||
Token::ItVariable(input.into()).into_spanned(span)
|
||||
}
|
||||
|
||||
pub fn flag(input: impl Into<String>) -> CurriedToken {
|
||||
@ -254,8 +319,9 @@ impl TokenTreeBuilder {
|
||||
})
|
||||
}
|
||||
|
||||
pub fn spanned_flag(input: impl Into<Span>, span: impl Into<Span>) -> TokenNode {
|
||||
TokenNode::Flag(Flag::new(FlagKind::Longhand, input.into(), span.into()))
|
||||
pub fn spanned_flag(input: impl Into<Span>, span: impl Into<Span>) -> SpannedToken {
|
||||
let span = span.into();
|
||||
Token::Flag(Flag::new(FlagKind::Longhand, input.into())).into_spanned(span)
|
||||
}
|
||||
|
||||
pub fn shorthand(input: impl Into<String>) -> CurriedToken {
|
||||
@ -269,8 +335,10 @@ impl TokenTreeBuilder {
|
||||
})
|
||||
}
|
||||
|
||||
pub fn spanned_shorthand(input: impl Into<Span>, span: impl Into<Span>) -> TokenNode {
|
||||
TokenNode::Flag(Flag::new(FlagKind::Shorthand, input.into(), span.into()))
|
||||
pub fn spanned_shorthand(input: impl Into<Span>, span: impl Into<Span>) -> SpannedToken {
|
||||
let span = span.into();
|
||||
|
||||
Token::Flag(Flag::new(FlagKind::Shorthand, input.into())).into_spanned(span)
|
||||
}
|
||||
|
||||
pub fn call(head: CurriedToken, input: Vec<CurriedToken>) -> CurriedCall {
|
||||
@ -290,17 +358,20 @@ impl TokenTreeBuilder {
|
||||
})
|
||||
}
|
||||
|
||||
pub fn spanned_call(input: Vec<TokenNode>, span: impl Into<Span>) -> Spanned<CallNode> {
|
||||
if input.len() == 0 {
|
||||
pub fn spanned_call(input: Vec<SpannedToken>, span: impl Into<Span>) -> Spanned<CallNode> {
|
||||
if input.is_empty() {
|
||||
panic!("BUG: spanned call (TODO)")
|
||||
}
|
||||
|
||||
let mut input = input.into_iter();
|
||||
|
||||
let head = input.next().unwrap();
|
||||
if let Some(head) = input.next() {
|
||||
let tail = input.collect();
|
||||
|
||||
CallNode::new(Box::new(head), tail).spanned(span.into())
|
||||
} else {
|
||||
unreachable!("Internal error: spanned_call failed")
|
||||
}
|
||||
}
|
||||
|
||||
fn consume_delimiter(
|
||||
@ -308,7 +379,7 @@ impl TokenTreeBuilder {
|
||||
input: Vec<CurriedToken>,
|
||||
_open: &str,
|
||||
_close: &str,
|
||||
) -> (Span, Span, Span, Vec<TokenNode>) {
|
||||
) -> (Span, Span, Span, Vec<SpannedToken>) {
|
||||
let (start_open_paren, end_open_paren) = self.consume("(");
|
||||
let mut output = vec![];
|
||||
for item in input {
|
||||
@ -333,13 +404,12 @@ impl TokenTreeBuilder {
|
||||
}
|
||||
|
||||
pub fn spanned_parens(
|
||||
input: impl Into<Vec<TokenNode>>,
|
||||
input: impl Into<Vec<SpannedToken>>,
|
||||
spans: (Span, Span),
|
||||
span: impl Into<Span>,
|
||||
) -> TokenNode {
|
||||
TokenNode::Delimited(
|
||||
DelimitedNode::new(Delimiter::Paren, spans, input.into()).spanned(span.into()),
|
||||
)
|
||||
) -> SpannedToken {
|
||||
Token::Delimited(DelimitedNode::new(Delimiter::Paren, spans, input.into()))
|
||||
.into_spanned(span.into())
|
||||
}
|
||||
|
||||
pub fn square(input: Vec<CurriedToken>) -> CurriedToken {
|
||||
@ -351,13 +421,12 @@ impl TokenTreeBuilder {
|
||||
}
|
||||
|
||||
pub fn spanned_square(
|
||||
input: impl Into<Vec<TokenNode>>,
|
||||
input: impl Into<Vec<SpannedToken>>,
|
||||
spans: (Span, Span),
|
||||
span: impl Into<Span>,
|
||||
) -> TokenNode {
|
||||
TokenNode::Delimited(
|
||||
DelimitedNode::new(Delimiter::Square, spans, input.into()).spanned(span.into()),
|
||||
)
|
||||
) -> SpannedToken {
|
||||
Token::Delimited(DelimitedNode::new(Delimiter::Square, spans, input.into()))
|
||||
.into_spanned(span)
|
||||
}
|
||||
|
||||
pub fn braced(input: Vec<CurriedToken>) -> CurriedToken {
|
||||
@ -369,19 +438,18 @@ impl TokenTreeBuilder {
|
||||
}
|
||||
|
||||
pub fn spanned_brace(
|
||||
input: impl Into<Vec<TokenNode>>,
|
||||
input: impl Into<Vec<SpannedToken>>,
|
||||
spans: (Span, Span),
|
||||
span: impl Into<Span>,
|
||||
) -> TokenNode {
|
||||
TokenNode::Delimited(
|
||||
DelimitedNode::new(Delimiter::Brace, spans, input.into()).spanned(span.into()),
|
||||
)
|
||||
) -> SpannedToken {
|
||||
Token::Delimited(DelimitedNode::new(Delimiter::Brace, spans, input.into()))
|
||||
.into_spanned(span)
|
||||
}
|
||||
|
||||
pub fn sp() -> CurriedToken {
|
||||
Box::new(|b| {
|
||||
let (start, end) = b.consume(" ");
|
||||
TokenNode::Whitespace(Span::new(start, end))
|
||||
Token::Whitespace.into_spanned((start, end))
|
||||
})
|
||||
}
|
||||
|
||||
@ -394,8 +462,40 @@ impl TokenTreeBuilder {
|
||||
})
|
||||
}
|
||||
|
||||
pub fn spanned_ws(span: impl Into<Span>) -> TokenNode {
|
||||
TokenNode::Whitespace(span.into())
|
||||
pub fn spanned_ws(span: impl Into<Span>) -> SpannedToken {
|
||||
Token::Whitespace.into_spanned(span)
|
||||
}
|
||||
|
||||
pub fn sep(input: impl Into<String>) -> CurriedToken {
|
||||
let input = input.into();
|
||||
|
||||
Box::new(move |b| {
|
||||
let (start, end) = b.consume(&input);
|
||||
TokenTreeBuilder::spanned_sep(Span::new(start, end))
|
||||
})
|
||||
}
|
||||
|
||||
pub fn spanned_sep(span: impl Into<Span>) -> SpannedToken {
|
||||
Token::Separator.into_spanned(span)
|
||||
}
|
||||
|
||||
pub fn comment(input: impl Into<String>) -> CurriedToken {
|
||||
let input = input.into();
|
||||
|
||||
Box::new(move |b| {
|
||||
let outer_start = b.pos;
|
||||
b.consume("#");
|
||||
let (start, end) = b.consume(&input);
|
||||
let outer_end = b.pos;
|
||||
|
||||
TokenTreeBuilder::spanned_comment((start, end), (outer_start, outer_end))
|
||||
})
|
||||
}
|
||||
|
||||
pub fn spanned_comment(input: impl Into<Span>, span: impl Into<Span>) -> SpannedToken {
|
||||
let span = span.into();
|
||||
|
||||
Token::Comment(Comment::line(input)).into_spanned(span)
|
||||
}
|
||||
|
||||
fn consume(&mut self, input: &str) -> (usize, usize) {
|
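
A hedged sketch of how the curried builders above are meant to be driven; the module path and the b alias are assumptions modeled on the parser tests, and the exact tokens are arbitrary:

use crate::parse::token_tree_builder::TokenTreeBuilder as b;

fn sample_tokens() -> (SpannedToken, String) {
    b::build(|builder| {
        // token_list returns a CurriedToken (a boxed FnOnce); applying it to the
        // builder consumes source text and yields a SpannedToken with its spans.
        b::token_list(vec![b::bare("ls"), b::sp(), b::flag("all")])(builder)
    })
}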
@ -1,13 +1,20 @@
|
||||
use crate::parser::Operator;
|
||||
use crate::prelude::*;
|
||||
use nu_source::{Spanned, Text};
|
||||
use crate::parse::parser::Number;
|
||||
use crate::{CompareOperator, EvaluationOperator};
|
||||
use bigdecimal::BigDecimal;
|
||||
use nu_protocol::ShellTypeName;
|
||||
use nu_source::{
|
||||
b, DebugDocBuilder, HasSpan, PrettyDebug, PrettyDebugWithSource, Span, Spanned, SpannedItem,
|
||||
Text,
|
||||
};
|
||||
use num_bigint::BigInt;
|
||||
use std::fmt;
|
||||
use std::str::FromStr;
|
||||
|
||||
#[derive(Debug, Clone, Copy, Eq, PartialEq, Ord, PartialOrd, Hash)]
|
||||
pub enum UnspannedToken {
|
||||
Number(RawNumber),
|
||||
Operator(Operator),
|
||||
CompareOperator(CompareOperator),
|
||||
EvaluationOperator(EvaluationOperator),
|
||||
String(Span),
|
||||
Variable(Span),
|
||||
ExternalCommand(Span),
|
||||
@ -29,7 +36,9 @@ impl ShellTypeName for UnspannedToken {
|
||||
fn type_name(&self) -> &'static str {
|
||||
match self {
|
||||
UnspannedToken::Number(_) => "number",
|
||||
UnspannedToken::Operator(..) => "operator",
|
||||
UnspannedToken::CompareOperator(..) => "comparison operator",
|
||||
UnspannedToken::EvaluationOperator(EvaluationOperator::Dot) => "dot",
|
||||
UnspannedToken::EvaluationOperator(EvaluationOperator::DotDot) => "dotdot",
|
||||
UnspannedToken::String(_) => "string",
|
||||
UnspannedToken::Variable(_) => "variable",
|
||||
UnspannedToken::ExternalCommand(_) => "syntax error",
|
||||
@ -79,9 +88,19 @@ impl RawNumber {
|
||||
|
||||
pub(crate) fn to_number(self, source: &Text) -> Number {
|
||||
match self {
|
||||
RawNumber::Int(tag) => Number::Int(BigInt::from_str(tag.slice(source)).unwrap()),
|
||||
RawNumber::Int(tag) => {
|
||||
if let Ok(int) = BigInt::from_str(tag.slice(source)) {
|
||||
Number::Int(int)
|
||||
} else {
|
||||
unreachable!("Internal error: to_number failed")
|
||||
}
|
||||
}
|
||||
RawNumber::Decimal(tag) => {
|
||||
Number::Decimal(BigDecimal::from_str(tag.slice(source)).unwrap())
|
||||
if let Ok(decimal) = BigDecimal::from_str(tag.slice(source)) {
|
||||
Number::Decimal(decimal)
|
||||
} else {
|
||||
unreachable!("Internal error: to_number failed")
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -105,7 +124,8 @@ impl PrettyDebugWithSource for Token {
|
||||
fn pretty_debug(&self, source: &str) -> DebugDocBuilder {
|
||||
match self.unspanned {
|
||||
UnspannedToken::Number(number) => number.pretty_debug(source),
|
||||
UnspannedToken::Operator(operator) => operator.pretty(),
|
||||
UnspannedToken::CompareOperator(operator) => operator.pretty(),
|
||||
UnspannedToken::EvaluationOperator(operator) => operator.pretty(),
|
||||
UnspannedToken::String(_) => b::primitive(self.span.slice(source)),
|
||||
UnspannedToken::Variable(_) => b::var(self.span.slice(source)),
|
||||
UnspannedToken::ExternalCommand(_) => b::primitive(self.span.slice(source)),
|
||||
@ -149,9 +169,9 @@ impl Token {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn extract_operator(&self) -> Option<Spanned<Operator>> {
|
||||
pub fn extract_operator(&self) -> Option<Spanned<CompareOperator>> {
|
||||
match self.unspanned {
|
||||
UnspannedToken::Operator(operator) => Some(operator.spanned(self.span)),
|
||||
UnspannedToken::CompareOperator(operator) => Some(operator.spanned(self.span)),
|
||||
_ => None,
|
||||
}
|
||||
}
|
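
The hunk above swaps .unwrap() for an explicit if let Ok / unreachable!. A purely illustrative alternative (not what the diff does) would make the conversion fallible and let the caller decide:

impl RawNumber {
    // Sketch only: None instead of a panic when the slice is not a valid number.
    pub(crate) fn try_to_number(self, source: &Text) -> Option<Number> {
        match self {
            RawNumber::Int(tag) => BigInt::from_str(tag.slice(source)).ok().map(Number::Int),
            RawNumber::Decimal(tag) => BigDecimal::from_str(tag.slice(source))
                .ok()
                .map(Number::Decimal),
        }
    }
}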
crates/nu-parser/src/parse/unit.rs (new file, 127 lines)
@@ -0,0 +1,127 @@
|
||||
use crate::parse::parser::Number;
|
||||
use nu_protocol::{Primitive, UntaggedValue};
|
||||
use nu_source::{b, DebugDocBuilder, PrettyDebug};
|
||||
use num_traits::ToPrimitive;
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
use std::str::FromStr;
|
||||
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Deserialize, Serialize)]
|
||||
pub enum Unit {
|
||||
// Filesize units
|
||||
Byte,
|
||||
Kilobyte,
|
||||
Megabyte,
|
||||
Gigabyte,
|
||||
Terabyte,
|
||||
Petabyte,
|
||||
|
||||
// Duration units
|
||||
Second,
|
||||
Minute,
|
||||
Hour,
|
||||
Day,
|
||||
Week,
|
||||
Month,
|
||||
Year,
|
||||
}
|
||||
|
||||
impl PrettyDebug for Unit {
|
||||
fn pretty(&self) -> DebugDocBuilder {
|
||||
b::keyword(self.as_str())
|
||||
}
|
||||
}
|
||||
|
||||
fn convert_number_to_u64(number: &Number) -> u64 {
|
||||
match number {
|
||||
Number::Int(big_int) => {
|
||||
if let Some(x) = big_int.to_u64() {
|
||||
x
|
||||
} else {
|
||||
unreachable!("Internal error: convert_number_to_u64 given incompatible number")
|
||||
}
|
||||
}
|
||||
Number::Decimal(big_decimal) => {
|
||||
if let Some(x) = big_decimal.to_u64() {
|
||||
x
|
||||
} else {
|
||||
unreachable!("Internal error: convert_number_to_u64 given incompatible number")
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Unit {
|
||||
pub fn as_str(self) -> &'static str {
|
||||
match self {
|
||||
Unit::Byte => "B",
|
||||
Unit::Kilobyte => "KB",
|
||||
Unit::Megabyte => "MB",
|
||||
Unit::Gigabyte => "GB",
|
||||
Unit::Terabyte => "TB",
|
||||
Unit::Petabyte => "PB",
|
||||
Unit::Second => "s",
|
||||
Unit::Minute => "m",
|
||||
Unit::Hour => "h",
|
||||
Unit::Day => "d",
|
||||
Unit::Week => "w",
|
||||
Unit::Month => "M",
|
||||
Unit::Year => "y",
|
||||
}
|
||||
}
|
||||
|
||||
pub fn compute(self, size: &Number) -> UntaggedValue {
|
||||
let size = size.clone();
|
||||
|
||||
match self {
|
||||
Unit::Byte => number(size),
|
||||
Unit::Kilobyte => number(size * 1024),
|
||||
Unit::Megabyte => number(size * 1024 * 1024),
|
||||
Unit::Gigabyte => number(size * 1024 * 1024 * 1024),
|
||||
Unit::Terabyte => number(size * 1024 * 1024 * 1024 * 1024),
|
||||
Unit::Petabyte => number(size * 1024 * 1024 * 1024 * 1024 * 1024),
|
||||
Unit::Second => duration(convert_number_to_u64(&size)),
|
||||
Unit::Minute => duration(60 * convert_number_to_u64(&size)),
|
||||
Unit::Hour => duration(60 * 60 * convert_number_to_u64(&size)),
|
||||
Unit::Day => duration(24 * 60 * 60 * convert_number_to_u64(&size)),
|
||||
Unit::Week => duration(7 * 24 * 60 * 60 * convert_number_to_u64(&size)),
|
||||
Unit::Month => duration(30 * 24 * 60 * 60 * convert_number_to_u64(&size)),
|
||||
Unit::Year => duration(365 * 24 * 60 * 60 * convert_number_to_u64(&size)),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn number(number: impl Into<Number>) -> UntaggedValue {
|
||||
let number = number.into();
|
||||
|
||||
match number {
|
||||
Number::Int(int) => UntaggedValue::Primitive(Primitive::Int(int)),
|
||||
Number::Decimal(decimal) => UntaggedValue::Primitive(Primitive::Decimal(decimal)),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn duration(secs: u64) -> UntaggedValue {
|
||||
UntaggedValue::Primitive(Primitive::Duration(secs))
|
||||
}
|
||||
|
||||
impl FromStr for Unit {
|
||||
type Err = ();
|
||||
fn from_str(input: &str) -> Result<Self, <Self as std::str::FromStr>::Err> {
|
||||
match input {
|
||||
"B" | "b" => Ok(Unit::Byte),
|
||||
"KB" | "kb" | "Kb" | "K" | "k" => Ok(Unit::Kilobyte),
|
||||
"MB" | "mb" | "Mb" => Ok(Unit::Megabyte),
|
||||
"GB" | "gb" | "Gb" => Ok(Unit::Gigabyte),
|
||||
"TB" | "tb" | "Tb" => Ok(Unit::Terabyte),
|
||||
"PB" | "pb" | "Pb" => Ok(Unit::Petabyte),
|
||||
"s" => Ok(Unit::Second),
|
||||
"m" => Ok(Unit::Minute),
|
||||
"h" => Ok(Unit::Hour),
|
||||
"d" => Ok(Unit::Day),
|
||||
"w" => Ok(Unit::Week),
|
||||
"M" => Ok(Unit::Month),
|
||||
"y" => Ok(Unit::Year),
|
||||
_ => Err(()),
|
||||
}
|
||||
}
|
||||
}
|
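
A small in-crate sketch tying Unit::from_str and Unit::compute together; the helper name is illustrative and assumes Number and the items above are in scope:

// Sketch only: "10" + "kb" becomes an untagged filesize value of 10 * 1024 bytes.
fn ten_kilobytes() -> UntaggedValue {
    let unit = Unit::from_str("kb").unwrap_or(Unit::Byte);
    let size = Number::Int(10.into());

    unit.compute(&size)
}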
crates/nu-parser/src/parse_command.rs (new file, 338 lines)
@@ -0,0 +1,338 @@
|
||||
use crate::hir::syntax_shape::{
|
||||
BackoffColoringMode, ExpandSyntax, MaybeSpaceShape, MaybeWhitespaceEof,
|
||||
};
|
||||
use crate::hir::SpannedExpression;
|
||||
use crate::TokensIterator;
|
||||
use crate::{
|
||||
hir::{self, NamedArguments},
|
||||
Flag,
|
||||
};
|
||||
use log::trace;
|
||||
use nu_errors::{ArgumentError, ParseError};
|
||||
use nu_protocol::{NamedType, PositionalType, Signature, SyntaxShape};
|
||||
use nu_source::{HasFallibleSpan, HasSpan, PrettyDebugWithSource, Span, Spanned, SpannedItem};
|
||||
|
||||
type OptionalHeadTail = (Option<Vec<hir::SpannedExpression>>, Option<NamedArguments>);
|
||||
|
||||
pub fn parse_command_tail(
|
||||
config: &Signature,
|
||||
tail: &mut TokensIterator,
|
||||
command_span: Span,
|
||||
) -> Result<Option<OptionalHeadTail>, ParseError> {
|
||||
let mut named = NamedArguments::new();
|
||||
let mut found_error: Option<ParseError> = None;
|
||||
let mut rest_signature = config.clone();
|
||||
|
||||
trace!(target: "nu::parse::trace_remaining", "");
|
||||
|
||||
trace_remaining("nodes", &tail);
|
||||
|
||||
for (name, kind) in &config.named {
|
||||
trace!(target: "nu::parse::trace_remaining", "looking for {} : {:?}", name, kind);
|
||||
|
||||
tail.move_to(0);
|
||||
|
||||
match &kind.0 {
|
||||
NamedType::Switch => {
|
||||
let switch = extract_switch(name, tail);
|
||||
|
||||
match switch {
|
||||
None => named.insert_switch(name, None),
|
||||
Some((_, flag)) => {
|
||||
named.insert_switch(name, Some(*flag));
|
||||
rest_signature.remove_named(name);
|
||||
tail.color_shape(flag.color(flag.span));
|
||||
}
|
||||
}
|
||||
}
|
||||
NamedType::Mandatory(syntax_type) => {
|
||||
match extract_mandatory(config, name, tail, command_span) {
|
||||
Err(err) => {
|
||||
// remember this error, but continue coloring
|
||||
found_error = Some(err);
|
||||
}
|
||||
Ok((pos, flag)) => {
|
||||
let result = expand_flag(tail, *syntax_type, flag, pos);
|
||||
|
||||
tail.move_to(0);
|
||||
|
||||
match result {
|
||||
Ok(expr) => {
|
||||
named.insert_mandatory(name, expr);
|
||||
rest_signature.remove_named(name);
|
||||
}
|
||||
Err(_) => {
|
||||
found_error = Some(ParseError::argument_error(
|
||||
config.name.clone().spanned(flag.span),
|
||||
ArgumentError::MissingValueForName(name.to_string()),
|
||||
))
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
NamedType::Optional(syntax_type) => {
|
||||
match extract_optional(name, tail) {
|
||||
Err(err) => {
|
||||
// remember this error, but continue coloring
|
||||
found_error = Some(err);
|
||||
}
|
||||
Ok(Some((pos, flag))) => {
|
||||
let result = expand_flag(tail, *syntax_type, flag, pos);
|
||||
|
||||
tail.move_to(0);
|
||||
|
||||
match result {
|
||||
Ok(expr) => {
|
||||
named.insert_optional(name, Some(expr));
|
||||
rest_signature.remove_named(name);
|
||||
}
|
||||
Err(_) => {
|
||||
found_error = Some(ParseError::argument_error(
|
||||
config.name.clone().spanned(flag.span),
|
||||
ArgumentError::MissingValueForName(name.to_string()),
|
||||
))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Ok(None) => {
|
||||
named.insert_optional(name, None);
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
trace_remaining("after named", &tail);
|
||||
|
||||
let mut positional = vec![];
|
||||
|
||||
match continue_parsing_positionals(&config, tail, &mut rest_signature, command_span) {
|
||||
Ok(positionals) => {
|
||||
positional = positionals;
|
||||
}
|
||||
Err(reason) => {
|
||||
if found_error.is_none() && !tail.source().contains("help") {
|
||||
found_error = Some(reason);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
trace_remaining("after positional", &tail);
|
||||
|
||||
if let Some((syntax_type, _)) = config.rest_positional {
|
||||
let mut out = vec![];
|
||||
|
||||
loop {
|
||||
if found_error.is_some() {
|
||||
break;
|
||||
}
|
||||
|
||||
tail.move_to(0);
|
||||
|
||||
trace_remaining("start rest", &tail);
|
||||
eat_any_whitespace(tail);
|
||||
trace_remaining("after whitespace", &tail);
|
||||
|
||||
if tail.at_end() {
|
||||
break;
|
||||
}
|
||||
|
||||
match tail.expand_syntax(syntax_type) {
|
||||
Err(err) => found_error = Some(err),
|
||||
Ok(next) => out.push(next),
|
||||
};
|
||||
}
|
||||
|
||||
positional.extend(out);
|
||||
}
|
||||
|
||||
eat_any_whitespace(tail);
|
||||
|
||||
// Consume any remaining tokens with backoff coloring mode
|
||||
tail.expand_infallible(BackoffColoringMode::new(rest_signature.allowed()));
|
||||
|
||||
// This is pretty dubious, but it works. We should look into a better algorithm that doesn't end up requiring
|
||||
// this solution.
|
||||
tail.sort_shapes();
|
||||
|
||||
if let Some(err) = found_error {
|
||||
return Err(err);
|
||||
}
|
||||
|
||||
trace_remaining("after rest", &tail);
|
||||
|
||||
trace!(target: "nu::parse::trace_remaining", "Constructed positional={:?} named={:?}", positional, named);
|
||||
|
||||
let positional = if positional.is_empty() {
|
||||
None
|
||||
} else {
|
||||
Some(positional)
|
||||
};
|
||||
|
||||
// TODO: Error if extra unconsumed positional arguments
|
||||
|
||||
let named = if named.named.is_empty() {
|
||||
None
|
||||
} else {
|
||||
Some(named)
|
||||
};
|
||||
|
||||
trace!(target: "nu::parse::trace_remaining", "Normalized positional={:?} named={:?}", positional, named);
|
||||
|
||||
Ok(Some((positional, named)))
|
||||
}
|
||||
|
||||
pub fn continue_parsing_positionals(
|
||||
config: &Signature,
|
||||
tail: &mut TokensIterator,
|
||||
rest_signature: &mut Signature,
|
||||
command_span: Span,
|
||||
) -> Result<Vec<SpannedExpression>, ParseError> {
|
||||
let mut positional = vec![];
|
||||
|
||||
for arg in &config.positional {
|
||||
trace!(target: "nu::parse::trace_remaining", "Processing positional {:?}", arg);
|
||||
|
||||
tail.move_to(0);
|
||||
|
||||
let result = expand_spaced_expr(arg.0.syntax_type(), tail);
|
||||
|
||||
match result {
|
||||
Err(_) => match &arg.0 {
|
||||
PositionalType::Mandatory(..) => {
|
||||
return Err(ParseError::argument_error(
|
||||
config.name.clone().spanned(command_span),
|
||||
ArgumentError::MissingMandatoryPositional(arg.0.name().to_string()),
|
||||
))
|
||||
}
|
||||
PositionalType::Optional(..) => {
|
||||
if tail.expand_syntax(MaybeWhitespaceEof).is_ok() {
|
||||
break;
|
||||
}
|
||||
}
|
||||
},
|
||||
Ok(result) => {
|
||||
rest_signature.shift_positional();
|
||||
positional.push(result);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Ok(positional)
|
||||
}
|
||||
|
||||
fn eat_any_whitespace(tail: &mut TokensIterator) {
|
||||
loop {
|
||||
match tail.expand_infallible(MaybeSpaceShape) {
|
||||
None => break,
|
||||
Some(_) => continue,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn expand_flag(
|
||||
token_nodes: &mut TokensIterator,
|
||||
syntax_type: SyntaxShape,
|
||||
flag: Spanned<Flag>,
|
||||
pos: usize,
|
||||
) -> Result<SpannedExpression, ()> {
|
||||
token_nodes.color_shape(flag.color(flag.span));
|
||||
|
||||
let result = token_nodes.atomic_parse(|token_nodes| {
|
||||
token_nodes.move_to(pos);
|
||||
|
||||
if token_nodes.at_end() {
|
||||
return Err(ParseError::unexpected_eof("flag", Span::unknown()));
|
||||
}
|
||||
|
||||
let expr = expand_spaced_expr(syntax_type, token_nodes)?;
|
||||
|
||||
Ok(expr)
|
||||
});
|
||||
|
||||
let expr = result.map_err(|_| ())?;
|
||||
Ok(expr)
|
||||
}
|
||||
|
||||
fn expand_spaced_expr<
|
||||
T: HasFallibleSpan + PrettyDebugWithSource + Clone + std::fmt::Debug + 'static,
|
||||
>(
|
||||
syntax: impl ExpandSyntax<Output = Result<T, ParseError>>,
|
||||
token_nodes: &mut TokensIterator,
|
||||
) -> Result<T, ParseError> {
|
||||
token_nodes.atomic_parse(|token_nodes| {
|
||||
token_nodes.expand_infallible(MaybeSpaceShape);
|
||||
token_nodes.expand_syntax(syntax)
|
||||
})
|
||||
}
|
||||
|
||||
fn extract_switch(
|
||||
name: &str,
|
||||
tokens: &mut hir::TokensIterator<'_>,
|
||||
) -> Option<(usize, Spanned<Flag>)> {
|
||||
let source = tokens.source();
|
||||
tokens.extract(|t| t.as_flag(name, &source).map(|flag| flag.spanned(t.span())))
|
||||
}
|
||||
|
||||
fn extract_mandatory(
|
||||
config: &Signature,
|
||||
name: &str,
|
||||
tokens: &mut hir::TokensIterator<'_>,
|
||||
span: Span,
|
||||
) -> Result<(usize, Spanned<Flag>), ParseError> {
|
||||
let source = tokens.source();
|
||||
let flag = tokens.extract(|t| t.as_flag(name, &source).map(|flag| flag.spanned(t.span())));
|
||||
|
||||
match flag {
|
||||
None => Err(ParseError::argument_error(
|
||||
config.name.clone().spanned(span),
|
||||
ArgumentError::MissingMandatoryFlag(name.to_string()),
|
||||
)),
|
||||
|
||||
Some((pos, flag)) => {
|
||||
tokens.remove(pos);
|
||||
Ok((pos, flag))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn extract_optional(
|
||||
name: &str,
|
||||
tokens: &mut hir::TokensIterator<'_>,
|
||||
) -> Result<Option<(usize, Spanned<Flag>)>, ParseError> {
|
||||
let source = tokens.source();
|
||||
let flag = tokens.extract(|t| t.as_flag(name, &source).map(|flag| flag.spanned(t.span())));
|
||||
|
||||
match flag {
|
||||
None => Ok(None),
|
||||
Some((pos, flag)) => {
|
||||
tokens.remove(pos);
|
||||
Ok(Some((pos, flag)))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn trace_remaining(desc: &'static str, tail: &hir::TokensIterator<'_>) {
|
||||
let offset = tail.clone().span_at_cursor();
|
||||
let source = tail.source();
|
||||
|
||||
trace!(
|
||||
target: "nu::parse::trace_remaining",
|
||||
"{} = {}",
|
||||
desc,
|
||||
itertools::join(
|
||||
tail.debug_remaining()
|
||||
.iter()
|
||||
.map(|val| {
|
||||
if val.span().start() == offset.start() {
|
||||
format!("<|> %{}%", val.debug(&source))
|
||||
} else {
|
||||
format!("%{}%", val.debug(&source))
|
||||
}
|
||||
}),
|
||||
" "
|
||||
)
|
||||
);
|
||||
}
|
crates/nu-plugin/Cargo.toml (new file, 24 lines)
@@ -0,0 +1,24 @@
[package]
name = "nu-plugin"
version = "0.9.0"
authors = ["Yehuda Katz <wycats@gmail.com>", "Jonathan Turner <jonathan.d.turner@gmail.com>", "Andrés N. Robalino <andres@androbtech.com>"]
edition = "2018"
description = "Nushell Plugin"
license = "MIT"

[lib]
doctest = false

[dependencies]
nu-protocol = { path = "../nu-protocol", version = "0.9.0" }
nu-source = { path = "../nu-source", version = "0.9.0" }
nu-errors = { path = "../nu-errors", version = "0.9.0" }
nu-value-ext = { path = "../nu-value-ext", version = "0.9.0" }

indexmap = { version = "1.3.0", features = ["serde-1"] }
serde = { version = "1.0.103", features = ["derive"] }
num-bigint = { version = "0.2.3", features = ["serde"] }
serde_json = "1.0.44"

[build-dependencies]
nu-build = { version = "0.9.0", path = "../nu-build" }
crates/nu-plugin/build.rs (new file, 3 lines)
@@ -0,0 +1,3 @@
fn main() -> Result<(), Box<dyn std::error::Error>> {
    nu_build::build()
}
crates/nu-plugin/src/lib.rs (new file, 4 lines)
@@ -0,0 +1,4 @@
mod plugin;
pub mod test_helpers;

pub use crate::plugin::{serve_plugin, Plugin};
@@ -1,5 +1,5 @@
-use crate::Signature;
-use crate::{CallInfo, ReturnValue, ShellError, Value};
+use nu_errors::ShellError;
+use nu_protocol::{outln, CallInfo, ReturnValue, Signature, Value};
 use serde::{Deserialize, Serialize};
 use std::io;
 
@@ -24,9 +24,9 @@ pub trait Plugin {
 }
 
 pub fn serve_plugin(plugin: &mut dyn Plugin) {
-    let args = std::env::args();
+    let mut args = std::env::args();
     if args.len() > 1 {
-        let input = args.skip(1).next();
+        let input = args.nth(1);
 
         let input = match input {
             Some(arg) => std::fs::read_to_string(arg),
crates/nu-plugin/src/test_helpers.rs (new file, 213 lines)
@@ -0,0 +1,213 @@
use crate::Plugin;
|
||||
use indexmap::IndexMap;
|
||||
use nu_errors::ShellError;
|
||||
use nu_protocol::{CallInfo, EvaluatedArgs, ReturnSuccess, ReturnValue, UntaggedValue, Value};
|
||||
use nu_source::Tag;
|
||||
|
||||
pub struct PluginTest<'a, T: Plugin> {
|
||||
plugin: &'a mut T,
|
||||
call_info: CallInfo,
|
||||
input: Value,
|
||||
}
|
||||
|
||||
impl<'a, T: Plugin> PluginTest<'a, T> {
|
||||
pub fn for_plugin(plugin: &'a mut T) -> Self {
|
||||
PluginTest {
|
||||
plugin,
|
||||
call_info: CallStub::new().create(),
|
||||
input: UntaggedValue::nothing().into_value(Tag::unknown()),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn args(&mut self, call_info: CallInfo) -> &mut PluginTest<'a, T> {
|
||||
self.call_info = call_info;
|
||||
self
|
||||
}
|
||||
|
||||
pub fn configure(&mut self, callback: impl FnOnce(Vec<String>)) -> &mut PluginTest<'a, T> {
|
||||
let signature = self
|
||||
.plugin
|
||||
.config()
|
||||
.expect("There was a problem configuring the plugin.");
|
||||
callback(signature.named.keys().map(String::from).collect());
|
||||
self
|
||||
}
|
||||
|
||||
pub fn input(&mut self, value: Value) -> &mut PluginTest<'a, T> {
|
||||
self.input = value;
|
||||
self
|
||||
}
|
||||
|
||||
pub fn test(&mut self) -> Result<Vec<ReturnValue>, ShellError> {
|
||||
let return_values = self.plugin.filter(self.input.clone());
|
||||
|
||||
let mut return_values = match return_values {
|
||||
Ok(filtered) => filtered,
|
||||
Err(reason) => return Err(reason),
|
||||
};
|
||||
|
||||
let end = self.plugin.end_filter();
|
||||
|
||||
match end {
|
||||
Ok(filter_ended) => return_values.extend(filter_ended),
|
||||
Err(reason) => return Err(reason),
|
||||
}
|
||||
|
||||
self.plugin.quit();
|
||||
Ok(return_values)
|
||||
}
|
||||
|
||||
pub fn setup(
|
||||
&mut self,
|
||||
callback: impl FnOnce(&mut T, Result<Vec<ReturnValue>, ShellError>),
|
||||
) -> &mut PluginTest<'a, T> {
|
||||
let call_stub = self.call_info.clone();
|
||||
|
||||
self.configure(|flags_configured| {
|
||||
let flags_registered = &call_stub.args.named;
|
||||
|
||||
let flag_passed = match flags_registered {
|
||||
Some(names) => Some(names.keys().map(String::from).collect::<Vec<String>>()),
|
||||
None => None,
|
||||
};
|
||||
|
||||
if let Some(flags) = flag_passed {
|
||||
for flag in flags {
|
||||
assert!(
|
||||
flags_configured.iter().any(|f| *f == flag),
|
||||
format!(
|
||||
"The flag you passed ({}) is not configured in the plugin.",
|
||||
flag
|
||||
)
|
||||
);
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
let began = self.plugin.begin_filter(call_stub);
|
||||
|
||||
let return_values = match began {
|
||||
Ok(values) => Ok(values),
|
||||
Err(reason) => Err(reason),
|
||||
};
|
||||
|
||||
callback(self.plugin, return_values);
|
||||
self
|
||||
}
|
||||
}
|
||||
|
||||
pub fn plugin<T: Plugin>(plugin: &mut T) -> PluginTest<T> {
|
||||
PluginTest::for_plugin(plugin)
|
||||
}
|
||||
|
||||
#[derive(Default)]
|
||||
pub struct CallStub {
|
||||
positionals: Vec<Value>,
|
||||
flags: IndexMap<String, Value>,
|
||||
}
|
||||
|
||||
impl CallStub {
|
||||
pub fn new() -> Self {
|
||||
Default::default()
|
||||
}
|
||||
|
||||
pub fn with_named_parameter(&mut self, name: &str, value: Value) -> &mut Self {
|
||||
self.flags.insert(name.to_string(), value);
|
||||
self
|
||||
}
|
||||
|
||||
pub fn with_long_flag(&mut self, name: &str) -> &mut Self {
|
||||
self.flags.insert(
|
||||
name.to_string(),
|
||||
UntaggedValue::boolean(true).into_value(Tag::unknown()),
|
||||
);
|
||||
self
|
||||
}
|
||||
|
||||
pub fn with_parameter(&mut self, name: &str) -> Result<&mut Self, ShellError> {
|
||||
let fields: Vec<Value> = name
|
||||
.split('.')
|
||||
.map(|s| UntaggedValue::string(s.to_string()).into_value(Tag::unknown()))
|
||||
.collect();
|
||||
|
||||
self.positionals.push(value::column_path(&fields)?);
|
||||
Ok(self)
|
||||
}
|
||||
|
||||
pub fn create(&self) -> CallInfo {
|
||||
CallInfo {
|
||||
args: EvaluatedArgs::new(Some(self.positionals.clone()), Some(self.flags.clone())),
|
||||
name_tag: Tag::unknown(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn expect_return_value_at(
|
||||
for_results: Result<Vec<Result<ReturnSuccess, ShellError>>, ShellError>,
|
||||
at: usize,
|
||||
) -> Value {
|
||||
let return_values = for_results
|
||||
.expect("Failed! This seems to be an error getting back the results from the plugin.");
|
||||
|
||||
for (idx, item) in return_values.iter().enumerate() {
|
||||
let item = match item {
|
||||
Ok(return_value) => return_value,
|
||||
Err(reason) => panic!(format!("{}", reason)),
|
||||
};
|
||||
|
||||
if idx == at {
|
||||
if let Some(value) = item.raw_value() {
|
||||
return value;
|
||||
} else {
|
||||
panic!("Internal error: could not get raw value in expect_return_value_at")
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
panic!(format!(
|
||||
"Couldn't get return value from stream at {}. (There are {} items)",
|
||||
at,
|
||||
return_values.len() - 1
|
||||
))
|
||||
}
|
||||
|
||||
pub mod value {
|
||||
use nu_errors::ShellError;
|
||||
use nu_protocol::{Primitive, TaggedDictBuilder, UntaggedValue, Value};
|
||||
use nu_source::Tag;
|
||||
use nu_value_ext::ValueExt;
|
||||
use num_bigint::BigInt;
|
||||
|
||||
pub fn get_data(for_value: Value, key: &str) -> Value {
|
||||
for_value.get_data(&key.to_string()).borrow().clone()
|
||||
}
|
||||
|
||||
pub fn int(i: impl Into<BigInt>) -> Value {
|
||||
UntaggedValue::Primitive(Primitive::Int(i.into())).into_untagged_value()
|
||||
}
|
||||
|
||||
pub fn string(input: impl Into<String>) -> Value {
|
||||
UntaggedValue::string(input.into()).into_untagged_value()
|
||||
}
|
||||
|
||||
pub fn structured_sample_record(key: &str, value: &str) -> Value {
|
||||
let mut record = TaggedDictBuilder::new(Tag::unknown());
|
||||
record.insert_untagged(key, UntaggedValue::string(value));
|
||||
record.into_value()
|
||||
}
|
||||
|
||||
pub fn unstructured_sample_record(value: &str) -> Value {
|
||||
UntaggedValue::string(value).into_value(Tag::unknown())
|
||||
}
|
||||
|
||||
pub fn table(list: &[Value]) -> Value {
|
||||
UntaggedValue::table(list).into_untagged_value()
|
||||
}
|
||||
|
||||
pub fn column_path(paths: &[Value]) -> Result<Value, ShellError> {
|
||||
Ok(UntaggedValue::Primitive(Primitive::ColumnPath(
|
||||
table(&paths.to_vec()).as_column_path()?.item,
|
||||
))
|
||||
.into_untagged_value())
|
||||
}
|
||||
}
|
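A rough sketch of how these helpers compose in a plugin test; `MyPlugin` is a placeholder for a real `Plugin` implementation, and the flag names and expected output are made up.

```rust
// Illustrative only; `MyPlugin` is an assumed Plugin implementation.
use nu_plugin::test_helpers::{expect_return_value_at, plugin, value, CallStub};

fn check_my_plugin() {
    let mut my_plugin = MyPlugin::default();

    // Build a CallInfo with one switch and one named parameter.
    let call = CallStub::new()
        .with_long_flag("verbose")
        .with_named_parameter("name", value::string("nushell"))
        .create();

    // Run begin_filter/filter/end_filter through the harness.
    let results = plugin(&mut my_plugin)
        .args(call)
        .input(value::unstructured_sample_record("sample"))
        .test();

    // Pull out the first returned value and compare it with whatever the
    // plugin is expected to produce.
    let first = expect_return_value_at(results, 0);
    assert_eq!(first, value::string("expected output"));
}
```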
crates/nu-protocol/Cargo.toml (new file, 41 lines)
@@ -0,0 +1,41 @@
[package]
name = "nu-protocol"
version = "0.9.0"
authors = ["Yehuda Katz <wycats@gmail.com>", "Jonathan Turner <jonathan.d.turner@gmail.com>", "Andrés N. Robalino <andres@androbtech.com>"]
edition = "2018"
description = "Core values and protocols for Nushell"
license = "MIT"

[lib]
doctest = false

[dependencies]
nu-source = { path = "../nu-source", version = "0.9.0" }
nu-errors = { path = "../nu-errors", version = "0.9.0" }

serde = { version = "1.0.103", features = ["derive"] }
indexmap = { version = "1.3.0", features = ["serde-1"] }
num-bigint = { version = "0.2.3", features = ["serde"] }
bigdecimal = { version = "0.1.0", features = ["serde"] }
chrono = { version = "0.4.10", features = ["serde"] }
num-traits = "0.2.8"
serde_bytes = "0.11.3"
getset = "0.0.9"
derive-new = "0.5.8"
ansi_term = "0.12.1"
language-reporting = "0.4.0"
nom = "5.0.1"
nom_locate = "1.0.0"
nom-tracable = "0.4.1"
typetag = "0.1.4"
query_interface = "0.3.5"
byte-unit = "3.0.3"
natural = "0.3.0"

# implement conversions
serde_yaml = "0.8"
toml = "0.5.5"
serde_json = "1.0.44"

[build-dependencies]
nu-build = { version = "0.9.0", path = "../nu-build" }
crates/nu-protocol/build.rs (new file, 3 lines)
@@ -0,0 +1,3 @@
fn main() -> Result<(), Box<dyn std::error::Error>> {
    nu_build::build()
}
crates/nu-protocol/src/call_info.rs (new file, 114 lines)
@@ -0,0 +1,114 @@
use crate::value::Value;
|
||||
use derive_new::new;
|
||||
use indexmap::IndexMap;
|
||||
use nu_errors::ShellError;
|
||||
use nu_source::Tag;
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
/// Associated information for the call of a command, including the args passed to the command and a tag that spans the name of the command being called
|
||||
#[derive(Deserialize, Serialize, Debug, Clone)]
|
||||
pub struct CallInfo {
|
||||
/// The arguments associated with this call
|
||||
pub args: EvaluatedArgs,
|
||||
/// The tag (underline-able position) of the name of the call itself
|
||||
pub name_tag: Tag,
|
||||
}
|
||||
|
||||
/// The set of positional and named arguments, after their values have been evaluated.
|
||||
///
|
||||
/// * Positional arguments are those that are given as plain values, without an associated flag. For example, in `foo arg1 arg2`, both `arg1` and `arg2` are positional arguments.
/// * Named arguments are those associated with a flag. For example, in `foo --given bar`, the named argument is `given` with the value `bar`.
|
||||
#[derive(Debug, Default, new, Serialize, Deserialize, Clone)]
|
||||
pub struct EvaluatedArgs {
|
||||
pub positional: Option<Vec<Value>>,
|
||||
pub named: Option<IndexMap<String, Value>>,
|
||||
}
|
||||
|
||||
impl EvaluatedArgs {
|
||||
/// Retrieve a subset of positional arguments starting at a given position
|
||||
pub fn slice_from(&self, from: usize) -> Vec<Value> {
|
||||
let positional = &self.positional;
|
||||
|
||||
match positional {
|
||||
None => vec![],
|
||||
Some(list) => list[from..].to_vec(),
|
||||
}
|
||||
}
|
||||
|
||||
/// Get the nth positional argument, if possible
|
||||
pub fn nth(&self, pos: usize) -> Option<&Value> {
|
||||
match &self.positional {
|
||||
None => None,
|
||||
Some(array) => array.get(pos),
|
||||
}
|
||||
}
|
||||
|
||||
/// Get the nth positional argument, error if not possible
|
||||
pub fn expect_nth(&self, pos: usize) -> Result<&Value, ShellError> {
|
||||
match &self.positional {
|
||||
None => Err(ShellError::unimplemented("Better error: expect_nth")),
|
||||
Some(array) => match array.get(pos) {
|
||||
None => Err(ShellError::unimplemented("Better error: expect_nth")),
|
||||
Some(item) => Ok(item),
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
/// Get the number of positional arguments available
|
||||
pub fn len(&self) -> usize {
|
||||
match &self.positional {
|
||||
None => 0,
|
||||
Some(array) => array.len(),
|
||||
}
|
||||
}
|
||||
|
||||
/// Return if there are no positional arguments
|
||||
pub fn is_empty(&self) -> bool {
|
||||
self.len() == 0
|
||||
}
|
||||
|
||||
/// Return true if the set of named arguments contains the name provided
|
||||
pub fn has(&self, name: &str) -> bool {
|
||||
match &self.named {
|
||||
None => false,
|
||||
Some(named) => named.contains_key(name),
|
||||
}
|
||||
}
|
||||
|
||||
/// Gets the corresponding Value for the named argument given, if possible
|
||||
pub fn get(&self, name: &str) -> Option<&Value> {
|
||||
match &self.named {
|
||||
None => None,
|
||||
Some(named) => named.get(name),
|
||||
}
|
||||
}
|
||||
|
||||
/// Iterates over the positional arguments
|
||||
pub fn positional_iter(&self) -> PositionalIter<'_> {
|
||||
match &self.positional {
|
||||
None => PositionalIter::Empty,
|
||||
Some(v) => {
|
||||
let iter = v.iter();
|
||||
PositionalIter::Array(iter)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// An iterator to help iterate over positional arguments
|
||||
pub enum PositionalIter<'a> {
|
||||
Empty,
|
||||
Array(std::slice::Iter<'a, Value>),
|
||||
}
|
||||
|
||||
impl<'a> Iterator for PositionalIter<'a> {
|
||||
type Item = &'a Value;
|
||||
|
||||
/// The required `next` function to implement the Iterator trait
|
||||
fn next(&mut self) -> Option<Self::Item> {
|
||||
match self {
|
||||
PositionalIter::Empty => None,
|
||||
PositionalIter::Array(iter) => iter.next(),
|
||||
}
|
||||
}
|
||||
}
|
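A small illustrative sketch of building and querying `EvaluatedArgs` (the argument names and values here are made up):

```rust
use indexmap::IndexMap;
use nu_protocol::{EvaluatedArgs, UntaggedValue};
use nu_source::Tag;

fn demo() {
    // One named switch and one positional argument.
    let mut named = IndexMap::new();
    named.insert(
        "force".to_string(),
        UntaggedValue::boolean(true).into_value(Tag::unknown()),
    );
    let positional = vec![UntaggedValue::string("a.txt").into_value(Tag::unknown())];

    let args = EvaluatedArgs::new(Some(positional), Some(named));

    assert_eq!(args.len(), 1);          // one positional argument
    assert!(args.has("force"));         // the switch was passed
    assert!(args.nth(0).is_some());     // lookup by position
    assert!(args.expect_nth(1).is_err()); // out-of-range positions are errors
}
```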
crates/nu-protocol/src/lib.rs (new file, 26 lines)
@@ -0,0 +1,26 @@
#[macro_use]
mod macros;

mod call_info;
mod maybe_owned;
mod return_value;
mod signature;
mod syntax_shape;
mod type_name;
mod type_shape;
mod value;

pub use crate::call_info::{CallInfo, EvaluatedArgs};
pub use crate::maybe_owned::MaybeOwned;
pub use crate::return_value::{CommandAction, ReturnSuccess, ReturnValue};
pub use crate::signature::{NamedType, PositionalType, Signature};
pub use crate::syntax_shape::SyntaxShape;
pub use crate::type_name::{PrettyType, ShellTypeName, SpannedTypeName};
pub use crate::type_shape::{Row as RowType, Type};
pub use crate::value::column_path::{did_you_mean, ColumnPath, PathMember, UnspannedPathMember};
pub use crate::value::dict::{Dictionary, TaggedDictBuilder};
pub use crate::value::evaluate::{Evaluate, EvaluateTrait, Scope};
pub use crate::value::primitive::Primitive;
pub use crate::value::primitive::{format_date, format_duration, format_primitive};
pub use crate::value::range::{Range, RangeInclusion};
pub use crate::value::{UntaggedValue, Value};
crates/nu-protocol/src/macros.rs (new file, 17 lines)
@@ -0,0 +1,17 @@
/// Outputs to standard out
///
/// Note: this exists to differentiate between intentional writing to stdout
/// and stray printlns left by accident
#[macro_export]
macro_rules! outln {
    ($($tokens:tt)*) => { println!($($tokens)*) }
}

/// Outputs to standard error
///
/// Note: this exists to differentiate between intentional writing to stderr
/// and stray printlns left by accident
#[macro_export]
macro_rules! errln {
    ($($tokens:tt)*) => { eprintln!($($tokens)*) }
}
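A quick illustration of the intended use of these macros in place of bare `println!`/`eprintln!` calls:

```rust
use nu_protocol::{errln, outln};

fn demo() {
    outln!("wrote {} rows", 3);        // deliberate output to stdout
    errln!("warning: {}", "partial");  // deliberate output to stderr
}
```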
crates/nu-protocol/src/maybe_owned.rs (new file, 18 lines)
@@ -0,0 +1,18 @@
#![allow(clippy::should_implement_trait)]

/// Helper type to allow passing something that may potentially be owned, but could also be borrowed
#[derive(Debug)]
pub enum MaybeOwned<'a, T> {
    Owned(T),
    Borrowed(&'a T),
}

impl<T> MaybeOwned<'_, T> {
    /// Allows the borrowing of an owned value or passes out the borrowed value
    pub fn borrow(&self) -> &T {
        match self {
            MaybeOwned::Owned(v) => v,
            MaybeOwned::Borrowed(v) => v,
        }
    }
}
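A tiny illustrative sketch of how `MaybeOwned::borrow` unifies both variants:

```rust
use nu_protocol::MaybeOwned;

fn demo() {
    let owned: MaybeOwned<'_, String> = MaybeOwned::Owned("hello".to_string());
    let original = "world".to_string();
    let borrowed: MaybeOwned<'_, String> = MaybeOwned::Borrowed(&original);

    // Either variant hands back a &String.
    assert_eq!(owned.borrow(), "hello");
    assert_eq!(borrowed.borrow(), "world");
}
```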
crates/nu-protocol/src/return_value.rs (new file, 111 lines)
@@ -0,0 +1,111 @@
use crate::value::Value;
|
||||
use nu_errors::ShellError;
|
||||
use nu_source::{b, DebugDocBuilder, PrettyDebug};
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
/// The inner set of actions for the command processor. Each denotes a way to change state in the processor without changing it directly from the command itself.
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
pub enum CommandAction {
|
||||
/// Change to a new directory or path (in non-filesystem situations)
|
||||
ChangePath(String),
|
||||
/// Exit out of Nu
|
||||
Exit,
|
||||
/// Display an error
|
||||
Error(ShellError),
|
||||
/// Enter a new shell at the given path
|
||||
EnterShell(String),
|
||||
/// Convert the value given from one type to another
|
||||
AutoConvert(Value, String),
|
||||
/// Enter a value shell, one that allows exploring inside of a Value
|
||||
EnterValueShell(Value),
|
||||
/// Enter the help shell, which allows exploring the help system
|
||||
EnterHelpShell(Value),
|
||||
/// Go to the previous shell in the shell ring buffer
|
||||
PreviousShell,
|
||||
/// Go to the next shell in the shell ring buffer
|
||||
NextShell,
|
||||
/// Leave the current shell. If it's the last shell, exit out of Nu
|
||||
LeaveShell,
|
||||
}
|
||||
|
||||
impl PrettyDebug for CommandAction {
|
||||
/// Get a command action ready to be pretty-printed
|
||||
fn pretty(&self) -> DebugDocBuilder {
|
||||
match self {
|
||||
CommandAction::ChangePath(path) => b::typed("change path", b::description(path)),
|
||||
CommandAction::Exit => b::description("exit"),
|
||||
CommandAction::Error(_) => b::error("error"),
|
||||
CommandAction::AutoConvert(_, extension) => {
|
||||
b::typed("auto convert", b::description(extension))
|
||||
}
|
||||
CommandAction::EnterShell(s) => b::typed("enter shell", b::description(s)),
|
||||
CommandAction::EnterValueShell(v) => b::typed("enter value shell", v.pretty()),
|
||||
CommandAction::EnterHelpShell(v) => b::typed("enter help shell", v.pretty()),
|
||||
CommandAction::PreviousShell => b::description("previous shell"),
|
||||
CommandAction::NextShell => b::description("next shell"),
|
||||
CommandAction::LeaveShell => b::description("leave shell"),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// The fundamental success type in the pipeline. Commands return these values as their main responsibility
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
pub enum ReturnSuccess {
|
||||
/// A value to be used or shown to the user
|
||||
Value(Value),
|
||||
/// A debug-enabled value to be used or shown to the user
|
||||
DebugValue(Value),
|
||||
/// An action to be performed as values pass out of the command. These are performed rather than passed to the next command in the pipeline
|
||||
Action(CommandAction),
|
||||
}
|
||||
|
||||
impl PrettyDebug for ReturnSuccess {
|
||||
/// Get a return success ready to be pretty-printed
|
||||
fn pretty(&self) -> DebugDocBuilder {
|
||||
match self {
|
||||
ReturnSuccess::Value(value) => b::typed("value", value.pretty()),
|
||||
ReturnSuccess::DebugValue(value) => b::typed("debug value", value.pretty()),
|
||||
ReturnSuccess::Action(action) => b::typed("action", action.pretty()),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// The core Result type for pipelines
|
||||
pub type ReturnValue = Result<ReturnSuccess, ShellError>;
|
||||
|
||||
impl From<Value> for ReturnValue {
|
||||
fn from(v: Value) -> Self {
|
||||
Ok(ReturnSuccess::Value(v))
|
||||
}
|
||||
}
|
||||
|
||||
impl ReturnSuccess {
|
||||
/// Get to the contained Value, if possible
|
||||
pub fn raw_value(&self) -> Option<Value> {
|
||||
match self {
|
||||
ReturnSuccess::Value(raw) => Some(raw.clone()),
|
||||
ReturnSuccess::DebugValue(raw) => Some(raw.clone()),
|
||||
ReturnSuccess::Action(_) => None,
|
||||
}
|
||||
}
|
||||
|
||||
/// Helper function for an action to change the path
|
||||
pub fn change_cwd(path: String) -> ReturnValue {
|
||||
Ok(ReturnSuccess::Action(CommandAction::ChangePath(path)))
|
||||
}
|
||||
|
||||
/// Helper function to create simple values for returning
|
||||
pub fn value(input: impl Into<Value>) -> ReturnValue {
|
||||
Ok(ReturnSuccess::Value(input.into()))
|
||||
}
|
||||
|
||||
/// Helper function to create simple debug-enabled values for returning
|
||||
pub fn debug_value(input: impl Into<Value>) -> ReturnValue {
|
||||
Ok(ReturnSuccess::DebugValue(input.into()))
|
||||
}
|
||||
|
||||
/// Helper function for creating actions
|
||||
pub fn action(input: CommandAction) -> ReturnValue {
|
||||
Ok(ReturnSuccess::Action(input))
|
||||
}
|
||||
}
|
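Commands typically produce `ReturnValue`s through these helpers; a minimal illustrative sketch (the values and path are made up):

```rust
use nu_protocol::{CommandAction, ReturnSuccess, ReturnValue, UntaggedValue};
use nu_source::Tag;

fn demo() -> Vec<ReturnValue> {
    vec![
        // Emit a plain value into the pipeline.
        ReturnSuccess::value(UntaggedValue::string("hello").into_value(Tag::unknown())),
        // Ask the processor to change directory instead of emitting data.
        ReturnSuccess::action(CommandAction::ChangePath("/tmp".to_string())),
    ]
}
```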
@@ -1,70 +1,81 @@
// TODO: Temporary redirect
|
||||
pub(crate) use crate::context::CommandRegistry;
|
||||
use crate::evaluate::{evaluate_baseline_expr, Scope};
|
||||
use crate::parser::{hir, hir::SyntaxShape};
|
||||
use crate::prelude::*;
|
||||
use derive_new::new;
|
||||
use crate::syntax_shape::SyntaxShape;
|
||||
use crate::type_shape::Type;
|
||||
use indexmap::IndexMap;
|
||||
|
||||
use nu_source::{b, DebugDocBuilder, PrettyDebug, PrettyDebugWithSource};
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
/// The types of named parameter that a command can have
|
||||
#[derive(Debug, Serialize, Deserialize, Clone)]
|
||||
pub enum NamedType {
|
||||
/// A flag without any associated argument. eg) `foo --bar`
|
||||
Switch,
|
||||
/// A mandatory flag, with associated argument. eg) `foo --required xyz`
|
||||
Mandatory(SyntaxShape),
|
||||
/// An optional flag, with associated argument. eg) `foo --optional abc`
|
||||
Optional(SyntaxShape),
|
||||
}
|
||||
|
||||
/// The type of positional arguments
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
pub enum PositionalType {
|
||||
/// A mandatory positional argument with the expected shape of the value
|
||||
Mandatory(String, SyntaxShape),
|
||||
/// An optional positional argument with the expected shape of the value
|
||||
Optional(String, SyntaxShape),
|
||||
}
|
||||
|
||||
impl PrettyDebug for PositionalType {
|
||||
/// Prepare the PositionalType for pretty-printing
|
||||
fn pretty(&self) -> DebugDocBuilder {
|
||||
match self {
|
||||
PositionalType::Mandatory(string, shape) => {
|
||||
b::description(string) + b::delimit("(", shape.pretty(), ")").as_kind().group()
|
||||
b::description(string) + b::delimit("(", shape.pretty(), ")").into_kind().group()
|
||||
}
|
||||
PositionalType::Optional(string, shape) => {
|
||||
b::description(string)
|
||||
+ b::operator("?")
|
||||
+ b::delimit("(", shape.pretty(), ")").as_kind().group()
|
||||
+ b::delimit("(", shape.pretty(), ")").into_kind().group()
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl PositionalType {
|
||||
/// Helper to create a mandatory positional argument type
|
||||
pub fn mandatory(name: &str, ty: SyntaxShape) -> PositionalType {
|
||||
PositionalType::Mandatory(name.to_string(), ty)
|
||||
}
|
||||
|
||||
/// Helper to create a mandatory positional argument with an "any" type
|
||||
pub fn mandatory_any(name: &str) -> PositionalType {
|
||||
PositionalType::Mandatory(name.to_string(), SyntaxShape::Any)
|
||||
}
|
||||
|
||||
/// Helper to create a mandatory positional argument with a block type
|
||||
pub fn mandatory_block(name: &str) -> PositionalType {
|
||||
PositionalType::Mandatory(name.to_string(), SyntaxShape::Block)
|
||||
}
|
||||
|
||||
/// Helper to create a optional positional argument type
|
||||
pub fn optional(name: &str, ty: SyntaxShape) -> PositionalType {
|
||||
PositionalType::Optional(name.to_string(), ty)
|
||||
}
|
||||
|
||||
/// Helper to create a optional positional argument with an "any" type
|
||||
pub fn optional_any(name: &str) -> PositionalType {
|
||||
PositionalType::Optional(name.to_string(), SyntaxShape::Any)
|
||||
}
|
||||
|
||||
pub(crate) fn name(&self) -> &str {
|
||||
/// Gets the name of the positional argument
|
||||
pub fn name(&self) -> &str {
|
||||
match self {
|
||||
PositionalType::Mandatory(s, _) => s,
|
||||
PositionalType::Optional(s, _) => s,
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn syntax_type(&self) -> SyntaxShape {
|
||||
/// Gets the expected type of a positional argument
|
||||
pub fn syntax_type(&self) -> SyntaxShape {
|
||||
match *self {
|
||||
PositionalType::Mandatory(_, t) => t,
|
||||
PositionalType::Optional(_, t) => t,
|
||||
@@ -74,17 +85,60 @@ impl PositionalType {
|
||||
type Description = String;
|
||||
|
||||
/// The full signature of a command. All commands have a signature similar to a function signature.
|
||||
/// Commands will use this information to register themselves with Nu's core engine so that the command
|
||||
/// can be invoked, help can be displayed, and calls to the command can be error-checked.
|
||||
#[derive(Debug, Serialize, Deserialize, Clone)]
|
||||
pub struct Signature {
|
||||
/// The name of the command. Used when calling the command
|
||||
pub name: String,
|
||||
/// Usage instructions about the command
|
||||
pub usage: String,
|
||||
/// The list of positional arguments, both required and optional, and their corresponding types and help text
|
||||
pub positional: Vec<(PositionalType, Description)>,
|
||||
/// After the positional arguments, a catch-all for the rest of the arguments that might follow, their type, and help text
|
||||
pub rest_positional: Option<(SyntaxShape, Description)>,
|
||||
/// The named flags with corresponding type and help text
|
||||
pub named: IndexMap<String, (NamedType, Description)>,
|
||||
/// The type of values being sent out from the command into the pipeline, if any
|
||||
pub yields: Option<Type>,
|
||||
/// The type of values being read in from the pipeline into the command, if any
|
||||
pub input: Option<Type>,
|
||||
/// If the command is expected to filter data, or to consume it (as a sink)
|
||||
pub is_filter: bool,
|
||||
}
|
||||
|
||||
impl Signature {
|
||||
pub fn shift_positional(&mut self) {
|
||||
self.positional = Vec::from(&self.positional[1..]);
|
||||
}
|
||||
|
||||
pub fn remove_named(&mut self, name: &str) {
|
||||
self.named.remove(name);
|
||||
}
|
||||
|
||||
pub fn allowed(&self) -> Vec<String> {
|
||||
let mut allowed = indexmap::IndexSet::new();
|
||||
|
||||
for (name, _) in &self.named {
|
||||
allowed.insert(format!("--{}", name));
|
||||
}
|
||||
|
||||
for (ty, _) in &self.positional {
|
||||
let shape = ty.syntax_type();
|
||||
allowed.insert(shape.display());
|
||||
}
|
||||
|
||||
if let Some((shape, _)) = &self.rest_positional {
|
||||
allowed.insert(shape.display());
|
||||
}
|
||||
|
||||
allowed.into_iter().collect()
|
||||
}
|
||||
}
|
||||
|
||||
impl PrettyDebugWithSource for Signature {
|
||||
/// Prepare a Signature for pretty-printing
|
||||
fn pretty_debug(&self, source: &str) -> DebugDocBuilder {
|
||||
b::typed(
|
||||
"signature",
|
||||
@@ -103,26 +157,32 @@ impl PrettyDebugWithSource for Signature {
}
|
||||
|
||||
impl Signature {
|
||||
pub fn new(name: String) -> Signature {
|
||||
/// Create a new command signature with the given name
|
||||
pub fn new(name: impl Into<String>) -> Signature {
|
||||
Signature {
|
||||
name,
|
||||
name: name.into(),
|
||||
usage: String::new(),
|
||||
positional: vec![],
|
||||
rest_positional: None,
|
||||
named: IndexMap::new(),
|
||||
named: indexmap::indexmap! {"help".into() => (NamedType::Switch, "Display this help message".into())},
|
||||
is_filter: false,
|
||||
yields: None,
|
||||
input: None,
|
||||
}
|
||||
}
|
||||
|
||||
/// Create a new signature
|
||||
pub fn build(name: impl Into<String>) -> Signature {
|
||||
Signature::new(name.into())
|
||||
}
|
||||
|
||||
/// Add a description to the signature
|
||||
pub fn desc(mut self, usage: impl Into<String>) -> Signature {
|
||||
self.usage = usage.into();
|
||||
self
|
||||
}
|
||||
|
||||
/// Add a required positional argument to the signature
|
||||
pub fn required(
|
||||
mut self,
|
||||
name: impl Into<String>,
|
||||
@@ -137,6 +197,7 @@ impl Signature {
self
|
||||
}
|
||||
|
||||
/// Add an optional positional argument to the signature
|
||||
pub fn optional(
|
||||
mut self,
|
||||
name: impl Into<String>,
|
||||
@@ -151,6 +212,7 @@ impl Signature {
self
|
||||
}
|
||||
|
||||
/// Add an optional named flag argument to the signature
|
||||
pub fn named(
|
||||
mut self,
|
||||
name: impl Into<String>,
|
||||
@@ -163,6 +225,7 @@ impl Signature {
self
|
||||
}
|
||||
|
||||
/// Add a required named flag argument to the signature
|
||||
pub fn required_named(
|
||||
mut self,
|
||||
name: impl Into<String>,
|
||||
@@ -175,152 +238,34 @@ impl Signature {
self
|
||||
}
|
||||
|
||||
/// Add a switch to the signature
|
||||
pub fn switch(mut self, name: impl Into<String>, desc: impl Into<String>) -> Signature {
|
||||
self.named
|
||||
.insert(name.into(), (NamedType::Switch, desc.into()));
|
||||
|
||||
self
|
||||
}
|
||||
|
||||
/// Set the filter flag for the signature
|
||||
pub fn filter(mut self) -> Signature {
|
||||
self.is_filter = true;
|
||||
self
|
||||
}
|
||||
|
||||
/// Set the type for the "rest" of the positional arguments
|
||||
pub fn rest(mut self, ty: SyntaxShape, desc: impl Into<String>) -> Signature {
|
||||
self.rest_positional = Some((ty, desc.into()));
|
||||
self
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Default, new, Serialize, Deserialize, Clone)]
|
||||
pub struct EvaluatedArgs {
|
||||
pub positional: Option<Vec<Value>>,
|
||||
pub named: Option<IndexMap<String, Value>>,
|
||||
}
|
||||
|
||||
impl EvaluatedArgs {
|
||||
pub fn slice_from(&self, from: usize) -> Vec<Value> {
|
||||
let positional = &self.positional;
|
||||
|
||||
match positional {
|
||||
None => vec![],
|
||||
Some(list) => list[from..].to_vec(),
|
||||
/// Add a type for the output of the command to the signature
|
||||
pub fn yields(mut self, ty: Type) -> Signature {
|
||||
self.yields = Some(ty);
|
||||
self
|
||||
}
|
||||
|
||||
/// Add a type for the input of the command to the signature
|
||||
pub fn input(mut self, ty: Type) -> Signature {
|
||||
self.input = Some(ty);
|
||||
self
|
||||
}
|
||||
}
|
||||
|
||||
impl EvaluatedArgs {
|
||||
pub fn nth(&self, pos: usize) -> Option<&Value> {
|
||||
match &self.positional {
|
||||
None => None,
|
||||
Some(array) => array.iter().nth(pos),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn expect_nth(&self, pos: usize) -> Result<&Value, ShellError> {
|
||||
match &self.positional {
|
||||
None => Err(ShellError::unimplemented("Better error: expect_nth")),
|
||||
Some(array) => match array.iter().nth(pos) {
|
||||
None => Err(ShellError::unimplemented("Better error: expect_nth")),
|
||||
Some(item) => Ok(item),
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
pub fn len(&self) -> usize {
|
||||
match &self.positional {
|
||||
None => 0,
|
||||
Some(array) => array.len(),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn has(&self, name: &str) -> bool {
|
||||
match &self.named {
|
||||
None => false,
|
||||
Some(named) => named.contains_key(name),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn get(&self, name: &str) -> Option<&Value> {
|
||||
match &self.named {
|
||||
None => None,
|
||||
Some(named) => named.get(name),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn positional_iter(&self) -> PositionalIter<'_> {
|
||||
match &self.positional {
|
||||
None => PositionalIter::Empty,
|
||||
Some(v) => {
|
||||
let iter = v.iter();
|
||||
PositionalIter::Array(iter)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub enum PositionalIter<'a> {
|
||||
Empty,
|
||||
Array(std::slice::Iter<'a, Value>),
|
||||
}
|
||||
|
||||
impl<'a> Iterator for PositionalIter<'a> {
|
||||
type Item = &'a Value;
|
||||
|
||||
fn next(&mut self) -> Option<Self::Item> {
|
||||
match self {
|
||||
PositionalIter::Empty => None,
|
||||
PositionalIter::Array(iter) => iter.next(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn evaluate_args(
|
||||
call: &hir::Call,
|
||||
registry: &CommandRegistry,
|
||||
scope: &Scope,
|
||||
source: &Text,
|
||||
) -> Result<EvaluatedArgs, ShellError> {
|
||||
let positional: Result<Option<Vec<_>>, _> = call
|
||||
.positional()
|
||||
.as_ref()
|
||||
.map(|p| {
|
||||
p.iter()
|
||||
.map(|e| evaluate_baseline_expr(e, registry, scope, source))
|
||||
.collect()
|
||||
})
|
||||
.transpose();
|
||||
|
||||
let positional = positional?;
|
||||
|
||||
let named: Result<Option<IndexMap<String, Value>>, ShellError> = call
|
||||
.named()
|
||||
.as_ref()
|
||||
.map(|n| {
|
||||
let mut results = IndexMap::new();
|
||||
|
||||
for (name, value) in n.named.iter() {
|
||||
match value {
|
||||
hir::named::NamedValue::PresentSwitch(tag) => {
|
||||
results.insert(name.clone(), UntaggedValue::boolean(true).into_value(tag));
|
||||
}
|
||||
hir::named::NamedValue::Value(expr) => {
|
||||
results.insert(
|
||||
name.clone(),
|
||||
evaluate_baseline_expr(expr, registry, scope, source)?,
|
||||
);
|
||||
}
|
||||
|
||||
_ => {}
|
||||
};
|
||||
}
|
||||
|
||||
Ok(results)
|
||||
})
|
||||
.transpose();
|
||||
|
||||
let named = named?;
|
||||
|
||||
Ok(EvaluatedArgs::new(positional, named))
|
||||
}
|
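Taken together, the builder methods above are meant to be chained; the sketch below is illustrative (the command name, descriptions, and types are made up) and only uses methods whose full signatures appear in this diff.

```rust
use nu_protocol::{Signature, SyntaxShape, Type};

// Assemble an illustrative command signature.
fn demo_signature() -> Signature {
    Signature::build("my-command")
        .desc("An illustrative command signature")
        .switch("verbose", "print extra diagnostics")
        .rest(SyntaxShape::String, "any remaining arguments")
        .yields(Type::String)
        .input(Type::String)
        .filter()
}
```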
crates/nu-protocol/src/syntax_shape.rs (new file, 45 lines)
@@ -0,0 +1,45 @@
use nu_source::{b, DebugDocBuilder, PrettyDebug};
use serde::{Deserialize, Serialize};

/// The syntactic shapes that values must match to be passed into a command. You can think of this as the type-checking that occurs when you call a function.
#[derive(Debug, Copy, Clone, Serialize, Deserialize)]
pub enum SyntaxShape {
    /// Any syntactic form is allowed
    Any,
    /// Strings and string-like bare words are allowed
    String,
    /// Values that can be the right hand side of a '.'
    Member,
    /// A dotted path to navigate the table
    ColumnPath,
    /// Only a numeric (integer or decimal) value is allowed
    Number,
    /// A range is allowed (eg, `1..3`)
    Range,
    /// Only an integer value is allowed
    Int,
    /// A filepath is allowed
    Path,
    /// A glob pattern is allowed, eg `foo*`
    Pattern,
    /// A block is allowed, eg `{start this thing}`
    Block,
}

impl PrettyDebug for SyntaxShape {
    /// Prepare SyntaxShape for pretty-printing
    fn pretty(&self) -> DebugDocBuilder {
        b::kind(match self {
            SyntaxShape::Any => "any",
            SyntaxShape::String => "string",
            SyntaxShape::Member => "member",
            SyntaxShape::ColumnPath => "column path",
            SyntaxShape::Number => "number",
            SyntaxShape::Range => "range",
            SyntaxShape::Int => "integer",
            SyntaxShape::Path => "file path",
            SyntaxShape::Pattern => "pattern",
            SyntaxShape::Block => "block",
        })
    }
}
@@ -1,38 +1,44 @@
use crate::prelude::*;
|
||||
use nu_source::{DebugDocBuilder, Spanned, SpannedItem, Tagged};
|
||||
use nu_source::{DebugDocBuilder, HasSpan, Spanned, SpannedItem, Tagged};
|
||||
|
||||
/// A trait that allows structures to define a known .type_name() which pretty-prints the type
|
||||
pub trait ShellTypeName {
|
||||
fn type_name(&self) -> &'static str;
|
||||
}
|
||||
|
||||
impl<T: ShellTypeName> ShellTypeName for Spanned<T> {
|
||||
/// Return the type_name of the spanned item
|
||||
fn type_name(&self) -> &'static str {
|
||||
self.item.type_name()
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: ShellTypeName> ShellTypeName for &T {
|
||||
/// Return the type_name for the borrowed reference
|
||||
fn type_name(&self) -> &'static str {
|
||||
(*self).type_name()
|
||||
}
|
||||
}
|
||||
|
||||
/// A trait that allows structures to define a known way to return a spanned type name
|
||||
pub trait SpannedTypeName {
|
||||
fn spanned_type_name(&self) -> Spanned<&'static str>;
|
||||
}
|
||||
|
||||
impl<T: ShellTypeName + HasSpan> SpannedTypeName for T {
|
||||
/// Return the type name as a spanned string
|
||||
fn spanned_type_name(&self) -> Spanned<&'static str> {
|
||||
self.type_name().spanned(self.span())
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: ShellTypeName> SpannedTypeName for Tagged<T> {
|
||||
/// Return the spanned type name for a Tagged value
|
||||
fn spanned_type_name(&self) -> Spanned<&'static str> {
|
||||
self.item.type_name().spanned(self.tag.span)
|
||||
}
|
||||
}
|
||||
|
||||
/// A trait to enable pretty-printing of type information
|
||||
pub trait PrettyType {
|
||||
fn pretty_type(&self) -> DebugDocBuilder;
|
||||
}
|
crates/nu-protocol/src/type_shape.rs (new file, 408 lines)
@@ -0,0 +1,408 @@
///
|
||||
/// This file describes the structural types of the nushell system.
|
||||
///
|
||||
/// Its primary purpose today is to identify "equivalent" values for the purpose
|
||||
/// of merging rows into a single table or identify rows in a table that have the
|
||||
/// same shape for reflection.
|
||||
use crate::value::dict::Dictionary;
|
||||
use crate::value::primitive::Primitive;
|
||||
use crate::value::range::RangeInclusion;
|
||||
use crate::value::{UntaggedValue, Value};
|
||||
use derive_new::new;
|
||||
use nu_source::{b, DebugDoc, DebugDocBuilder, PrettyDebug};
|
||||
use serde::{Deserialize, Deserializer, Serialize};
|
||||
use std::collections::BTreeMap;
|
||||
use std::fmt::Debug;
|
||||
use std::hash::Hash;
|
||||
|
||||
/// Representation of the type of ranges
|
||||
#[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize, new)]
|
||||
pub struct RangeType {
|
||||
from: (Type, RangeInclusion),
|
||||
to: (Type, RangeInclusion),
|
||||
}
|
||||
|
||||
/// Representation of the type of a value in Nu
|
||||
#[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)]
|
||||
pub enum Type {
|
||||
/// A value which has no value
|
||||
Nothing,
|
||||
/// An integer-based value
|
||||
Int,
|
||||
/// A range between two values
|
||||
Range(Box<RangeType>),
|
||||
/// A decimal (floating point) value
|
||||
Decimal,
|
||||
/// A filesize in bytes
|
||||
Bytesize,
|
||||
/// A string of text
|
||||
String,
|
||||
/// A line of text (a string with trailing line ending)
|
||||
Line,
|
||||
/// A path through a table
|
||||
ColumnPath,
|
||||
/// A glob pattern (like foo*)
|
||||
Pattern,
|
||||
/// A boolean value
|
||||
Boolean,
|
||||
/// A date value (in UTC)
|
||||
Date,
|
||||
/// A data duration value
|
||||
Duration,
|
||||
/// A filepath value
|
||||
Path,
|
||||
/// A binary (non-text) buffer value
|
||||
Binary,
|
||||
|
||||
/// A row of data
|
||||
Row(Row),
|
||||
/// A full table of data
|
||||
Table(Vec<Type>),
|
||||
|
||||
/// A block of script (TODO)
|
||||
Block,
|
||||
/// An error value (TODO)
|
||||
Error,
|
||||
|
||||
/// Beginning of stream marker (used as bookend markers rather than actual values)
|
||||
BeginningOfStream,
|
||||
/// End of stream marker (used as bookend markers rather than actual values)
|
||||
EndOfStream,
|
||||
}
|
||||
|
||||
/// A shape representation of the type of a row
|
||||
#[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Hash, new)]
|
||||
pub struct Row {
|
||||
#[new(default)]
|
||||
map: BTreeMap<Column, Type>,
|
||||
}
|
||||
|
||||
impl Serialize for Row {
|
||||
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
|
||||
where
|
||||
S: serde::Serializer,
|
||||
{
|
||||
serializer.collect_map(self.map.iter())
|
||||
}
|
||||
}
|
||||
|
||||
impl<'de> Deserialize<'de> for Row {
|
||||
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
|
||||
where
|
||||
D: Deserializer<'de>,
|
||||
{
|
||||
struct RowVisitor;
|
||||
|
||||
impl<'de> serde::de::Visitor<'de> for RowVisitor {
|
||||
type Value = Row;
|
||||
fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result {
|
||||
write!(formatter, "a row")
|
||||
}
|
||||
|
||||
fn visit_map<A>(self, mut map: A) -> Result<Self::Value, A::Error>
|
||||
where
|
||||
A: serde::de::MapAccess<'de>,
|
||||
{
|
||||
let mut new_map = BTreeMap::new();
|
||||
|
||||
loop {
|
||||
let entry = map.next_entry()?;
|
||||
|
||||
match entry {
|
||||
None => return Ok(Row { map: new_map }),
|
||||
Some((key, value)) => {
|
||||
new_map.insert(key, value);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
deserializer.deserialize_map(RowVisitor)
|
||||
}
|
||||
}
|
||||
|
||||
impl Type {
|
||||
/// Convert a Primitive into its corresponding Type
|
||||
pub fn from_primitive(primitive: &Primitive) -> Type {
|
||||
match primitive {
|
||||
Primitive::Nothing => Type::Nothing,
|
||||
Primitive::Int(_) => Type::Int,
|
||||
Primitive::Range(range) => {
|
||||
let (left_value, left_inclusion) = &range.from;
|
||||
let (right_value, right_inclusion) = &range.to;
|
||||
|
||||
let left_type = (Type::from_primitive(left_value), *left_inclusion);
|
||||
let right_type = (Type::from_primitive(right_value), *right_inclusion);
|
||||
|
||||
let range = RangeType::new(left_type, right_type);
|
||||
Type::Range(Box::new(range))
|
||||
}
|
||||
Primitive::Decimal(_) => Type::Decimal,
|
||||
Primitive::Bytes(_) => Type::Bytesize,
|
||||
Primitive::String(_) => Type::String,
|
||||
Primitive::Line(_) => Type::Line,
|
||||
Primitive::ColumnPath(_) => Type::ColumnPath,
|
||||
Primitive::Pattern(_) => Type::Pattern,
|
||||
Primitive::Boolean(_) => Type::Boolean,
|
||||
Primitive::Date(_) => Type::Date,
|
||||
Primitive::Duration(_) => Type::Duration,
|
||||
Primitive::Path(_) => Type::Path,
|
||||
Primitive::Binary(_) => Type::Binary,
|
||||
Primitive::BeginningOfStream => Type::BeginningOfStream,
|
||||
Primitive::EndOfStream => Type::EndOfStream,
|
||||
}
|
||||
}
|
||||
|
||||
/// Convert a dictionary into its corresponding row Type
|
||||
pub fn from_dictionary(dictionary: &Dictionary) -> Type {
|
||||
let mut map = BTreeMap::new();
|
||||
|
||||
for (key, value) in dictionary.entries.iter() {
|
||||
let column = Column::String(key.clone());
|
||||
map.insert(column, Type::from_value(value));
|
||||
}
|
||||
|
||||
Type::Row(Row { map })
|
||||
}
|
||||
|
||||
/// Convert a table into its corresponding Type
|
||||
pub fn from_table<'a>(table: impl IntoIterator<Item = &'a Value>) -> Type {
|
||||
let mut vec = vec![];
|
||||
|
||||
for item in table.into_iter() {
|
||||
vec.push(Type::from_value(item))
|
||||
}
|
||||
|
||||
Type::Table(vec)
|
||||
}
|
||||
|
||||
/// Convert a value into its corresponding Type
|
||||
pub fn from_value<'a>(value: impl Into<&'a UntaggedValue>) -> Type {
|
||||
match value.into() {
|
||||
UntaggedValue::Primitive(p) => Type::from_primitive(p),
|
||||
UntaggedValue::Row(row) => Type::from_dictionary(row),
|
||||
UntaggedValue::Table(table) => Type::from_table(table.iter()),
|
||||
UntaggedValue::Error(_) => Type::Error,
|
||||
UntaggedValue::Block(_) => Type::Block,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl PrettyDebug for Type {
|
||||
/// Prepare Type for pretty-printing
|
||||
fn pretty(&self) -> DebugDocBuilder {
|
||||
match self {
|
||||
Type::Nothing => ty("nothing"),
|
||||
Type::Int => ty("integer"),
|
||||
Type::Range(range) => {
|
||||
let (left, left_inclusion) = &range.from;
|
||||
let (right, right_inclusion) = &range.to;
|
||||
|
||||
let left_bracket = b::delimiter(match left_inclusion {
|
||||
RangeInclusion::Exclusive => "(",
|
||||
RangeInclusion::Inclusive => "[",
|
||||
});
|
||||
|
||||
let right_bracket = b::delimiter(match right_inclusion {
|
||||
RangeInclusion::Exclusive => ")",
|
||||
RangeInclusion::Inclusive => "]",
|
||||
});
|
||||
|
||||
b::typed(
|
||||
"range",
|
||||
(left_bracket
|
||||
+ left.pretty()
|
||||
+ b::operator(",")
|
||||
+ b::space()
|
||||
+ right.pretty()
|
||||
+ right_bracket)
|
||||
.group(),
|
||||
)
|
||||
}
|
||||
Type::Decimal => ty("decimal"),
|
||||
Type::Bytesize => ty("bytesize"),
|
||||
Type::String => ty("string"),
|
||||
Type::Line => ty("line"),
|
||||
Type::ColumnPath => ty("column-path"),
|
||||
Type::Pattern => ty("pattern"),
|
||||
Type::Boolean => ty("boolean"),
|
||||
Type::Date => ty("date"),
|
||||
Type::Duration => ty("duration"),
|
||||
Type::Path => ty("path"),
|
||||
Type::Binary => ty("binary"),
|
||||
Type::Error => b::error("error"),
|
||||
Type::BeginningOfStream => b::keyword("beginning-of-stream"),
|
||||
Type::EndOfStream => b::keyword("end-of-stream"),
|
||||
Type::Row(row) => (b::kind("row")
|
||||
+ b::space()
|
||||
+ b::intersperse(
|
||||
row.map.iter().map(|(key, ty)| {
|
||||
(b::key(match key {
|
||||
Column::String(string) => string.clone(),
|
||||
Column::Value => "<value>".to_string(),
|
||||
}) + b::delimit("(", ty.pretty(), ")").into_kind())
|
||||
.nest()
|
||||
}),
|
||||
b::space(),
|
||||
)
|
||||
.nest())
|
||||
.nest(),
|
||||
|
||||
Type::Table(table) => {
|
||||
let mut group: Group<DebugDoc, Vec<(usize, usize)>> = Group::new();
|
||||
|
||||
for (i, item) in table.iter().enumerate() {
|
||||
group.add(item.to_doc(), i);
|
||||
}
|
||||
|
||||
(b::kind("table") + b::space() + b::keyword("of")).group()
|
||||
+ b::space()
|
||||
+ (if group.len() == 1 {
|
||||
let (doc, _) = group.into_iter().collect::<Vec<_>>()[0].clone();
|
||||
DebugDocBuilder::from_doc(doc)
|
||||
} else {
|
||||
b::intersperse(
|
||||
group.into_iter().map(|(doc, rows)| {
|
||||
(b::intersperse(
|
||||
rows.iter().map(|(from, to)| {
|
||||
if from == to {
|
||||
b::description(from)
|
||||
} else {
|
||||
(b::description(from)
|
||||
+ b::space()
|
||||
+ b::keyword("to")
|
||||
+ b::space()
|
||||
+ b::description(to))
|
||||
.group()
|
||||
}
|
||||
}),
|
||||
b::description(", "),
|
||||
) + b::description(":")
|
||||
+ b::space()
|
||||
+ DebugDocBuilder::from_doc(doc))
|
||||
.nest()
|
||||
}),
|
||||
b::space(),
|
||||
)
|
||||
})
|
||||
}
|
||||
Type::Block => ty("block"),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// A view into dictionaries for debug purposes
|
||||
#[derive(Debug, new)]
|
||||
struct DebugEntry<'a> {
|
||||
key: &'a Column,
|
||||
value: &'a Type,
|
||||
}
|
||||
|
||||
impl<'a> PrettyDebug for DebugEntry<'a> {
|
||||
/// Prepare debug entries for pretty-printing
|
||||
fn pretty(&self) -> DebugDocBuilder {
|
||||
(b::key(match self.key {
|
||||
Column::String(string) => string.clone(),
|
||||
Column::Value => "<value>".to_string(),
|
||||
}) + b::delimit("(", self.value.pretty(), ")").into_kind())
|
||||
}
|
||||
}
|
||||
|
||||
/// Helper to create a pretty-print for the type
|
||||
fn ty(name: impl std::fmt::Display) -> DebugDocBuilder {
|
||||
b::kind(format!("{}", name))
|
||||
}
|
||||
|
||||
pub trait GroupedValue: Debug + Clone {
|
||||
type Item;
|
||||
|
||||
fn new() -> Self;
|
||||
fn merge(&mut self, value: Self::Item);
|
||||
}
|
||||
|
||||
impl GroupedValue for Vec<(usize, usize)> {
|
||||
type Item = usize;
|
||||
|
||||
fn new() -> Vec<(usize, usize)> {
|
||||
vec![]
|
||||
}
|
||||
|
||||
fn merge(&mut self, new_value: usize) {
|
||||
match self.last_mut() {
|
||||
Some(value) if value.1 == new_value - 1 => {
|
||||
value.1 += 1;
|
||||
}
|
||||
|
||||
_ => self.push((new_value, new_value)),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct Group<K: Debug + Eq + Hash, V: GroupedValue> {
|
||||
values: indexmap::IndexMap<K, V>,
|
||||
}
|
||||
|
||||
impl<K, G> Group<K, G>
|
||||
where
|
||||
K: Debug + Eq + Hash,
|
||||
G: GroupedValue,
|
||||
{
|
||||
pub fn new() -> Group<K, G> {
|
||||
Group {
|
||||
values: indexmap::IndexMap::default(),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn len(&self) -> usize {
|
||||
self.values.len()
|
||||
}
|
||||
|
||||
pub fn into_iter(self) -> impl Iterator<Item = (K, G)> {
|
||||
self.values.into_iter()
|
||||
}
|
||||
|
||||
pub fn add(&mut self, key: impl Into<K>, value: impl Into<G::Item>) {
|
||||
let key = key.into();
|
||||
let value = value.into();
|
||||
|
||||
let group = self.values.get_mut(&key);
|
||||
|
||||
match group {
|
||||
None => {
|
||||
self.values.insert(key, {
|
||||
let mut group = G::new();
|
||||
group.merge(value);
|
||||
group
|
||||
});
|
||||
}
|
||||
Some(group) => {
|
||||
group.merge(value);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Serialize, Deserialize, Hash)]
|
||||
pub enum Column {
|
||||
String(String),
|
||||
Value,
|
||||
}
|
||||
|
||||
impl Into<Column> for String {
|
||||
fn into(self) -> Column {
|
||||
Column::String(self)
|
||||
}
|
||||
}
|
||||
|
||||
impl Into<Column> for &String {
|
||||
fn into(self) -> Column {
|
||||
Column::String(self.clone())
|
||||
}
|
||||
}
|
||||
|
||||
impl Into<Column> for &str {
|
||||
fn into(self) -> Column {
|
||||
Column::String(self.to_string())
|
||||
}
|
||||
}
|
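A short illustrative sketch of recovering a `Type` from a runtime value using the conversions defined above:

```rust
use nu_protocol::{Type, UntaggedValue};
use nu_source::Tag;

fn demo() {
    let value = UntaggedValue::string("hello").into_value(Tag::unknown());

    // A &Value converts into a &UntaggedValue, so from_value accepts it directly.
    let shape = Type::from_value(&value);
    assert_eq!(shape, Type::String);
}
```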
crates/nu-protocol/src/value.rs (new file, 375 lines)
@@ -0,0 +1,375 @@
pub mod column_path;
|
||||
mod convert;
|
||||
mod debug;
|
||||
pub mod dict;
|
||||
pub mod evaluate;
|
||||
pub mod primitive;
|
||||
pub mod range;
|
||||
mod serde_bigdecimal;
|
||||
mod serde_bigint;
|
||||
|
||||
use crate::type_name::{ShellTypeName, SpannedTypeName};
|
||||
use crate::value::dict::Dictionary;
|
||||
use crate::value::evaluate::Evaluate;
|
||||
use crate::value::primitive::Primitive;
|
||||
use crate::value::range::{Range, RangeInclusion};
|
||||
use crate::{ColumnPath, PathMember};
|
||||
use bigdecimal::BigDecimal;
|
||||
use indexmap::IndexMap;
|
||||
use nu_errors::ShellError;
|
||||
use nu_source::{AnchorLocation, HasSpan, Span, Spanned, Tag};
|
||||
use num_bigint::BigInt;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::path::PathBuf;
|
||||
use std::time::SystemTime;
|
||||
|
||||
/// The core structured values that flow through a pipeline
|
||||
#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash, Serialize, Deserialize)]
|
||||
pub enum UntaggedValue {
|
||||
/// A primitive (or fundamental) type of values
|
||||
Primitive(Primitive),
|
||||
/// A table row
|
||||
Row(Dictionary),
|
||||
/// A full inner (or embedded) table
|
||||
Table(Vec<Value>),
|
||||
|
||||
/// An error value that represents an error that occurred as the values in the pipeline were built
|
||||
Error(ShellError),
|
||||
|
||||
/// A block of Nu code, eg `{ ls | get name }`
|
||||
Block(Evaluate),
|
||||
}
|
||||
|
||||
impl UntaggedValue {
|
||||
/// Tags an UntaggedValue so that it can become a Value
|
||||
pub fn retag(self, tag: impl Into<Tag>) -> Value {
|
||||
Value {
|
||||
value: self,
|
||||
tag: tag.into(),
|
||||
}
|
||||
}
|
||||
|
||||
/// Get the corresponding descriptors (column names) associated with this value
|
||||
pub fn data_descriptors(&self) -> Vec<String> {
|
||||
match self {
|
||||
UntaggedValue::Primitive(_) => vec![],
|
||||
UntaggedValue::Row(columns) => columns.entries.keys().map(|x| x.to_string()).collect(),
|
||||
UntaggedValue::Block(_) => vec![],
|
||||
UntaggedValue::Table(_) => vec![],
|
||||
UntaggedValue::Error(_) => vec![],
|
||||
}
|
||||
}
|
||||
|
||||
/// Convert this UntaggedValue to a Value with the given Tag
|
||||
pub fn into_value(self, tag: impl Into<Tag>) -> Value {
|
||||
Value {
|
||||
value: self,
|
||||
tag: tag.into(),
|
||||
}
|
||||
}
|
||||
|
||||
/// Convert this UntaggedValue into a Value with an empty Tag
|
||||
pub fn into_untagged_value(self) -> Value {
|
||||
Value {
|
||||
value: self,
|
||||
tag: Tag::unknown(),
|
||||
}
|
||||
}
|
||||
|
||||
/// Returns true if this value represents boolean true
|
||||
pub fn is_true(&self) -> bool {
|
||||
match self {
|
||||
UntaggedValue::Primitive(Primitive::Boolean(true)) => true,
|
||||
_ => false,
|
||||
}
|
||||
}
|
||||
|
||||
/// Returns true if the value represents something other than Nothing
|
||||
pub fn is_some(&self) -> bool {
|
||||
!self.is_none()
|
||||
}
|
||||
|
||||
/// Returns true if the value represents Nothing
|
||||
pub fn is_none(&self) -> bool {
|
||||
match self {
|
||||
UntaggedValue::Primitive(Primitive::Nothing) => true,
|
||||
_ => false,
|
||||
}
|
||||
}
|
||||
|
||||
/// Returns true if the value represents an error
|
||||
pub fn is_error(&self) -> bool {
|
||||
match self {
|
||||
UntaggedValue::Error(_err) => true,
|
||||
_ => false,
|
||||
}
|
||||
}
|
||||
|
||||
/// Expect this value to be an error and return it
|
||||
pub fn expect_error(&self) -> ShellError {
|
||||
match self {
|
||||
UntaggedValue::Error(err) => err.clone(),
|
||||
_ => panic!("Don't call expect_error without first calling is_error"),
|
||||
}
|
||||
}
|
||||
|
||||
/// Expect this value to be a string and return it
|
||||
pub fn expect_string(&self) -> &str {
|
||||
match self {
|
||||
UntaggedValue::Primitive(Primitive::String(string)) => &string[..],
|
||||
_ => panic!("expect_string assumes that the value must be a string"),
|
||||
}
|
||||
}
|
||||
|
||||
/// Helper for creating row values
|
||||
pub fn row(entries: IndexMap<String, Value>) -> UntaggedValue {
|
||||
UntaggedValue::Row(entries.into())
|
||||
}
|
||||
|
||||
/// Helper for creating table values
|
||||
pub fn table(list: &[Value]) -> UntaggedValue {
|
||||
UntaggedValue::Table(list.to_vec())
|
||||
}
|
||||
|
||||
/// Helper for creating string values
|
||||
pub fn string(s: impl Into<String>) -> UntaggedValue {
|
||||
UntaggedValue::Primitive(Primitive::String(s.into()))
|
||||
}
|
||||
|
||||
/// Helper for creating line values
|
||||
pub fn line(s: impl Into<String>) -> UntaggedValue {
|
||||
UntaggedValue::Primitive(Primitive::Line(s.into()))
|
||||
}
|
||||
|
||||
/// Helper for creating column-path values
|
||||
pub fn column_path(s: Vec<impl Into<PathMember>>) -> UntaggedValue {
|
||||
UntaggedValue::Primitive(Primitive::ColumnPath(ColumnPath::new(
|
||||
s.into_iter().map(|p| p.into()).collect(),
|
||||
)))
|
||||
}
|
||||
|
||||
/// Helper for creating integer values
|
||||
pub fn int(i: impl Into<BigInt>) -> UntaggedValue {
|
||||
UntaggedValue::Primitive(Primitive::Int(i.into()))
|
||||
}
|
||||
|
||||
/// Helper for creating glob pattern values
|
||||
pub fn pattern(s: impl Into<String>) -> UntaggedValue {
|
||||
UntaggedValue::Primitive(Primitive::String(s.into()))
|
||||
}
|
||||
|
||||
/// Helper for creating filepath values
|
||||
pub fn path(s: impl Into<PathBuf>) -> UntaggedValue {
|
||||
UntaggedValue::Primitive(Primitive::Path(s.into()))
|
||||
}
|
||||
|
||||
/// Helper for creating bytesize values
|
||||
pub fn bytes(s: impl Into<u64>) -> UntaggedValue {
|
||||
UntaggedValue::Primitive(Primitive::Bytes(s.into()))
|
||||
}
|
||||
|
||||
/// Helper for creating decimal values
|
||||
pub fn decimal(s: impl Into<BigDecimal>) -> UntaggedValue {
|
||||
UntaggedValue::Primitive(Primitive::Decimal(s.into()))
|
||||
}
|
||||
|
||||
/// Helper for creating binary (non-text) buffer values
|
||||
pub fn binary(binary: Vec<u8>) -> UntaggedValue {
|
||||
UntaggedValue::Primitive(Primitive::Binary(binary))
|
||||
}
|
||||
|
||||
/// Helper for creating range values
|
||||
pub fn range(
|
||||
left: (Spanned<Primitive>, RangeInclusion),
|
||||
right: (Spanned<Primitive>, RangeInclusion),
|
||||
) -> UntaggedValue {
|
||||
UntaggedValue::Primitive(Primitive::Range(Box::new(Range::new(left, right))))
|
||||
}
|
||||
|
||||
/// Helper for creating boolean values
|
||||
pub fn boolean(s: impl Into<bool>) -> UntaggedValue {
|
||||
UntaggedValue::Primitive(Primitive::Boolean(s.into()))
|
||||
}
|
||||
|
||||
/// Helper for creating date duration values
|
||||
pub fn duration(secs: u64) -> UntaggedValue {
|
||||
UntaggedValue::Primitive(Primitive::Duration(secs))
|
||||
}
|
||||
|
||||
/// Helper for creating datetime values
|
||||
pub fn system_date(s: SystemTime) -> UntaggedValue {
|
||||
UntaggedValue::Primitive(Primitive::Date(s.into()))
|
||||
}
|
||||
|
||||
/// Helper for creating the Nothing value
|
||||
pub fn nothing() -> UntaggedValue {
|
||||
UntaggedValue::Primitive(Primitive::Nothing)
|
||||
}
|
||||
}
|
||||
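These helpers are the usual way values get built before being tagged; a brief illustrative sketch (the column names below are made up):

```rust
use indexmap::IndexMap;
use nu_protocol::{UntaggedValue, Value};
use nu_source::Tag;

fn demo() -> Value {
    let mut entries = IndexMap::new();
    entries.insert(
        "name".to_string(),
        UntaggedValue::string("nushell").into_value(Tag::unknown()),
    );
    entries.insert(
        "stars".to_string(),
        UntaggedValue::int(4000).into_value(Tag::unknown()),
    );

    // Build a row and give it a (placeholder) tag so it can flow through a pipeline.
    UntaggedValue::row(entries).into_value(Tag::unknown())
}
```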
|
||||
/// The fundamental structured value that flows through the pipeline, with associated metadata
|
||||
#[derive(Debug, Clone, PartialOrd, PartialEq, Ord, Eq, Hash, Serialize, Deserialize)]
|
||||
pub struct Value {
|
||||
pub value: UntaggedValue,
|
||||
pub tag: Tag,
|
||||
}
|
||||
|
||||
/// Overload dereferencing to give back the UntaggedValue inside of a Value
|
||||
impl std::ops::Deref for Value {
|
||||
type Target = UntaggedValue;
|
||||
|
||||
fn deref(&self) -> &Self::Target {
|
||||
&self.value
|
||||
}
|
||||
}
|
||||
|
||||
impl Value {
|
||||
/// Get the corresponding anchor (originating location) for the Value
|
||||
pub fn anchor(&self) -> Option<AnchorLocation> {
|
||||
self.tag.anchor()
|
||||
}
|
||||
|
||||
/// Get the name (url, filepath, etc) behind an anchor for the Value
|
||||
pub fn anchor_name(&self) -> Option<String> {
|
||||
self.tag.anchor_name()
|
||||
}
|
||||
|
||||
/// Get the metadata for the Value
|
||||
pub fn tag(&self) -> Tag {
|
||||
self.tag.clone()
|
||||
}
|
||||
|
||||
/// View the Value as a string, if possible
|
||||
pub fn as_string(&self) -> Result<String, ShellError> {
|
||||
match &self.value {
|
||||
UntaggedValue::Primitive(Primitive::String(string)) => Ok(string.clone()),
|
||||
UntaggedValue::Primitive(Primitive::Line(line)) => Ok(line.clone() + "\n"),
|
||||
_ => Err(ShellError::type_error("string", self.spanned_type_name())),
|
||||
}
|
||||
}
|
||||
|
||||
/// View into the borrowed string contents of a Value, if possible
|
||||
pub fn as_forgiving_string(&self) -> Result<&str, ShellError> {
|
||||
match &self.value {
|
||||
UntaggedValue::Primitive(Primitive::String(string)) => Ok(&string[..]),
|
||||
_ => Err(ShellError::type_error("string", self.spanned_type_name())),
|
||||
}
|
||||
}
|
||||
|
||||
/// View the Value as a path, if possible
|
||||
pub fn as_path(&self) -> Result<PathBuf, ShellError> {
|
||||
match &self.value {
|
||||
UntaggedValue::Primitive(Primitive::Path(path)) => Ok(path.clone()),
|
||||
UntaggedValue::Primitive(Primitive::String(path_str)) => Ok(PathBuf::from(&path_str)),
|
||||
_ => Err(ShellError::type_error("Path", self.spanned_type_name())),
|
||||
}
|
||||
}
|
||||
|
||||
/// View the Value as a Primitive value, if possible
|
||||
pub fn as_primitive(&self) -> Result<Primitive, ShellError> {
|
||||
match &self.value {
|
||||
UntaggedValue::Primitive(primitive) => Ok(primitive.clone()),
|
||||
_ => Err(ShellError::type_error(
|
||||
"Primitive",
|
||||
self.spanned_type_name(),
|
||||
)),
|
||||
}
|
||||
}
|
||||
|
||||
/// View the Value as unsigned 64-bit, if possible
|
||||
pub fn as_u64(&self) -> Result<u64, ShellError> {
|
||||
match &self.value {
|
||||
UntaggedValue::Primitive(primitive) => primitive.as_u64(self.tag.span),
|
||||
_ => Err(ShellError::type_error("integer", self.spanned_type_name())),
|
||||
}
|
||||
}
|
||||
|
||||
/// View the Value as boolean, if possible
|
||||
pub fn as_bool(&self) -> Result<bool, ShellError> {
|
||||
match &self.value {
|
||||
UntaggedValue::Primitive(Primitive::Boolean(p)) => Ok(*p),
|
||||
_ => Err(ShellError::type_error("boolean", self.spanned_type_name())),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Into<Value> for String {
|
||||
fn into(self) -> Value {
|
||||
let end = self.len();
|
||||
Value {
|
||||
value: self.into(),
|
||||
tag: Tag {
|
||||
anchor: None,
|
||||
span: Span::new(0, end),
|
||||
},
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Into<UntaggedValue> for &str {
|
||||
/// Convert a string slice into an UntaggedValue
|
||||
fn into(self) -> UntaggedValue {
|
||||
UntaggedValue::Primitive(Primitive::String(self.to_string()))
|
||||
}
|
||||
}
|
||||
|
||||
impl Into<UntaggedValue> for Value {
|
||||
/// Convert a Value into an UntaggedValue
|
||||
fn into(self) -> UntaggedValue {
|
||||
self.value
|
||||
}
|
||||
}
|
||||
|
||||
/// Convert a borrowed Value into a borrowed UntaggedValue
|
||||
impl<'a> Into<&'a UntaggedValue> for &'a Value {
|
||||
fn into(self) -> &'a UntaggedValue {
|
||||
&self.value
|
||||
}
|
||||
}
|
||||
|
||||
impl HasSpan for Value {
|
||||
/// Return the corresponding Span for the Value
|
||||
fn span(&self) -> Span {
|
||||
self.tag.span
|
||||
}
|
||||
}
|
||||
|
||||
impl ShellTypeName for Value {
|
||||
/// Get the type name for the Value
|
||||
fn type_name(&self) -> &'static str {
|
||||
ShellTypeName::type_name(&self.value)
|
||||
}
|
||||
}
|
||||
|
||||
impl ShellTypeName for UntaggedValue {
|
||||
/// Get the type name for the UntaggedValue
|
||||
fn type_name(&self) -> &'static str {
|
||||
match &self {
|
||||
UntaggedValue::Primitive(p) => p.type_name(),
|
||||
UntaggedValue::Row(_) => "row",
|
||||
UntaggedValue::Table(_) => "table",
|
||||
UntaggedValue::Error(_) => "error",
|
||||
UntaggedValue::Block(_) => "block",
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl From<Primitive> for UntaggedValue {
|
||||
/// Convert a Primitive to an UntaggedValue
|
||||
fn from(input: Primitive) -> UntaggedValue {
|
||||
UntaggedValue::Primitive(input)
|
||||
}
|
||||
}
|
||||
|
||||
impl From<String> for UntaggedValue {
|
||||
/// Convert a String to an UntaggedValue
|
||||
fn from(input: String) -> UntaggedValue {
|
||||
UntaggedValue::Primitive(Primitive::String(input))
|
||||
}
|
||||
}
|
||||
|
||||
impl From<ShellError> for UntaggedValue {
|
||||
fn from(e: ShellError) -> Self {
|
||||
UntaggedValue::Error(e)
|
||||
}
|
||||
}
|
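A tiny, hypothetical round-trip through the conversions above (assuming `Value` is re-exported from the `nu_protocol` crate root):

```rust
// Hypothetical sketch, not part of the diff.
use nu_protocol::Value;

fn value_example() {
    let value: Value = "hello".to_string().into(); // uses the Into<Value> impl for String above
    assert_eq!(value.as_string().unwrap(), "hello");
    assert!(value.as_u64().is_err()); // a string is not an integer, so a type error comes back
}
```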
crates/nu-protocol/src/value/column_path.rs (new file, 129 lines)
@@ -0,0 +1,129 @@
use crate::Value;
use derive_new::new;
use getset::Getters;
use nu_source::{b, span_for_spanned_list, DebugDocBuilder, HasFallibleSpan, PrettyDebug, Span};
use num_bigint::BigInt;
use serde::{Deserialize, Serialize};

/// A PathMember that has yet to be spanned so that it can be used in later processing
#[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)]
pub enum UnspannedPathMember {
    String(String),
    Int(BigInt),
}

impl UnspannedPathMember {
    /// Add the span information and get a full PathMember
    pub fn into_path_member(self, span: impl Into<Span>) -> PathMember {
        PathMember {
            unspanned: self,
            span: span.into(),
        }
    }
}

/// A basic piece of a ColumnPath, which describes the steps to take through a table to arrive at a cell, row, or inner table
#[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)]
pub struct PathMember {
    pub unspanned: UnspannedPathMember,
    pub span: Span,
}

impl PrettyDebug for &PathMember {
    /// Gets the PathMember ready to be pretty-printed
    fn pretty(&self) -> DebugDocBuilder {
        match &self.unspanned {
            UnspannedPathMember::String(string) => b::primitive(format!("{:?}", string)),
            UnspannedPathMember::Int(int) => b::primitive(format!("{}", int)),
        }
    }
}

/// The fundamental path primitive that describes how to navigate through a table to get to a sub-item. A path member can be either a word or a number. Words/strings are taken to mean
/// a column name, and numbers are the row number. Taken together they describe which column or row to narrow to in order to get data.
///
/// Rows must follow column names; they can't come first. E.g. `foo.1` is valid whereas `1.foo` is not.
#[derive(
    Debug, Hash, Serialize, Deserialize, Ord, PartialOrd, Eq, PartialEq, Getters, Clone, new,
)]
pub struct ColumnPath {
    #[get = "pub"]
    members: Vec<PathMember>,
}

impl ColumnPath {
    /// Iterate over the members of the column path
    pub fn iter(&self) -> impl Iterator<Item = &PathMember> {
        self.members.iter()
    }

    /// Returns the last member and a slice of the remaining members
    pub fn split_last(&self) -> Option<(&PathMember, &[PathMember])> {
        self.members.split_last()
    }
}

impl PrettyDebug for ColumnPath {
    /// Gets the ColumnPath ready to be pretty-printed
    fn pretty(&self) -> DebugDocBuilder {
        let members: Vec<DebugDocBuilder> =
            self.members.iter().map(|member| member.pretty()).collect();

        b::delimit(
            "(",
            b::description("path") + b::equals() + b::intersperse(members, b::space()),
            ")",
        )
        .nest()
    }
}

impl HasFallibleSpan for ColumnPath {
    /// Creates a span that will cover the column path, if possible
    fn maybe_span(&self) -> Option<Span> {
        if self.members.is_empty() {
            None
        } else {
            Some(span_for_spanned_list(self.members.iter().map(|m| m.span)))
        }
    }
}

impl PathMember {
    /// Create a string path member
    pub fn string(string: impl Into<String>, span: impl Into<Span>) -> PathMember {
        UnspannedPathMember::String(string.into()).into_path_member(span)
    }

    /// Create a numeric path member
    pub fn int(int: impl Into<BigInt>, span: impl Into<Span>) -> PathMember {
        UnspannedPathMember::Int(int.into()).into_path_member(span)
    }
}

/// Prepares a list of "sounds like" matches for the string you're trying to find
pub fn did_you_mean(obj_source: &Value, field_tried: &PathMember) -> Option<Vec<(usize, String)>> {
    let field_tried = match &field_tried.unspanned {
        UnspannedPathMember::String(string) => string.clone(),
        UnspannedPathMember::Int(int) => format!("{}", int),
    };

    let possibilities = obj_source.data_descriptors();

    let mut possible_matches: Vec<_> = possibilities
        .into_iter()
        .map(|x| {
            let word = x;
            let distance = natural::distance::levenshtein_distance(&word, &field_tried);

            (distance, word)
        })
        .collect();

    if !possible_matches.is_empty() {
        possible_matches.sort();
        Some(possible_matches)
    } else {
        None
    }
}
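A hypothetical sketch (not part of the diff) of building the column path `name.1` out of the pieces above, assuming `ColumnPath` and `PathMember` are re-exported from the crate root:

```rust
use nu_protocol::{ColumnPath, PathMember};
use nu_source::Span;

fn column_path_example() -> ColumnPath {
    let members = vec![
        PathMember::string("name", Span::new(0, 4)), // the column to narrow to
        PathMember::int(1, Span::new(5, 6)),         // then row 1 inside that column
    ];
    ColumnPath::new(members) // `new` is generated by the derive_new attribute above
}
```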
crates/nu-protocol/src/value/convert.rs (new file, 59 lines)
@@ -0,0 +1,59 @@
use crate::type_name::SpannedTypeName;
use crate::value::dict::Dictionary;
use crate::value::primitive::Primitive;
use crate::value::{UntaggedValue, Value};
use nu_errors::{CoerceInto, ShellError};
use nu_source::TaggedItem;

impl std::convert::TryFrom<&Value> for i64 {
    type Error = ShellError;

    /// Convert to an i64 integer, if possible
    fn try_from(value: &Value) -> Result<i64, ShellError> {
        match &value.value {
            UntaggedValue::Primitive(Primitive::Int(int)) => {
                int.tagged(&value.tag).coerce_into("converting to i64")
            }
            _ => Err(ShellError::type_error("Integer", value.spanned_type_name())),
        }
    }
}

impl std::convert::TryFrom<&Value> for String {
    type Error = ShellError;

    /// Convert to a string, if possible
    fn try_from(value: &Value) -> Result<String, ShellError> {
        match &value.value {
            UntaggedValue::Primitive(Primitive::String(s)) => Ok(s.clone()),
            _ => Err(ShellError::type_error("String", value.spanned_type_name())),
        }
    }
}

impl std::convert::TryFrom<&Value> for Vec<u8> {
    type Error = ShellError;

    /// Convert to a u8 vec, if possible
    fn try_from(value: &Value) -> Result<Vec<u8>, ShellError> {
        match &value.value {
            UntaggedValue::Primitive(Primitive::Binary(b)) => Ok(b.clone()),
            _ => Err(ShellError::type_error("Binary", value.spanned_type_name())),
        }
    }
}

impl<'a> std::convert::TryFrom<&'a Value> for &'a Dictionary {
    type Error = ShellError;

    /// Convert to a dictionary, if possible
    fn try_from(value: &'a Value) -> Result<&'a Dictionary, ShellError> {
        match &value.value {
            UntaggedValue::Row(d) => Ok(d),
            _ => Err(ShellError::type_error(
                "Dictionary",
                value.spanned_type_name(),
            )),
        }
    }
}
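A small, hypothetical sketch of the TryFrom conversions above in use (assuming `Value` is re-exported from the crate root):

```rust
use std::convert::TryFrom;

use nu_protocol::Value;

fn convert_example(value: &Value) {
    // Each conversion either yields the underlying data or a spanned type error.
    if let Ok(n) = i64::try_from(value) {
        println!("integer: {}", n);
    } else if let Ok(s) = String::try_from(value) {
        println!("string: {}", s);
    }
}
```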
@@ -1,15 +1,41 @@
use crate::data::base::Primitive;
use crate::traits::PrettyType;
use crate::type_name::PrettyType;
use crate::value::primitive::Primitive;
use crate::value::{UntaggedValue, Value};
use nu_source::{b, DebugDocBuilder, PrettyDebug};

impl PrettyDebug for &Value {
    /// Get a borrowed Value ready to be pretty-printed
    fn pretty(&self) -> DebugDocBuilder {
        PrettyDebug::pretty(*self)
    }
}

impl PrettyDebug for Value {
    /// Get a Value ready to be pretty-printed
    fn pretty(&self) -> DebugDocBuilder {
        match &self.value {
            UntaggedValue::Primitive(p) => p.pretty(),
            UntaggedValue::Row(row) => row.pretty_builder().nest(1).group().into(),
            UntaggedValue::Table(table) => {
                b::delimit("[", b::intersperse(table, b::space()), "]").nest()
            }
            UntaggedValue::Error(_) => b::error("error"),
            UntaggedValue::Block(_) => b::opaque("block"),
        }
    }
}

impl PrettyType for Primitive {
    /// Find the type of the Primitive value and prepare it for pretty-printing
    fn pretty_type(&self) -> DebugDocBuilder {
        match self {
            Primitive::Nothing => ty("nothing"),
            Primitive::Int(_) => ty("integer"),
            Primitive::Range(_) => ty("range"),
            Primitive::Decimal(_) => ty("decimal"),
            Primitive::Bytes(_) => ty("bytesize"),
            Primitive::String(_) => ty("string"),
            Primitive::Line(_) => ty("line"),
            Primitive::ColumnPath(_) => ty("column-path"),
            Primitive::Pattern(_) => ty("pattern"),
            Primitive::Boolean(_) => ty("boolean"),
@@ -24,13 +50,30 @@ impl PrettyType for Primitive {
}

impl PrettyDebug for Primitive {
    /// Get a Primitive value ready to be pretty-printed
    fn pretty(&self) -> DebugDocBuilder {
        match self {
            Primitive::Nothing => b::primitive("nothing"),
            Primitive::Int(int) => prim(format_args!("{}", int)),
            Primitive::Decimal(decimal) => prim(format_args!("{}", decimal)),
            Primitive::Range(range) => {
                let (left, left_inclusion) = &range.from;
                let (right, right_inclusion) = &range.to;

                b::typed(
                    "range",
                    (left_inclusion.debug_left_bracket()
                        + left.pretty()
                        + b::operator(",")
                        + b::space()
                        + right.pretty()
                        + right_inclusion.debug_right_bracket())
                    .group(),
                )
            }
            Primitive::Bytes(bytes) => primitive_doc(bytes, "bytesize"),
            Primitive::String(string) => prim(string),
            Primitive::Line(string) => prim(string),
            Primitive::ColumnPath(path) => path.pretty(),
            Primitive::Pattern(pattern) => primitive_doc(pattern, "pattern"),
            Primitive::Boolean(boolean) => match boolean {
@@ -51,10 +94,10 @@ fn prim(name: impl std::fmt::Debug) -> DebugDocBuilder {
    b::primitive(format!("{:?}", name))
}

fn ty(name: impl std::fmt::Debug) -> DebugDocBuilder {
    b::kind(format!("{:?}", name))
}

fn primitive_doc(name: impl std::fmt::Debug, ty: impl Into<String>) -> DebugDocBuilder {
    b::primitive(format!("{:?}", name)) + b::delimit("(", b::kind(ty.into()), ")")
}

fn ty(name: impl std::fmt::Debug) -> DebugDocBuilder {
    b::kind(format!("{:?}", name))
}
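A hypothetical sketch of implementing PrettyDebug for a custom type, using only the builder calls that appear above (`b::description`, `b::equals`, `b::primitive`, `b::delimit`); the `Credentials` type is invented for illustration:

```rust
use nu_source::{b, DebugDocBuilder, PrettyDebug};

struct Credentials {
    user: String,
    host: String,
}

impl PrettyDebug for Credentials {
    fn pretty(&self) -> DebugDocBuilder {
        // Renders roughly as: (credentials = "user" "host")
        b::delimit(
            "(",
            b::description("credentials")
                + b::equals()
                + b::primitive(format!("{:?}", self.user))
                + b::space()
                + b::primitive(format!("{:?}", self.host)),
            ")",
        )
        .group()
    }
}
```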
crates/nu-protocol/src/value/dict.rs (new file, 236 lines)
@@ -0,0 +1,236 @@
use crate::maybe_owned::MaybeOwned;
use crate::value::primitive::Primitive;
use crate::value::{UntaggedValue, Value};
use derive_new::new;
use getset::Getters;
use indexmap::IndexMap;
use nu_source::{b, DebugDocBuilder, PrettyDebug, Spanned, Tag};
use serde::{Deserialize, Serialize};
use std::cmp::{Ord, Ordering, PartialOrd};
use std::hash::{Hash, Hasher};

/// A dictionary that can hold a mapping from names to Values
#[derive(Debug, Default, Serialize, Deserialize, PartialEq, Eq, Clone, Getters, new)]
pub struct Dictionary {
    #[get = "pub"]
    pub entries: IndexMap<String, Value>,
}

#[allow(clippy::derive_hash_xor_eq)]
impl Hash for Dictionary {
    /// Create the hash function to allow the Hash trait for dictionaries
    fn hash<H: Hasher>(&self, state: &mut H) {
        let mut entries = self.entries.clone();
        entries.sort_keys();
        entries.keys().collect::<Vec<&String>>().hash(state);
        entries.values().collect::<Vec<&Value>>().hash(state);
    }
}

impl PartialOrd for Dictionary {
    /// Compare two dictionaries for sort ordering
    fn partial_cmp(&self, other: &Dictionary) -> Option<Ordering> {
        let this: Vec<&String> = self.entries.keys().collect();
        let that: Vec<&String> = other.entries.keys().collect();

        if this != that {
            return this.partial_cmp(&that);
        }

        let this: Vec<&Value> = self.entries.values().collect();
        let that: Vec<&Value> = other.entries.values().collect();

        this.partial_cmp(&that)
    }
}

impl Ord for Dictionary {
    /// Compare two dictionaries for ordering
    fn cmp(&self, other: &Dictionary) -> Ordering {
        let this: Vec<&String> = self.entries.keys().collect();
        let that: Vec<&String> = other.entries.keys().collect();

        if this != that {
            return this.cmp(&that);
        }

        let this: Vec<&Value> = self.entries.values().collect();
        let that: Vec<&Value> = other.entries.values().collect();

        this.cmp(&that)
    }
}

impl PartialEq<Value> for Dictionary {
    /// Test a dictionary against a Value for equality
    fn eq(&self, other: &Value) -> bool {
        match &other.value {
            UntaggedValue::Row(d) => self == d,
            _ => false,
        }
    }
}

/// A key-value pair specifically meant to be used in debug and pretty-printing
#[derive(Debug, new)]
struct DebugEntry<'a> {
    key: &'a str,
    value: &'a Value,
}

impl<'a> PrettyDebug for DebugEntry<'a> {
    /// Build the information to pretty-print the DebugEntry
    fn pretty(&self) -> DebugDocBuilder {
        (b::key(self.key.to_string()) + b::equals() + self.value.pretty().into_value()).group()
    }
}

impl PrettyDebug for Dictionary {
    /// Get a Dictionary ready to be pretty-printed
    fn pretty(&self) -> DebugDocBuilder {
        b::delimit(
            "(",
            b::intersperse(
                self.entries()
                    .iter()
                    .map(|(key, value)| DebugEntry::new(key, value)),
                b::space(),
            ),
            ")",
        )
    }
}

impl From<IndexMap<String, Value>> for Dictionary {
    /// Create a dictionary from a map of strings to Values
    fn from(input: IndexMap<String, Value>) -> Dictionary {
        let mut out = IndexMap::default();

        for (key, value) in input {
            out.insert(key, value);
        }

        Dictionary::new(out)
    }
}

impl Dictionary {
    /// Find the matching Value for a given key, if possible. If not, return a Primitive::Nothing
    pub fn get_data(&self, desc: &str) -> MaybeOwned<'_, Value> {
        match self.entries.get(desc) {
            Some(v) => MaybeOwned::Borrowed(v),
            None => MaybeOwned::Owned(
                UntaggedValue::Primitive(Primitive::Nothing).into_untagged_value(),
            ),
        }
    }

    /// Iterate the keys in the Dictionary
    pub fn keys(&self) -> impl Iterator<Item = &String> {
        self.entries.keys()
    }

    /// Checks if the given key exists
    pub fn contains_key(&self, key: &str) -> bool {
        self.entries.contains_key(key)
    }

    /// Find the matching Value for a key, if possible
    pub fn get_data_by_key(&self, name: Spanned<&str>) -> Option<Value> {
        let result = self
            .entries
            .iter()
            .find(|(desc_name, _)| *desc_name == name.item)?
            .1;

        Some(
            result
                .value
                .clone()
                .into_value(Tag::new(result.tag.anchor(), name.span)),
        )
    }

    /// Get a mutable entry that matches a key, if possible
    pub fn get_mut_data_by_key(&mut self, name: &str) -> Option<&mut Value> {
        match self
            .entries
            .iter_mut()
            .find(|(desc_name, _)| *desc_name == name)
        {
            Some((_, v)) => Some(v),
            None => None,
        }
    }

    /// Insert a new key/value pair into the dictionary
    pub fn insert_data_at_key(&mut self, name: &str, value: Value) {
        self.entries.insert(name.to_string(), value);
    }
}

/// A helper for building dictionaries. It can insert values into the dictionary while maintaining the tags that need to be applied to the individual members
#[derive(Debug)]
pub struct TaggedDictBuilder {
    tag: Tag,
    dict: IndexMap<String, Value>,
}

impl TaggedDictBuilder {
    /// Create a new builder
    pub fn new(tag: impl Into<Tag>) -> TaggedDictBuilder {
        TaggedDictBuilder {
            tag: tag.into(),
            dict: IndexMap::default(),
        }
    }

    /// Build the contents of the builder into a Value
    pub fn build(tag: impl Into<Tag>, block: impl FnOnce(&mut TaggedDictBuilder)) -> Value {
        let mut builder = TaggedDictBuilder::new(tag);
        block(&mut builder);
        builder.into_value()
    }

    /// Create a new builder with a pre-defined capacity
    pub fn with_capacity(tag: impl Into<Tag>, n: usize) -> TaggedDictBuilder {
        TaggedDictBuilder {
            tag: tag.into(),
            dict: IndexMap::with_capacity(n),
        }
    }

    /// Insert an untagged key/value pair into the dictionary, to later be tagged when built
    pub fn insert_untagged(&mut self, key: impl Into<String>, value: impl Into<UntaggedValue>) {
        self.dict
            .insert(key.into(), value.into().into_value(&self.tag));
    }

    /// Insert a key/value pair into the dictionary
    pub fn insert_value(&mut self, key: impl Into<String>, value: impl Into<Value>) {
        self.dict.insert(key.into(), value.into());
    }

    /// Convert the dictionary into a tagged Value using the original tag
    pub fn into_value(self) -> Value {
        let tag = self.tag.clone();
        self.into_untagged_value().into_value(tag)
    }

    /// Convert the dictionary into an UntaggedValue
    pub fn into_untagged_value(self) -> UntaggedValue {
        UntaggedValue::Row(Dictionary { entries: self.dict })
    }

    /// Returns true if the dictionary is empty, false otherwise
    pub fn is_empty(&self) -> bool {
        self.dict.is_empty()
    }
}

impl From<TaggedDictBuilder> for Value {
    /// Convert a builder into a tagged Value
    fn from(input: TaggedDictBuilder) -> Value {
        input.into_value()
    }
}
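A hypothetical sketch of the builder above in use (assuming these types are re-exported from the `nu_protocol` crate root):

```rust
use nu_protocol::{Primitive, TaggedDictBuilder, UntaggedValue, Value};
use nu_source::Tag;

fn row_example(tag: Tag) -> Value {
    TaggedDictBuilder::build(tag, |row| {
        // &str converts into UntaggedValue via the Into impl in value.rs above;
        // untagged inserts pick up the builder's tag when the row is built.
        row.insert_untagged("name", "nushell");
        row.insert_untagged("stars", UntaggedValue::Primitive(Primitive::Int(5000.into())));
    })
}
```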
crates/nu-protocol/src/value/evaluate.rs (new file, 108 lines)
@@ -0,0 +1,108 @@
use crate::value::{Primitive, UntaggedValue, Value};
use indexmap::IndexMap;
use nu_errors::ShellError;
use query_interface::{interfaces, vtable_for, Object, ObjectHash};
use serde::{Deserialize, Serialize};
use std::cmp::{Ord, Ordering, PartialOrd};
use std::fmt::Debug;

/// An evaluation scope. Scopes map variable names to Values and aid in evaluating blocks and expressions.
/// A scope also holds the value of the special $it variable, which refers to the value currently passing
/// through the pipeline.
#[derive(Debug)]
pub struct Scope {
    pub it: Value,
    pub vars: IndexMap<String, Value>,
}

impl Scope {
    /// Create a new scope
    pub fn new(it: Value) -> Scope {
        Scope {
            it,
            vars: IndexMap::new(),
        }
    }
}

impl Scope {
    /// Create an empty scope
    pub fn empty() -> Scope {
        Scope {
            it: UntaggedValue::Primitive(Primitive::Nothing).into_untagged_value(),
            vars: IndexMap::new(),
        }
    }

    /// Create an empty scope, setting $it to a known Value
    pub fn it_value(value: Value) -> Scope {
        Scope {
            it: value,
            vars: IndexMap::new(),
        }
    }
}
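A hypothetical sketch of seeding a Scope with a known $it (assuming `Scope` and `UntaggedValue` are re-exported from the crate root):

```rust
use nu_protocol::{Scope, UntaggedValue};

fn scope_example() -> Scope {
    // $it is set to the string "hello"; no other variables are bound yet.
    let it = UntaggedValue::from("hello".to_string()).into_untagged_value();
    Scope::it_value(it)
}
```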
#[typetag::serde(tag = "type")]
pub trait EvaluateTrait: Debug + Send + Sync + Object + ObjectHash + 'static {
    fn invoke(&self, scope: &Scope) -> Result<Value, ShellError>;
    fn clone_box(&self) -> Evaluate;
}

interfaces!(Evaluate: dyn ObjectHash);

#[typetag::serde]
impl EvaluateTrait for Evaluate {
    fn invoke(&self, scope: &Scope) -> Result<Value, ShellError> {
        self.expr.invoke(scope)
    }

    fn clone_box(&self) -> Evaluate {
        self.expr.clone_box()
    }
}

#[derive(Debug, Serialize, Deserialize)]
pub struct Evaluate {
    expr: Box<dyn EvaluateTrait>,
}

impl Evaluate {
    pub fn new(evaluate: impl EvaluateTrait) -> Evaluate {
        Evaluate {
            expr: Box::new(evaluate),
        }
    }
}

impl std::hash::Hash for Evaluate {
    fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
        self.expr.obj_hash(state)
    }
}

impl Clone for Evaluate {
    fn clone(&self) -> Evaluate {
        self.expr.clone_box()
    }
}

impl Ord for Evaluate {
    fn cmp(&self, _: &Self) -> Ordering {
        Ordering::Equal
    }
}

impl PartialOrd for Evaluate {
    fn partial_cmp(&self, _: &Evaluate) -> Option<Ordering> {
        Some(Ordering::Equal)
    }
}

impl PartialEq for Evaluate {
    fn eq(&self, _: &Evaluate) -> bool {
        true
    }
}

impl Eq for Evaluate {}
crates/nu-protocol/src/value/primitive.rs (new file, 262 lines)
@@ -0,0 +1,262 @@
use crate::type_name::ShellTypeName;
use crate::value::column_path::ColumnPath;
use crate::value::range::Range;
use crate::value::{serde_bigdecimal, serde_bigint};
use bigdecimal::BigDecimal;
use chrono::{DateTime, Utc};
use nu_errors::{ExpectedRange, ShellError};
use nu_source::{PrettyDebug, Span, SpannedItem};
use num_bigint::BigInt;
use num_traits::cast::{FromPrimitive, ToPrimitive};
use serde::{Deserialize, Serialize};
use std::path::PathBuf;

/// The most fundamental of structured values in Nu are the Primitive values. These values represent types like integers, strings, booleans, dates, etc that are then used
/// as the building blocks to build up more complex structures.
///
/// Primitives also include marker values BeginningOfStream and EndOfStream which denote a change of condition in the stream
#[derive(Debug, Clone, Ord, PartialOrd, Eq, PartialEq, Hash, Deserialize, Serialize)]
pub enum Primitive {
    /// An empty value
    Nothing,
    /// A "big int", an integer with arbitrarily large size (aka not limited to 64-bit)
    #[serde(with = "serde_bigint")]
    Int(BigInt),
    /// A "big decimal", a decimal number with arbitrarily large size (aka not limited to 64-bit)
    #[serde(with = "serde_bigdecimal")]
    Decimal(BigDecimal),
    /// A count in the number of bytes, used as a filesize
    Bytes(u64),
    /// A string value
    String(String),
    /// A string value with an implied carriage return (or cr/lf) ending
    Line(String),
    /// A path to travel to reach a value in a table
    ColumnPath(ColumnPath),
    /// A glob pattern, eg foo*
    Pattern(String),
    /// A boolean value
    Boolean(bool),
    /// A date value, in UTC
    Date(DateTime<Utc>),
    /// A count in the number of seconds
    Duration(u64),
    /// A range of values
    Range(Box<Range>),
    /// A file path
    Path(PathBuf),
    /// A vector of raw binary data
    #[serde(with = "serde_bytes")]
    Binary(Vec<u8>),

    /// Beginning of stream marker, a pseudo-value not intended for tables
    BeginningOfStream,
    /// End of stream marker, a pseudo-value not intended for tables
    EndOfStream,
}

impl Primitive {
    /// Converts a primitive value to a u64, if possible. Uses a span to build an error if the conversion isn't possible.
    pub fn as_u64(&self, span: Span) -> Result<u64, ShellError> {
        match self {
            Primitive::Int(int) => match int.to_u64() {
                None => Err(ShellError::range_error(
                    ExpectedRange::U64,
                    &format!("{}", int).spanned(span),
                    "converting an integer into a 64-bit integer",
                )),
                Some(num) => Ok(num),
            },
            other => Err(ShellError::type_error(
                "integer",
                other.type_name().spanned(span),
            )),
        }
    }
}

impl From<BigDecimal> for Primitive {
    /// Helper to convert from decimals to a Primitive value
    fn from(decimal: BigDecimal) -> Primitive {
        Primitive::Decimal(decimal)
    }
}

impl From<f64> for Primitive {
    /// Helper to convert from a 64-bit float to a Primitive value
    fn from(float: f64) -> Primitive {
        if let Some(f) = BigDecimal::from_f64(float) {
            Primitive::Decimal(f)
        } else {
            unreachable!("Internal error: protocol did not use f64-compatible decimal")
        }
    }
}

impl ShellTypeName for Primitive {
    /// Get the name of the type of a Primitive value
    fn type_name(&self) -> &'static str {
        match self {
            Primitive::Nothing => "nothing",
            Primitive::Int(_) => "integer",
            Primitive::Range(_) => "range",
            Primitive::Decimal(_) => "decimal",
            Primitive::Bytes(_) => "bytes",
            Primitive::String(_) => "string",
            Primitive::Line(_) => "line",
            Primitive::ColumnPath(_) => "column path",
            Primitive::Pattern(_) => "pattern",
            Primitive::Boolean(_) => "boolean",
            Primitive::Date(_) => "date",
            Primitive::Duration(_) => "duration",
            Primitive::Path(_) => "file path",
            Primitive::Binary(_) => "binary",
            Primitive::BeginningOfStream => "marker<beginning of stream>",
            Primitive::EndOfStream => "marker<end of stream>",
        }
    }
}

/// Format a Primitive value into a string
pub fn format_primitive(primitive: &Primitive, field_name: Option<&String>) -> String {
    match primitive {
        Primitive::Nothing => String::new(),
        Primitive::BeginningOfStream => String::new(),
        Primitive::EndOfStream => String::new(),
        Primitive::Path(p) => format!("{}", p.display()),
        Primitive::Bytes(b) => {
            let byte = byte_unit::Byte::from_bytes(*b as u128);

            if byte.get_bytes() == 0u128 {
                return "—".to_string();
            }

            let byte = byte.get_appropriate_unit(false);

            match byte.get_unit() {
                byte_unit::ByteUnit::B => format!("{} B ", byte.get_value()),
                _ => byte.format(1),
            }
        }
        Primitive::Duration(sec) => format_duration(*sec),
        Primitive::Int(i) => i.to_string(),
        Primitive::Decimal(decimal) => format!("{:.4}", decimal),
        Primitive::Range(range) => format!(
            "{}..{}",
            format_primitive(&range.from.0.item, None),
            format_primitive(&range.to.0.item, None)
        ),
        Primitive::Pattern(s) => s.to_string(),
        Primitive::String(s) => s.to_owned(),
        Primitive::Line(s) => s.to_owned(),
        Primitive::ColumnPath(p) => {
            let mut members = p.iter();
            let mut f = String::new();

            f.push_str(
                &members
                    .next()
                    .expect("BUG: column path with zero members")
                    .display(),
            );

            for member in members {
                f.push_str(".");
                f.push_str(&member.display())
            }

            f
        }
        Primitive::Boolean(b) => match (b, field_name) {
            (true, None) => "Yes",
            (false, None) => "No",
            (true, Some(s)) if !s.is_empty() => s,
            (false, Some(s)) if !s.is_empty() => "",
            (true, Some(_)) => "Yes",
            (false, Some(_)) => "No",
        }
        .to_owned(),
        Primitive::Binary(_) => "<binary>".to_owned(),
        Primitive::Date(d) => format_date(d),
    }
}

/// Format a duration in seconds into a string
pub fn format_duration(sec: u64) -> String {
    let (minutes, seconds) = (sec / 60, sec % 60);
    let (hours, minutes) = (minutes / 60, minutes % 60);
    let (days, hours) = (hours / 24, hours % 24);

    match (days, hours, minutes, seconds) {
        (0, 0, 0, 1) => "1 sec".to_owned(),
        (0, 0, 0, s) => format!("{} secs", s),
        (0, 0, m, s) => format!("{}:{:02}", m, s),
        (0, h, m, s) => format!("{}:{:02}:{:02}", h, m, s),
        (d, h, m, s) => format!("{}:{:02}:{:02}:{:02}", d, h, m, s),
    }
}

/// Format a UTC date value into a humanized string (eg "1 week ago" instead of a formal date string)
pub fn format_date(d: &DateTime<Utc>) -> String {
    let utc: DateTime<Utc> = Utc::now();

    let duration = utc.signed_duration_since(*d);

    if duration.num_weeks() >= 52 {
        let num_years = duration.num_weeks() / 52;

        format!(
            "{} year{} ago",
            num_years,
            if num_years == 1 { "" } else { "s" }
        )
    } else if duration.num_weeks() >= 4 {
        let num_months = duration.num_weeks() / 4;

        format!(
            "{} month{} ago",
            num_months,
            if num_months == 1 { "" } else { "s" }
        )
    } else if duration.num_weeks() >= 1 {
        let num_weeks = duration.num_weeks();

        format!(
            "{} week{} ago",
            num_weeks,
            if num_weeks == 1 { "" } else { "s" }
        )
    } else if duration.num_days() >= 1 {
        let num_days = duration.num_days();

        format!(
            "{} day{} ago",
            num_days,
            if num_days == 1 { "" } else { "s" }
        )
    } else if duration.num_hours() >= 1 {
        let num_hours = duration.num_hours();

        format!(
            "{} hour{} ago",
            num_hours,
            if num_hours == 1 { "" } else { "s" }
        )
    } else if duration.num_minutes() >= 1 {
        let num_minutes = duration.num_minutes();

        format!(
            "{} min{} ago",
            num_minutes,
            if num_minutes == 1 { "" } else { "s" }
        )
    } else {
        let num_seconds = duration.num_seconds();

        format!(
            "{} sec{} ago",
            num_seconds,
            if num_seconds == 1 { "" } else { "s" }
        )
    }
}
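A hypothetical sketch of what the formatting helpers above produce (assuming they are re-exported from the crate root):

```rust
use nu_protocol::{format_duration, format_primitive, Primitive};

fn format_example() {
    assert_eq!(format_duration(1), "1 sec");
    assert_eq!(format_duration(90), "1:30"); // m:ss once the count passes a minute
    assert_eq!(format_duration(3661), "1:01:01"); // h:mm:ss once it passes an hour
    assert_eq!(format_primitive(&Primitive::Boolean(true), None), "Yes");
    assert_eq!(format_primitive(&Primitive::Nothing, None), "");
}
```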
crates/nu-protocol/src/value/range.rs (new file, 37 lines)
@@ -0,0 +1,37 @@
use crate::value::Primitive;
use derive_new::new;
use nu_source::{b, DebugDocBuilder, Spanned};
use serde::{Deserialize, Serialize};

/// The two ways of treating a range endpoint. Inclusive means the endpoint value is part of the range (e.g. 1..3 inclusive includes 3).
/// Exclusive means it is not (e.g. 1..3 exclusive does not include 3).
#[derive(Debug, Copy, Clone, PartialEq, PartialOrd, Eq, Ord, Hash, Serialize, Deserialize)]
pub enum RangeInclusion {
    Inclusive,
    Exclusive,
}

impl RangeInclusion {
    /// Get a RangeInclusion left bracket ready for pretty printing
    pub fn debug_left_bracket(self) -> DebugDocBuilder {
        b::delimiter(match self {
            RangeInclusion::Exclusive => "(",
            RangeInclusion::Inclusive => "[",
        })
    }

    /// Get a RangeInclusion right bracket ready for pretty printing
    pub fn debug_right_bracket(self) -> DebugDocBuilder {
        b::delimiter(match self {
            RangeInclusion::Exclusive => ")",
            RangeInclusion::Inclusive => "]",
        })
    }
}

/// The range definition, holding the starting and end point of the range
#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash, Serialize, Deserialize, new)]
pub struct Range {
    pub from: (Spanned<Primitive>, RangeInclusion),
    pub to: (Spanned<Primitive>, RangeInclusion),
}
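A hypothetical sketch of constructing the inclusive range `[1, 3]` out of the pieces above:

```rust
use nu_protocol::{Primitive, Range, RangeInclusion};
use nu_source::{Span, SpannedItem};

fn range_example() -> Range {
    let from = Primitive::Int(1.into()).spanned(Span::new(0, 1));
    let to = Primitive::Int(3.into()).spanned(Span::new(3, 4));
    // `new` is generated by derive_new on the Range struct above.
    Range::new(
        (from, RangeInclusion::Inclusive),
        (to, RangeInclusion::Inclusive),
    )
}
```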
crates/nu-protocol/src/value/serde_bigdecimal.rs (new file, 26 lines)
@@ -0,0 +1,26 @@
use bigdecimal::BigDecimal;
use num_traits::cast::FromPrimitive;
use num_traits::cast::ToPrimitive;

/// Enable big decimal serialization by providing a `serialize` function
pub fn serialize<S>(big_decimal: &BigDecimal, serializer: S) -> Result<S::Ok, S::Error>
where
    S: serde::Serializer,
{
    serde::Serialize::serialize(
        &big_decimal
            .to_f64()
            .ok_or_else(|| serde::ser::Error::custom("expected a f64-sized bignum"))?,
        serializer,
    )
}

/// Enable big decimal deserialization by providing a `deserialize` function
pub fn deserialize<'de, D>(deserializer: D) -> Result<BigDecimal, D::Error>
where
    D: serde::Deserializer<'de>,
{
    let x: f64 = serde::Deserialize::deserialize(deserializer)?;
    Ok(BigDecimal::from_f64(x)
        .ok_or_else(|| serde::de::Error::custom("expected a f64-sized bigdecimal"))?)
}
crates/nu-protocol/src/value/serde_bigint.rs (new file, 26 lines)
@@ -0,0 +1,26 @@
use num_bigint::BigInt;
use num_traits::cast::FromPrimitive;
use num_traits::cast::ToPrimitive;

/// Enable big int serialization by providing a `serialize` function
pub fn serialize<S>(big_int: &BigInt, serializer: S) -> Result<S::Ok, S::Error>
where
    S: serde::Serializer,
{
    serde::Serialize::serialize(
        &big_int
            .to_i64()
            .ok_or_else(|| serde::ser::Error::custom("expected a i64-sized bignum"))?,
        serializer,
    )
}

/// Enable big int deserialization by providing a `deserialize` function
pub fn deserialize<'de, D>(deserializer: D) -> Result<BigInt, D::Error>
where
    D: serde::Deserializer<'de>,
{
    let x: i64 = serde::Deserialize::deserialize(deserializer)?;
    Ok(BigInt::from_i64(x)
        .ok_or_else(|| serde::de::Error::custom("expected a i64-sized bignum"))?)
}
@@ -1,16 +1,16 @@
[package]
name = "nu-source"
version = "0.1.0"
authors = ["Yehuda Katz <wycats@gmail.com>"]
version = "0.9.0"
authors = ["Yehuda Katz <wycats@gmail.com>", "Jonathan Turner <jonathan.d.turner@gmail.com>", "Andrés N. Robalino <andres@androbtech.com>"]
edition = "2018"
description = "A source string characterizer for Nushell"
license = "MIT"

# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[lib]
doctest = false

[dependencies]

serde = { version = "1.0.102", features = ["derive"] }
serde = { version = "1.0.103", features = ["derive"] }
derive-new = "0.5.8"
getset = "0.0.9"
nom_locate = "1.0.0"
@@ -18,3 +18,6 @@ nom-tracable = "0.4.1"
language-reporting = "0.4.0"
termcolor = "1.0.5"
pretty = "0.5.2"

[build-dependencies]
nu-build = { version = "0.9.0", path = "../nu-build" }
crates/nu-source/README.md (new file, 30 lines)
@@ -0,0 +1,30 @@
# nu-source

## Overview

The `nu-source` crate contains types and traits used for keeping track of _metadata_ about values being processed.
Nu uses `Tag`s to keep track of where a value came from, an `AnchorLocation`,
as well as positional information about the value, a `Span`.
An `AnchorLocation` can be a `Url`, `File`, or `Source` text that a value was parsed from.
The source `Text` is special in that it is a type similar to a `String` that can be cheaply cloned.
A `Span` keeps track of a value's `start` and `end` positions.
These types make up the metadata for a value and are wrapped up together in a `Tagged` struct,
which holds everything needed to track and locate a value.

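A minimal sketch of that metadata in code (assuming the `Tag`, `Span`, and `TaggedItem` items described above):

```rust
use nu_source::{Span, Tag, TaggedItem};

fn tag_example() {
    // "typ" sits at bytes 9..12 of the line `ls | get typ`.
    let span = Span::new(9, 12);
    let tag = Tag { anchor: None, span };
    // `tagged` wraps the text together with where it came from.
    let _word = "typ".to_string().tagged(tag);
}
```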
Nu's metadata system can be seen when reporting errors.
In the following example Nu is able to report to the user where the typo of a column name originated from.

```
1 | ls | get typ
  |          ^^^ did you mean 'type'?
```

In addition to metadata tracking, `nu-source` also contains types and traits related to debugging, tracing, and formatting the metadata and values it processes.

## Other Resources

- [Nushell Github Project](https://github.com/nushell): Contains all projects in the Nushell ecosystem such as the source code to Nushell as well as website and books.
- [Nushell Git Repository](https://github.com/nushell/nushell): A direct link to the source git repository for Nushell
- [Nushell Contributor Book](https://github.com/nushell/contributor-book): An overview of topics about Nushell to help you get started contributing to the project.
- [Discord Channel](https://discordapp.com/invite/NtAbbGn)
- [Twitter](https://twitter.com/nu_shell)
crates/nu-source/build.rs (new file, 3 lines)
@@ -0,0 +1,3 @@
fn main() -> Result<(), Box<dyn std::error::Error>> {
    nu_build::build()
}