Mirror of https://github.com/nushell/nushell.git (synced 2025-07-01 07:00:37 +02:00)

Compare commits, 66 commits:

407f36af29, 763fcbc137, 7061af712e, 9b4ba09c95, 9ec6d0c90e, f20a4a42e8, caa6830184, f8be1becf2,
af51a0e6f0, 23d11d5e84, 6da9e2aced, 32dfb32741, d48f99cb0e, 35359cbc22, b52dbcc8ef, 4429a75e17,
583f27dc41, 83db5c34c3, cdbfdf282f, a5e1372bc2, 798a24eda5, a2bb23d78c, d38a63473b, 2b37ae3e81,
bc5a969562, fe4ad5f77e, 07191754bf, 66bd331ba9, 762c798670, 3c01526869, 7efb31a4e4, c8dd7838a8,
3b57ee5dda, fb977ab941, e059c74a06, 47d987d37f, 3abfefc025, a5c5b4e711, ba9cb753d5, ba7a1752db,
29431e73c2, d29fe6f6de, e2e9abab0a, 2956b0b087, b32eceffb3, 3adf52b1c4, 78a644da2b, 98028433ad,
2ab5803f00, 65980c7beb, 29fd8b55fb, 2f039b3abc, d3dae05714, 5fd3191d91, 0dcd90cb8f, 02d0a4107e,
63885c4ee6, 147bfefd7e, 60043df917, 6d3a30772d, 347f91ab53, 5692a08e7f, 515a3b33f8, c3e466e464,
00c0327031, 7451414b9e
Azure Pipelines CI configuration:

@@ -4,25 +4,25 @@ trigger:
 strategy:
 matrix:
 linux-stable:
-image: ubuntu-16.04
+image: ubuntu-18.04
 style: 'unflagged'
 macos-stable:
 image: macos-10.14
 style: 'unflagged'
 windows-stable:
-image: vs2017-win2016
+image: windows-2019
 style: 'unflagged'
 linux-nightly-canary:
-image: ubuntu-16.04
+image: ubuntu-18.04
 style: 'canary'
 macos-nightly-canary:
 image: macos-10.14
 style: 'canary'
 windows-nightly-canary:
-image: vs2017-win2016
+image: windows-2019
 style: 'canary'
 fmt:
-image: ubuntu-16.04
+image: ubuntu-18.04
 style: 'fmt'

 pool:
@@ -35,20 +35,22 @@ steps:
 then
 sudo apt-get -y install libxcb-composite0-dev libx11-dev
 fi
-curl https://sh.rustup.rs -sSf | sh -s -- -y --no-modify-path --default-toolchain "stable"
-export PATH=$HOME/.cargo/bin:$PATH
-rustup update
+if [ "$(uname)" == "Darwin" ]; then
+curl https://sh.rustup.rs -sSf | sh -s -- -y --no-modify-path --default-toolchain "stable"
+export PATH=$HOME/.cargo/bin:$PATH
+rustup update
+fi
 rustc -Vv
 echo "##vso[task.prependpath]$HOME/.cargo/bin"
-rustup component add rustfmt --toolchain "stable"
+rustup component add rustfmt
 displayName: Install Rust
-- bash: RUSTFLAGS="-D warnings" cargo test --all --features=stable
+- bash: RUSTFLAGS="-D warnings" cargo test --all --features stable,test-bins
 condition: eq(variables['style'], 'unflagged')
 displayName: Run tests
 - bash: RUSTFLAGS="-D warnings" cargo clippy --all --features=stable -- -D clippy::result_unwrap_used -D clippy::option_unwrap_used
 condition: eq(variables['style'], 'unflagged')
 displayName: Check clippy lints
-- bash: NUSHELL_ENABLE_ALL_FLAGS=1 RUSTFLAGS="-D warnings" cargo test --all --features=stable
+- bash: NUSHELL_ENABLE_ALL_FLAGS=1 RUSTFLAGS="-D warnings" cargo test --all --features stable,test-bins
 condition: eq(variables['style'], 'canary')
 displayName: Run tests
 - bash: NUSHELL_ENABLE_ALL_FLAGS=1 RUSTFLAGS="-D warnings" cargo clippy --all --features=stable -- -D clippy::result_unwrap_used -D clippy::option_unwrap_used
@@ -57,3 +59,4 @@ steps:
 - bash: cargo fmt --all -- --check
 condition: eq(variables['style'], 'fmt')
 displayName: Lint
+
.gitignore (vendored), 2 changes:

@@ -3,7 +3,7 @@
 **/*.rs.bk
 history.txt
 tests/fixtures/nuplayground
-
+crates/*/target
 # Debian/Ubuntu
 debian/.debhelper/
 debian/debhelper-build-stamp
Gitpod configuration:

@@ -1,7 +1,7 @@
 image:
 file: .gitpod.Dockerfile
 tasks:
-- init: cargo install nu --features=stable
+- init: cargo install --path . --force --features=stable
 command: nu
 github:
 prebuilds:
@@ -19,3 +19,10 @@ github:
 addBadge: false
 # add a label once the prebuild is ready to pull requests (defaults to false)
 addLabel: prebuilt-in-gitpod
+vscode:
+extensions:
+- hbenl.vscode-test-explorer@2.15.0:koqDUMWDPJzELp/hdS/lWw==
+- Swellaby.vscode-rust-test-adapter@0.11.0:Xg+YeZZQiVpVUsIkH+uiiw==
+- serayuzgur.crates@0.4.7:HMkoguLcXp9M3ud7ac3eIw==
+- belfz.search-crates-io@1.2.1:kSLnyrOhXtYPjQpKnMr4eQ==
+- vadimcn.vscode-lldb@1.4.5:lwHCNwtm0kmOBXeQUIPGMQ==
Cargo.lock (generated), 802 changes: file diff suppressed because it is too large.
Cargo.toml, 108 changes:

@@ -1,6 +1,6 @@
 [package]
 name = "nu"
-version = "0.8.0"
+version = "0.9.0"
 authors = ["Yehuda Katz <wycats@gmail.com>", "Jonathan Turner <jonathan.d.turner@gmail.com>", "Andrés N. Robalino <andres@androbtech.com>"]
 description = "A shell for the GitHub era"
 license = "MIT"
@@ -39,30 +39,29 @@ members = [
 # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

 [dependencies]
-nu-source = { version = "0.8.0", path = "./crates/nu-source" }
+nu-source = {version = "0.9.0", path = "./crates/nu-source"}
-nu-plugin = { version = "0.8.0", path = "./crates/nu-plugin" }
+nu-plugin = {version = "0.9.0", path = "./crates/nu-plugin"}
-nu-protocol = { version = "0.8.0", path = "./crates/nu-protocol" }
+nu-protocol = {version = "0.9.0", path = "./crates/nu-protocol"}
-nu-errors = { version = "0.8.0", path = "./crates/nu-errors" }
+nu-errors = {version = "0.9.0", path = "./crates/nu-errors"}
-nu-parser = { version = "0.8.0", path = "./crates/nu-parser" }
+nu-parser = {version = "0.9.0", path = "./crates/nu-parser"}
-nu-value-ext = { version = "0.8.0", path = "./crates/nu-value-ext" }
+nu-value-ext = {version = "0.9.0", path = "./crates/nu-value-ext"}
-nu_plugin_average = {version = "0.8.0", path = "./crates/nu_plugin_average", optional=true}
+nu_plugin_average = {version = "0.9.0", path = "./crates/nu_plugin_average", optional=true}
-nu_plugin_binaryview = {version = "0.8.0", path = "./crates/nu_plugin_binaryview", optional=true}
+nu_plugin_binaryview = {version = "0.9.0", path = "./crates/nu_plugin_binaryview", optional=true}
-nu_plugin_fetch = {version = "0.8.0", path = "./crates/nu_plugin_fetch", optional=true}
+nu_plugin_fetch = {version = "0.9.0", path = "./crates/nu_plugin_fetch", optional=true}
-nu_plugin_inc = {version = "0.8.0", path = "./crates/nu_plugin_inc", optional=true}
+nu_plugin_inc = {version = "0.9.0", path = "./crates/nu_plugin_inc", optional=true}
-nu_plugin_match = {version = "0.8.0", path = "./crates/nu_plugin_match", optional=true}
+nu_plugin_match = {version = "0.9.0", path = "./crates/nu_plugin_match", optional=true}
-nu_plugin_post = {version = "0.8.0", path = "./crates/nu_plugin_post", optional=true}
+nu_plugin_post = {version = "0.9.0", path = "./crates/nu_plugin_post", optional=true}
-nu_plugin_ps = {version = "0.8.0", path = "./crates/nu_plugin_ps", optional=true}
+nu_plugin_ps = {version = "0.9.0", path = "./crates/nu_plugin_ps", optional=true}
-nu_plugin_str = {version = "0.8.0", path = "./crates/nu_plugin_str", optional=true}
+nu_plugin_str = {version = "0.9.0", path = "./crates/nu_plugin_str", optional=true}
-nu_plugin_sum = {version = "0.8.0", path = "./crates/nu_plugin_sum", optional=true}
+nu_plugin_sum = {version = "0.9.0", path = "./crates/nu_plugin_sum", optional=true}
-nu_plugin_sys = {version = "0.8.0", path = "./crates/nu_plugin_sys", optional=true}
+nu_plugin_sys = {version = "0.9.0", path = "./crates/nu_plugin_sys", optional=true}
-nu_plugin_textview = {version = "0.8.0", path = "./crates/nu_plugin_textview", optional=true}
+nu_plugin_textview = {version = "0.9.0", path = "./crates/nu_plugin_textview", optional=true}
-nu_plugin_tree = {version = "0.8.0", path = "./crates/nu_plugin_tree", optional=true}
+nu_plugin_tree = {version = "0.9.0", path = "./crates/nu_plugin_tree", optional=true}
-nu-macros = { version = "0.8.0", path = "./crates/nu-macros" }
+nu-macros = { version = "0.9.0", path = "./crates/nu-macros" }
-

 query_interface = "0.3.5"
 typetag = "0.1.4"
-rustyline = "5.0.6"
+rustyline = "6.0.0"
 chrono = { version = "0.4.10", features = ["serde"] }
 derive-new = "0.5.8"
 prettytable-rs = "0.8.0"
@@ -70,19 +69,18 @@ itertools = "0.8.2"
 ansi_term = "0.12.1"
 nom = "5.0.1"
 dunce = "1.0.0"
-indexmap = { version = "1.3.0", features = ["serde-1"] }
+indexmap = { version = "1.3.1", features = ["serde-1"] }
-chrono-humanize = "0.0.11"
 byte-unit = "3.0.3"
 base64 = "0.11"
 futures-preview = { version = "=0.3.0-alpha.19", features = ["compat", "io-compat"] }
 async-stream = "0.1.2"
 futures_codec = "0.2.5"
-num-traits = "0.2.10"
+num-traits = "0.2.11"
 term = "0.5.2"
 bytes = "0.4.12"
 log = "0.4.8"
 pretty_env_logger = "0.3.1"
-serde = { version = "1.0.103", features = ["derive"] }
+serde = { version = "1.0.104", features = ["derive"] }
 bson = { version = "0.14.0", features = ["decimal128"] }
 serde_json = "1.0.44"
 serde-hjson = "0.9.1"
@@ -92,26 +90,26 @@ getset = "0.0.9"
 language-reporting = "0.4.0"
 app_dirs = "1.2.1"
 csv = "1.1"
-toml = "0.5.5"
+toml = "0.5.6"
 clap = "2.33.0"
-git2 = { version = "0.10.2", default_features = false }
+git2 = { version = "0.11.0", default_features = false }
 dirs = "2.0.2"
 glob = "0.3.0"
 ctrlc = "3.1.3"
-roxmltree = "0.7.3"
+roxmltree = "0.9.0"
 nom_locate = "1.0.0"
 nom-tracable = "0.4.1"
 unicode-xid = "0.2.0"
 serde_ini = "0.2.0"
-subprocess = "0.1.18"
 pretty-hex = "0.1.1"
 hex = "0.4"
 tempfile = "3.1.0"
-ichwh = "0.2"
+which = "3.1.0"
+ichwh = "0.3"
 textwrap = {version = "0.11.0", features = ["term_size"]}
-shellexpand = "1.0.0"
+shellexpand = "1.1.1"
 pin-utils = "0.1.0-alpha.4"
-num-bigint = { version = "0.2.3", features = ["serde"] }
+num-bigint = { version = "0.2.5", features = ["serde"] }
 bigdecimal = { version = "0.1.0", features = ["serde"] }
 serde_urlencoded = "0.6.1"
 trash = "1.0.0"
@@ -121,28 +119,35 @@ strip-ansi-escapes = "0.1.0"
 calamine = "0.16"
 umask = "0.1"
 futures-util = "0.3.1"
-termcolor = "1.0.5"
+termcolor = "1.1.0"
 natural = "0.3.0"
 parking_lot = "0.10.0"
+futures-timer = "1.0.2"

 clipboard = {version = "0.5", optional = true }
 ptree = {version = "0.2" }
-starship = { version = "0.28", optional = true}
+starship = { version = "0.33.1", optional = true}
 heim = {version = "0.0.9", optional = true}
 battery = {version = "0.7.5", optional = true}
 syntect = {version = "3.2.0", optional = true }
 onig_sys = {version = "=69.1.0", optional = true }
-crossterm = {version = "0.10.2", optional = true}
+crossterm = {version = "0.14.2", optional = true}
-futures-timer = {version = "1.0.2", optional = true}
+url = {version = "2.1.1", optional = true}
-url = {version = "2.1.0", optional = true}
+semver = {version = "0.9.0", optional = true}

+[target.'cfg(unix)'.dependencies]
+users = "0.9"
+
 [features]
+# Test executables
+test-bins = []
+
 default = ["sys", "ps", "textview", "inc", "str"]
-stable = ["sys", "ps", "textview", "inc", "str", "starship-prompt", "binaryview", "match", "tree", "average", "sum", "post", "fetch", "clipboard"]
+stable = ["default", "starship-prompt", "binaryview", "match", "tree", "average", "sum", "post", "fetch", "clipboard"]

 # Default
 sys = ["heim", "battery"]
-ps = ["heim", "futures-timer"]
+ps = ["heim"]
 textview = ["crossterm", "syntect", "onig_sys", "url"]
 inc = ["nu_plugin_inc"]
 str = ["nu_plugin_str"]
@@ -164,18 +169,33 @@ features = ["bundled", "blob"]

 [dev-dependencies]
 pretty_assertions = "0.6.1"
-nu-test-support = { version = "0.8.0", path = "./crates/nu-test-support" }
+nu-test-support = { version = "0.9.0", path = "./crates/nu-test-support" }

 [build-dependencies]
-toml = "0.5.5"
+toml = "0.5.6"
-serde = { version = "1.0.103", features = ["derive"] }
+serde = { version = "1.0.104", features = ["derive"] }
-nu-build = { version = "0.8.0", path = "./crates/nu-build" }
+nu-build = { version = "0.9.0", path = "./crates/nu-build" }

 [lib]
 name = "nu"
 doctest = false
 path = "src/lib.rs"

+[[bin]]
+name = "fail"
+path = "crates/nu-test-support/src/bins/fail.rs"
+required-features = ["test-bins"]
+
+[[bin]]
+name = "chop"
+path = "crates/nu-test-support/src/bins/chop.rs"
+required-features = ["test-bins"]
+
+[[bin]]
+name = "cococo"
+path = "crates/nu-test-support/src/bins/cococo.rs"
+required-features = ["test-bins"]
+
 # Core plugins that ship with `cargo install nu` by default
 # Currently, Cargo limits us to installing only one binary
 # unless we use [[bin]], so we use this as a workaround
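The new `[[bin]]` targets (`fail`, `chop`, `cococo`) are plain Rust programs gated behind the new `test-bins` feature, which is why CI now runs `cargo test --all --features stable,test-bins`. Their contents are not part of this diff; as a minimal sketch under that assumption, a helper like `cococo` could be little more than an argument echo that the integration tests invoke as a predictable "external" command:

```rust
// Hypothetical sketch of a test helper binary such as
// crates/nu-test-support/src/bins/cococo.rs. The real file is not shown in
// this diff; this only illustrates the `required-features = ["test-bins"]`
// pattern: a tiny program with completely predictable output.
use std::env;

fn main() {
    // Echo all arguments back, separated by spaces, so a test can pipe known
    // input through an "external" command and assert on the result.
    let args: Vec<String> = env::args().skip(1).collect();
    if args.is_empty() {
        println!("cococo");
    } else {
        println!("{}", args.join(" "));
    }
}
```

Because each target declares `required-features = ["test-bins"]`, a plain `cargo build` or `cargo install nu` skips these binaries; they are only compiled when the feature is enabled, as in the updated CI invocation.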
README.md, 141 changes:

@@ -1,3 +1,4 @@
+[](https://gitpod.io/#https://github.com/nushell/nushell)
 [](https://crates.io/crates/nu)
 [](https://dev.azure.com/nushell/nushell/_build/latest?definitionId=2&branchName=master)
 [](https://discord.gg/NtAbbGn)
@@ -6,7 +7,7 @@

 # Nu Shell

-A modern shell for the GitHub era.
+A new type of shell.



@@ -24,6 +25,8 @@ If you're a developer who would like to contribute to Nu, we're also working on

 We also have an active [Discord](https://discord.gg/NtAbbGn) and [Twitter](https://twitter.com/nu_shell) if you'd like to come and chat with us.

+You can also find more learning resources in our [documentation](https://www.nushell.sh/documentation.html) site.
+
 Try it in Gitpod.

 [](https://gitpod.io/#https://github.com/nushell/nushell)
@@ -118,7 +121,7 @@ Commands are separated by the pipe symbol (`|`) to denote a pipeline flowing lef

 ```
 /home/jonathan/Source/nushell(master)> ls | where type == "Directory" | autoview
-━━━━┯━━━━━━━━━━━┯━━━━━━━━━━━┯━━━━━━━━━━┯━━━━━━━━┯━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━
+────┬───────────┬───────────┬──────────┬────────┬──────────────┬────────────────
 # │ name │ type │ readonly │ size │ accessed │ modified
 ────┼───────────┼───────────┼──────────┼────────┼──────────────┼────────────────
 0 │ .azure │ Directory │ │ 4.1 KB │ 2 months ago │ a day ago
@@ -129,7 +132,7 @@ Commands are separated by the pipe symbol (`|`) to denote a pipeline flowing lef
 5 │ src │ Directory │ │ 4.1 KB │ 2 months ago │ 37 minutes ago
 6 │ assets │ Directory │ │ 4.1 KB │ a month ago │ a month ago
 7 │ docs │ Directory │ │ 4.1 KB │ 2 months ago │ 2 months ago
-━━━━┷━━━━━━━━━━━┷━━━━━━━━━━━┷━━━━━━━━━━┷━━━━━━━━┷━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━
+────┴───────────┴───────────┴──────────┴────────┴──────────────┴────────────────
 ```

 Because most of the time you'll want to see the output of a pipeline, `autoview` is assumed. We could have also written the above:
@@ -142,15 +145,14 @@ Being able to use the same commands and compose them differently is an important

 ```text
 /home/jonathan/Source/nushell(master)> ps | where cpu > 0
-━━━┯━━━━━━━┯━━━━━━━━━━━━━━━━━┯━━━━━━━━━━┯━━━━━━━━━━
+───┬───────┬─────────────────┬──────────┬──────────
 # │ pid │ name │ status │ cpu
 ───┼───────┼─────────────────┼──────────┼──────────
 0 │ 992 │ chrome │ Sleeping │ 6.988768
 1 │ 4240 │ chrome │ Sleeping │ 5.645982
 2 │ 13973 │ qemu-system-x86 │ Sleeping │ 4.996551
 3 │ 15746 │ nu │ Sleeping │ 84.59905
-━━━┷━━━━━━━┷━━━━━━━━━━━━━━━━━┷━━━━━━━━━━┷━━━━━━━━━━
+───┴───────┴─────────────────┴──────────┴──────────
-
 ```

 ## Opening files
@@ -159,29 +161,29 @@ Nu can load file and URL contents as raw text or as structured data (if it recog

 ```
 /home/jonathan/Source/nushell(master)> open Cargo.toml
-━━━━━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━━━
+──────────────────┬────────────────┬──────────────────
 bin │ dependencies │ dev-dependencies
 ──────────────────┼────────────────┼──────────────────
 [table: 12 rows] │ [table: 1 row] │ [table: 1 row]
-━━━━━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━━━
+──────────────────┴────────────────┴──────────────────
 ```

 We can pipeline this into a command that gets the contents of one of the columns:

 ```
 /home/jonathan/Source/nushell(master)> open Cargo.toml | get package
-━━━━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━━━━━━━━━━━━━┯━━━━━━━━━┯━━━━━━━━━┯━━━━━━┯━━━━━━━━━
+─────────────────┬────────────────────────────┬─────────┬─────────┬──────┬─────────
 authors │ description │ edition │ license │ name │ version
 ─────────────────┼────────────────────────────┼─────────┼─────────┼──────┼─────────
-[table: 3 rows] │ A shell for the GitHub era │ 2018 │ MIT │ nu │ 0.6.1
+[table: 3 rows] │ A shell for the GitHub era │ 2018 │ MIT │ nu │ 0.9.0
-━━━━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━━━━━━━━━━━━━┷━━━━━━━━━┷━━━━━━━━━┷━━━━━━┷━━━━━━━━━
+─────────────────┴────────────────────────────┴─────────┴─────────┴──────┴─────────
 ```

 Finally, we can use commands outside of Nu once we have the data we want:

 ```
 /home/jonathan/Source/nushell(master)> open Cargo.toml | get package.version | echo $it
-0.6.1
+0.9.0
 ```

 Here we use the variable `$it` to refer to the value being piped to the external command.
@@ -190,13 +192,14 @@ Here we use the variable `$it` to refer to the value being piped to the external

 Nu has early support for configuring the shell. It currently supports the following settings:

 | Variable | Type | Description |
-| ------------- | ------------- | ----- |
+| --------------- | -------------------- | -------------------------------------------------------------- |
 | path | table of strings | PATH to use to find binaries |
 | env | row | the environment variables to pass to external commands |
 | ctrlc_exit | boolean | whether or not to exit Nu after multiple ctrl-c presses |
 | table_mode | "light" or other | enable lightweight or normal tables |
 | edit_mode | "vi" or "emacs" | changes line editing to "vi" or "emacs" mode |
+| completion_mode | "circular" or "list" | changes completion type to "circular" (default) or "list" mode |

 To set one of these variables, you can use `config --set`. For example:

@@ -236,106 +239,8 @@ Nu adheres closely to a set of goals that make up its design philosophy. As feat
 * Finally, Nu views data functionally. Rather than using mutation, pipelines act as a means to load, change, and save data without mutable state.

 # Commands
-## Initial commands
-| command | description |
-| ------------- | ------------- |
-| cd path | Change to a new path |
-| cp source path | Copy files |
-| date (--utc) | Get the current datetime |
-| fetch url | Fetch contents from a url and retrieve data as a table if possible |
-| help | Display help information about commands |
-| ls (path) | View the contents of the current or given path |
-| mkdir path | Make directories, creates intermediary directories as required. |
-| mv source target | Move files or directories. |
-| open filename | Load a file into a cell, convert to table if possible (avoid by appending '--raw') |
-| post url body (--user <user>) (--password <password>) | Post content to a url and retrieve data as a table if possible |
-| ps | View current processes |
-| sys | View information about the current system |
-| which filename | Finds a program file. |
-| rm {file or directory} | Remove a file, (for removing directory append '--recursive') |
-| version | Display Nu version |

-## Shell commands
+You can find a list of Nu commands, complete with documentation, in [quick command references](https://www.nushell.sh/documentation.html#quick-command-references).
-| command | description |
-| ------- | ----------- |
-| exit (--now) | Exit the current shell (or all shells) |
-| enter (path) | Create a new shell and begin at this path |
-| p | Go to previous shell |
-| n | Go to next shell |
-| shells | Display the list of current shells |
-
-## Filters on tables (structured data)
-| command | description |
-| ------------- | ------------- |
-| append row-data | Append a row to the end of the table |
-| compact ...columns | Remove rows where given columns are empty |
-| count | Show the total number of rows |
-| default column row-data | Sets a default row's column if missing |
-| edit column-or-column-path value | Edit an existing column to have a new value |
-| embed column | Creates a new table of one column with the given name, and places the current table inside of it |
-| first amount | Show only the first number of rows |
-| format pattern | Format table row data as a string following the given pattern |
-| get column-or-column-path | Open column and get data from the corresponding cells |
-| group-by column | Creates a new table with the data from the table rows grouped by the column given |
-| histogram column ...column-names | Creates a new table with a histogram based on the column name passed in, optionally give the frequency column name
-| inc (column-or-column-path) | Increment a value or version. Optionally use the column of a table |
-| insert column-or-column-path value | Insert a new column to the table |
-| last amount | Show only the last number of rows |
-| nth ...row-numbers | Return only the selected rows |
-| pick ...columns | Down-select table to only these columns |
-| pivot --header-row <headers> | Pivot the tables, making columns into rows and vice versa |
-| prepend row-data | Prepend a row to the beginning of the table |
-| reject ...columns | Remove the given columns from the table |
-| reverse | Reverses the table. |
-| skip amount | Skip a number of rows |
-| skip-while condition | Skips rows while the condition matches |
-| split-by column | Creates a new table with the data from the inner tables splitted by the column given |
-| sort-by ...columns | Sort by the given columns |
-| str (column) | Apply string function. Optionally use the column of a table |
-| sum | Sum a column of values |
-| tags | Read the tags (metadata) for values |
-| to-bson | Convert table into .bson binary data |
-| to-csv | Convert table into .csv text |
-| to-json | Convert table into .json text |
-| to-sqlite | Convert table to sqlite .db binary data |
-| to-toml | Convert table into .toml text |
-| to-tsv | Convert table into .tsv text |
-| to-url | Convert table to a urlencoded string |
-| to-yaml | Convert table into .yaml text |
-| where condition | Filter table to match the condition |
-
-## Filters on text (unstructured data)
-| command | description |
-| ------------- | ------------- |
-| from-bson | Parse binary data as .bson and create table |
-| from-csv | Parse text as .csv and create table |
-| from-ini | Parse text as .ini and create table |
-| from-json | Parse text as .json and create table |
-| from-sqlite | Parse binary data as sqlite .db and create table |
-| from-ssv --minimum-spaces <minimum number of spaces to count as a separator> | Parse text as space-separated values and create table |
-| from-toml | Parse text as .toml and create table |
-| from-tsv | Parse text as .tsv and create table |
-| from-url | Parse urlencoded string and create a table |
-| from-xml | Parse text as .xml and create a table |
-| from-yaml | Parse text as a .yaml/.yml and create a table |
-| lines | Split single string into rows, one per line |
-| parse pattern | Convert text to a table by matching the given pattern |
-| size | Gather word count statistics on the text |
-| split-column sep ...column-names | Split row contents across multiple columns via the separator, optionally give the columns names |
-| split-row sep | Split row contents over multiple rows via the separator |
-| trim | Trim leading and following whitespace from text data |
-| {external-command} $it | Run external command with given arguments, replacing $it with each row text |
-
-## Consuming commands
-| command | description |
-| ------------- | ------------- |
-| autoview | View the contents of the pipeline as a table or list |
-| binaryview | Autoview of binary data (optional feature) |
-| clip | Copy the contents of the pipeline to the copy/paste buffer (optional feature) |
-| save filename | Save the contents of the pipeline to a file |
-| table | View the contents of the pipeline as a table |
-| textview | Autoview of text data |
-| tree | View the contents of the pipeline as a tree (optional feature) |

 # License

TODO.md, 8 changes:

@@ -50,3 +50,11 @@ textview in own crate
 Combine atomic and atomic_parse in parser

 at_end_possible_ws needs to be comment and separator sensitive
+
+Eliminate unnecessary `nodes` parser
+
+#[derive(HasSpan)]
+
+Figure out a solution for the duplication in stuff like NumberShape vs. NumberExpressionShape
+
+use `struct Expander` from signature.rs
crates/nu-build/Cargo.toml:

@@ -1,6 +1,6 @@
 [package]
 name = "nu-build"
-version = "0.8.0"
+version = "0.9.0"
 authors = ["Yehuda Katz <wycats@gmail.com>", "Jonathan Turner <jonathan.d.turner@gmail.com>", "Andrés N. Robalino <andres@androbtech.com>"]
 edition = "2018"
 description = "Core build system for nushell"
crates/nu-errors/Cargo.toml:

@@ -1,6 +1,6 @@
 [package]
 name = "nu-errors"
-version = "0.8.0"
+version = "0.9.0"
 authors = ["Yehuda Katz <wycats@gmail.com>", "Jonathan Turner <jonathan.d.turner@gmail.com>", "Andrés N. Robalino <andres@androbtech.com>"]
 edition = "2018"
 description = "Core error subsystem for Nushell"
@@ -10,7 +10,7 @@ license = "MIT"
 doctest = false

 [dependencies]
-nu-source = { path = "../nu-source", version = "0.8.0" }
+nu-source = { path = "../nu-source", version = "0.9.0" }

 ansi_term = "0.12.1"
 bigdecimal = { version = "0.1.0", features = ["serde"] }
@@ -21,12 +21,12 @@ num-traits = "0.2.10"
 serde = { version = "1.0.103", features = ["derive"] }
 nom = "5.0.1"
 nom_locate = "1.0.0"
+getset = "0.0.9"

 # implement conversions
-subprocess = "0.1.18"
 serde_yaml = "0.8"
 toml = "0.5.5"
 serde_json = "1.0.44"

 [build-dependencies]
-nu-build = { version = "0.8.0", path = "../nu-build" }
+nu-build = { version = "0.9.0", path = "../nu-build" }
nu-errors source (ShellError and ParseError definitions):

@@ -1,8 +1,11 @@
 use ansi_term::Color;
 use bigdecimal::BigDecimal;
 use derive_new::new;
+use getset::Getters;
 use language_reporting::{Diagnostic, Label, Severity};
-use nu_source::{b, DebugDocBuilder, PrettyDebug, Span, Spanned, SpannedItem, TracableContext};
+use nu_source::{
+b, DebugDocBuilder, HasFallibleSpan, PrettyDebug, Span, Spanned, SpannedItem, TracableContext,
+};
 use num_bigint::BigInt;
 use num_traits::ToPrimitive;
 use serde::{Deserialize, Serialize};
@@ -12,16 +15,16 @@ use std::ops::Range;
 /// A structured reason for a ParseError. Note that parsing in nu is more like macro expansion in
 /// other languages, so the kinds of errors that can occur during parsing are more contextual than
 /// you might expect.
-#[derive(Debug, Clone)]
+#[derive(Debug, Clone, Eq, PartialEq)]
 pub enum ParseErrorReason {
 /// The parser encountered an EOF rather than what it was expecting
-Eof { expected: &'static str, span: Span },
+Eof { expected: String, span: Span },
 /// The parser expected to see the end of a token stream (possibly the token
 /// stream from inside a delimited token node), but found something else.
 ExtraTokens { actual: Spanned<String> },
 /// The parser encountered something other than what it was expecting
 Mismatch {
-expected: &'static str,
+expected: String,
 actual: Spanned<String>,
 },

@@ -37,16 +40,20 @@ pub enum ParseErrorReason {
 }

 /// A newtype for `ParseErrorReason`
-#[derive(Debug, Clone)]
+#[derive(Debug, Clone, Eq, PartialEq, Getters)]
 pub struct ParseError {
+#[get = "pub"]
 reason: ParseErrorReason,
 }

 impl ParseError {
 /// Construct a [ParseErrorReason::Eof](ParseErrorReason::Eof)
-pub fn unexpected_eof(expected: &'static str, span: Span) -> ParseError {
+pub fn unexpected_eof(expected: impl Into<String>, span: Span) -> ParseError {
 ParseError {
-reason: ParseErrorReason::Eof { expected, span },
+reason: ParseErrorReason::Eof {
+expected: expected.into(),
+span,
+},
 }
 }

@@ -62,12 +69,12 @@ impl ParseError {
 }

 /// Construct a [ParseErrorReason::Mismatch](ParseErrorReason::Mismatch)
-pub fn mismatch(expected: &'static str, actual: Spanned<impl Into<String>>) -> ParseError {
+pub fn mismatch(expected: impl Into<String>, actual: Spanned<impl Into<String>>) -> ParseError {
 let Spanned { span, item } = actual;

 ParseError {
 reason: ParseErrorReason::Mismatch {
-expected,
+expected: expected.into(),
 actual: item.into().spanned(span),
 },
 }
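The `ParseError` constructors above move from `expected: &'static str` to `expected: impl Into<String>`, so existing call sites with string literals keep compiling while new ones can build the expectation text at runtime; that is also why `ParseErrorReason::Eof` and `Mismatch` now store `String`. A minimal sketch of that call-site flexibility, using a stripped-down, hypothetical stand-in for the real constructor (the `Spanned` argument is omitted here):

```rust
// Sketch only: mirrors the `expected: impl Into<String>` parameter added in
// this diff, not the real ParseError API.
fn mismatch(expected: impl Into<String>) -> String {
    // Both `&str` literals and owned, formatted strings convert the same way.
    expected.into()
}

fn main() {
    // Previously only a 'static literal was accepted:
    let a = mismatch("whole number");
    // Now the expectation can be computed at runtime as well:
    let wanted = "row";
    let b = mismatch(format!("a {} with a name column", wanted));
    println!("{} / {}", a, b);
}
```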
@ -304,6 +311,7 @@ impl serde::de::Error for ShellError {
|
|||||||
}
|
}
|
||||||
|
|
||||||
impl ShellError {
|
impl ShellError {
|
||||||
|
/// An error that describes a mismatch between the given type and the expected type
|
||||||
pub fn type_error(
|
pub fn type_error(
|
||||||
expected: impl Into<String>,
|
expected: impl Into<String>,
|
||||||
actual: Spanned<impl Into<String>>,
|
actual: Spanned<impl Into<String>>,
|
||||||
@ -449,8 +457,8 @@ impl ShellError {
|
|||||||
format!(
|
format!(
|
||||||
"{} requires {}{}",
|
"{} requires {}{}",
|
||||||
Color::Cyan.paint(&command.item),
|
Color::Cyan.paint(&command.item),
|
||||||
Color::Black.bold().paint("--"),
|
Color::Green.bold().paint("--"),
|
||||||
Color::Black.bold().paint(name)
|
Color::Green.bold().paint(name)
|
||||||
),
|
),
|
||||||
)
|
)
|
||||||
.with_label(Label::new_primary(command.span)),
|
.with_label(Label::new_primary(command.span)),
|
||||||
@ -470,8 +478,8 @@ impl ShellError {
|
|||||||
format!(
|
format!(
|
||||||
"{} is missing value for flag {}{}",
|
"{} is missing value for flag {}{}",
|
||||||
Color::Cyan.paint(&command.item),
|
Color::Cyan.paint(&command.item),
|
||||||
Color::Black.bold().paint("--"),
|
Color::Green.bold().paint("--"),
|
||||||
Color::Black.bold().paint(name)
|
Color::Green.bold().paint(name)
|
||||||
),
|
),
|
||||||
)
|
)
|
||||||
.with_label(Label::new_primary(command.span)),
|
.with_label(Label::new_primary(command.span)),
|
||||||
@ -727,6 +735,30 @@ impl ProximateShellError {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
impl HasFallibleSpan for ShellError {
|
||||||
|
fn maybe_span(&self) -> Option<Span> {
|
||||||
|
self.error.maybe_span()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl HasFallibleSpan for ProximateShellError {
|
||||||
|
fn maybe_span(&self) -> Option<Span> {
|
||||||
|
Some(match self {
|
||||||
|
ProximateShellError::SyntaxError { problem } => problem.span,
|
||||||
|
ProximateShellError::UnexpectedEof { span, .. } => *span,
|
||||||
|
ProximateShellError::TypeError { actual, .. } => actual.span,
|
||||||
|
ProximateShellError::MissingProperty { subpath, .. } => subpath.span,
|
||||||
|
ProximateShellError::InvalidIntegerIndex { subpath, .. } => subpath.span,
|
||||||
|
ProximateShellError::MissingValue { span, .. } => return *span,
|
||||||
|
ProximateShellError::ArgumentError { command, .. } => command.span,
|
||||||
|
ProximateShellError::RangeError { actual_kind, .. } => actual_kind.span,
|
||||||
|
ProximateShellError::Diagnostic(_) => return None,
|
||||||
|
ProximateShellError::CoerceError { left, right } => left.span.until(right.span),
|
||||||
|
ProximateShellError::UntaggedRuntimeError { .. } => return None,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||||
pub struct ShellDiagnostic {
|
pub struct ShellDiagnostic {
|
||||||
pub(crate) diagnostic: Diagnostic<Span>,
|
pub(crate) diagnostic: Diagnostic<Span>,
|
||||||
@ -789,12 +821,6 @@ impl std::convert::From<std::io::Error> for ShellError {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl std::convert::From<subprocess::PopenError> for ShellError {
|
|
||||||
fn from(input: subprocess::PopenError) -> ShellError {
|
|
||||||
ShellError::untagged_runtime_error(format!("{}", input))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl std::convert::From<serde_yaml::Error> for ShellError {
|
impl std::convert::From<serde_yaml::Error> for ShellError {
|
||||||
fn from(input: serde_yaml::Error) -> ShellError {
|
fn from(input: serde_yaml::Error) -> ShellError {
|
||||||
ShellError::untagged_runtime_error(format!("{:?}", input))
|
ShellError::untagged_runtime_error(format!("{:?}", input))
|
||||||
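The new `HasFallibleSpan` implementations give every `ShellError` a best-effort source location: variants that carry spans return one, while `Diagnostic` and `UntaggedRuntimeError` return `None`. The reporting side is not part of this diff, so here is a self-contained toy sketch of how such an API is typically consumed:

```rust
// Toy mirror of the `HasFallibleSpan` idea from this diff: errors expose a
// best-effort `maybe_span()`, and reporting code decides what to do when no
// span is available. The real trait and Span type live in nu-source.
#[derive(Debug, Clone, Copy)]
struct Span { start: usize, end: usize }

trait HasFallibleSpan {
    fn maybe_span(&self) -> Option<Span>;
}

enum ToyError {
    Syntax { span: Span },  // carries a location, like SyntaxError in the diff
    Untagged,               // no location, like UntaggedRuntimeError
}

impl HasFallibleSpan for ToyError {
    fn maybe_span(&self) -> Option<Span> {
        match self {
            ToyError::Syntax { span } => Some(*span),
            ToyError::Untagged => None,
        }
    }
}

fn report(err: &impl HasFallibleSpan) {
    match err.maybe_span() {
        Some(span) => eprintln!("error at bytes {}..{}", span.start, span.end),
        None => eprintln!("error (no source location available)"),
    }
}

fn main() {
    report(&ToyError::Syntax { span: Span { start: 3, end: 9 } });
    report(&ToyError::Untagged);
}
```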
crates/nu-macros/Cargo.toml:

@@ -1,6 +1,6 @@
 [package]
 name = "nu-macros"
-version = "0.8.0"
+version = "0.9.0"
 authors = ["Yehuda Katz <wycats@gmail.com>", "Jonathan Turner <jonathan.d.turner@gmail.com>", "Andrés N. Robalino <andres@androbtech.com>"]
 edition = "2018"
 description = "Core macros for building Nushell"
@@ -10,4 +10,4 @@ license = "MIT"
 doctest = false

 [dependencies]
-nu-protocol = { path = "../nu-protocol", version = "0.8.0" }
+nu-protocol = { path = "../nu-protocol", version = "0.9.0" }
crates/nu-parser/Cargo.toml:

@@ -1,6 +1,6 @@
 [package]
 name = "nu-parser"
-version = "0.8.0"
+version = "0.9.0"
 authors = ["Yehuda Katz <wycats@gmail.com>", "Jonathan Turner <jonathan.d.turner@gmail.com>", "Andrés N. Robalino <andres@androbtech.com>"]
 edition = "2018"
 description = "Core parser used in Nushell"
@@ -10,9 +10,9 @@ license = "MIT"
 doctest = false

 [dependencies]
-nu-errors = { path = "../nu-errors", version = "0.8.0" }
+nu-errors = { path = "../nu-errors", version = "0.9.0" }
-nu-source = { path = "../nu-source", version = "0.8.0" }
+nu-source = { path = "../nu-source", version = "0.9.0" }
-nu-protocol = { path = "../nu-protocol", version = "0.8.0" }
+nu-protocol = { path = "../nu-protocol", version = "0.9.0" }

 pretty_env_logger = "0.3.1"
 pretty = "0.5.2"
@@ -41,7 +41,7 @@ enumflags2 = "0.6.2"
 pretty_assertions = "0.6.1"

 [build-dependencies]
-nu-build = { version = "0.8.0", path = "../nu-build" }
+nu-build = { version = "0.9.0", path = "../nu-build" }

 [features]
 stable = []
nu-parser source (external command parsing):

@@ -3,7 +3,6 @@ pub mod classified;
 use crate::commands::classified::external::{ExternalArg, ExternalArgs, ExternalCommand};
 use crate::commands::classified::ClassifiedCommand;
 use crate::hir::expand_external_tokens::ExternalTokensShape;
-use crate::hir::syntax_shape::{expand_syntax, ExpandContext};
 use crate::hir::tokens_iterator::TokensIterator;
 use nu_errors::ParseError;
 use nu_source::{Spanned, Tagged};
@@ -13,10 +12,10 @@ use nu_source::{Spanned, Tagged};
 // strings.
 pub(crate) fn external_command(
 tokens: &mut TokensIterator,
-context: &ExpandContext,
 name: Tagged<&str>,
 ) -> Result<ClassifiedCommand, ParseError> {
-let Spanned { item, span } = expand_syntax(&ExternalTokensShape, tokens, context)?.tokens;
+let Spanned { item, span } = tokens.expand_infallible(ExternalTokensShape).tokens;
+let full_span = name.span().until(span);

 Ok(ClassifiedCommand::External(ExternalCommand {
 name: name.to_string(),
@@ -29,7 +28,7 @@ pub(crate) fn external_command(
 arg: x.item.clone(),
 })
 .collect(),
-span,
+span: full_span,
 },
 }))
 }
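The parser change above records `name.span().until(span)` as the external command's span, so the stored span covers the command name through its arguments rather than the arguments alone. A toy sketch of that span-combining idea, assuming the usual "start of the first span to end of the second" behaviour; the real `Span` type lives in nu-source and only the call site appears in this diff:

```rust
// Illustrative only: a toy span type showing what `name.span().until(span)`
// is expected to produce, one span stretching from the start of the command
// name to the end of its arguments.
#[derive(Debug, Clone, Copy, PartialEq)]
struct Span { start: usize, end: usize }

impl Span {
    fn until(self, other: Span) -> Span {
        Span { start: self.start, end: other.end }
    }
}

fn main() {
    let name = Span { start: 0, end: 4 };  // the external command's name
    let args = Span { start: 5, end: 12 }; // its arguments
    let full = name.until(args);
    assert_eq!(full, Span { start: 0, end: 12 });
    println!("{:?}", full);
}
```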
nu-parser source (classified commands):

@@ -4,18 +4,20 @@ pub mod internal;
 use crate::commands::classified::external::ExternalCommand;
 use crate::commands::classified::internal::InternalCommand;
 use crate::hir;
-use crate::parse::token_tree::TokenNode;
+use crate::parse::token_tree::SpannedToken;
 use derive_new::new;
+use nu_errors::ParseError;
 use nu_source::{b, DebugDocBuilder, HasSpan, PrettyDebugWithSource, Span};

 #[derive(Debug, Clone, Eq, PartialEq)]
 pub enum ClassifiedCommand {
 #[allow(unused)]
-Expr(TokenNode),
+Expr(SpannedToken),
 #[allow(unused)]
 Dynamic(hir::Call),
 Internal(InternalCommand),
 External(ExternalCommand),
+Error(ParseError),
 }

 impl PrettyDebugWithSource for ClassifiedCommand {
@@ -23,6 +25,7 @@ impl PrettyDebugWithSource for ClassifiedCommand {
 match self {
 ClassifiedCommand::Expr(token) => b::typed("command", token.pretty_debug(source)),
 ClassifiedCommand::Dynamic(call) => b::typed("command", call.pretty_debug(source)),
+ClassifiedCommand::Error(_) => b::error("no command"),
 ClassifiedCommand::Internal(internal) => internal.pretty_debug(source),
 ClassifiedCommand::External(external) => external.pretty_debug(source),
 }
@@ -35,6 +38,7 @@ impl HasSpan for ClassifiedCommand {
 ClassifiedCommand::Expr(node) => node.span(),
 ClassifiedCommand::Internal(command) => command.span(),
 ClassifiedCommand::Dynamic(call) => call.span,
+ClassifiedCommand::Error(_) => Span::unknown(),
 ClassifiedCommand::External(command) => command.span(),
 }
 }
@@ -62,6 +66,9 @@ impl std::ops::Deref for Commands {
 #[derive(Debug, Clone)]
 pub struct ClassifiedPipeline {
 pub commands: Commands,
+// this is not a Result to make it crystal clear that these shapes
+// aren't intended to be used directly with `?`
+pub failed: Option<nu_errors::ParseError>,
 }

 impl ClassifiedPipeline {
@@ -71,6 +78,7 @@ impl ClassifiedPipeline {
 list,
 span: span.into(),
 },
+failed: None,
 }
 }
 }
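`ClassifiedPipeline` now carries its parse failure as data (`failed: Option<ParseError>`), and the new comment stresses that this is deliberately not a `Result`. A self-contained toy sketch of how a consumer might branch on it; the field shape mirrors the diff, while the surrounding types and the evaluator function are hypothetical stand-ins:

```rust
// Toy mirror of the shape added in this diff: the pipeline keeps its parse
// error as data (`failed`) instead of returning a Result, so the caller
// decides explicitly how to surface it rather than propagating it with `?`.
struct ParseError(String);

struct ClassifiedPipeline {
    commands: Vec<String>,       // stand-in for the real Commands type
    failed: Option<ParseError>,  // the new field from this diff
}

fn evaluate(pipeline: ClassifiedPipeline) {
    if let Some(ParseError(reason)) = pipeline.failed {
        eprintln!("parse failed: {}", reason);
        return;
    }
    for command in pipeline.commands {
        println!("running {}", command);
    }
}

fn main() {
    evaluate(ClassifiedPipeline {
        commands: vec!["ls".into(), "where type == Directory".into()],
        failed: None,
    });
}
```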
nu-parser source (external command arguments):

@@ -6,6 +6,16 @@ pub struct ExternalArg {
 pub tag: Tag,
 }

+impl ExternalArg {
+pub fn has(&self, name: &str) -> bool {
+self.arg == name
+}
+
+pub fn is_it(&self) -> bool {
+self.has("$it")
+}
+}
+
 impl std::ops::Deref for ExternalArg {
 type Target = str;

@@ -42,6 +52,12 @@ pub struct ExternalCommand {
 pub args: ExternalArgs,
 }

+impl ExternalCommand {
+pub fn has_it_argument(&self) -> bool {
+self.args.iter().any(|arg| arg.has("$it"))
+}
+}
+
 impl PrettyDebug for ExternalCommand {
 fn pretty(&self) -> DebugDocBuilder {
 b::typed(
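The helpers added here make `$it` handling explicit: `ExternalArg::has` compares an argument's text, `is_it` specializes that to `$it`, and `ExternalCommand::has_it_argument` scans every argument. A toy mirror of that logic, runnable on its own; the real types also carry `Tag` metadata and an `ExternalArgs` wrapper, which are omitted here:

```rust
// Toy mirror of the `$it` helpers from this diff: `has` is a plain string
// compare, `is_it` specialises it to the `$it` variable, and
// `has_it_argument` asks whether any argument references `$it` (so the
// pipeline knows it must feed each row into the external command).
struct ExternalArg { arg: String }

impl ExternalArg {
    fn has(&self, name: &str) -> bool { self.arg == name }
    fn is_it(&self) -> bool { self.has("$it") }
}

struct ExternalCommand { args: Vec<ExternalArg> }

impl ExternalCommand {
    fn has_it_argument(&self) -> bool {
        self.args.iter().any(|arg| arg.has("$it"))
    }
}

fn main() {
    let cmd = ExternalCommand {
        args: vec![
            ExternalArg { arg: "echo".into() },
            ExternalArg { arg: "$it".into() },
        ],
    };
    assert!(cmd.args[1].is_it());
    assert!(cmd.has_it_argument());
    println!("$it checks passed");
}
```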
|
@ -5,7 +5,6 @@ pub(crate) mod external_command;
|
|||||||
pub(crate) mod named;
|
pub(crate) mod named;
|
||||||
pub(crate) mod path;
|
pub(crate) mod path;
|
||||||
pub(crate) mod range;
|
pub(crate) mod range;
|
||||||
pub(crate) mod signature;
|
|
||||||
pub mod syntax_shape;
|
pub mod syntax_shape;
|
||||||
pub(crate) mod tokens_iterator;
|
pub(crate) mod tokens_iterator;
|
||||||
|
|
||||||
@ -17,17 +16,17 @@ use derive_new::new;
|
|||||||
use getset::Getters;
|
use getset::Getters;
|
||||||
use nu_protocol::{PathMember, ShellTypeName};
|
use nu_protocol::{PathMember, ShellTypeName};
|
||||||
use nu_source::{
|
use nu_source::{
|
||||||
b, DebugDocBuilder, HasSpan, PrettyDebug, PrettyDebugWithSource, Span, Spanned, SpannedItem,
|
b, DebugDocBuilder, HasSpan, IntoSpanned, PrettyDebug, PrettyDebugRefineKind,
|
||||||
|
PrettyDebugWithSource, Span, Spanned,
|
||||||
};
|
};
|
||||||
use serde::{Deserialize, Serialize};
|
use serde::{Deserialize, Serialize};
|
||||||
use std::path::PathBuf;
|
use std::path::PathBuf;
|
||||||
|
|
||||||
use crate::parse::tokens::RawNumber;
|
use crate::parse::number::RawNumber;
|
||||||
|
|
||||||
pub(crate) use self::binary::Binary;
|
pub(crate) use self::binary::Binary;
|
||||||
pub(crate) use self::path::Path;
|
pub(crate) use self::path::Path;
|
||||||
pub(crate) use self::range::Range;
|
pub(crate) use self::range::Range;
|
||||||
pub(crate) use self::syntax_shape::ExpandContext;
|
|
||||||
pub(crate) use self::tokens_iterator::TokensIterator;
|
pub(crate) use self::tokens_iterator::TokensIterator;
|
||||||
|
|
||||||
pub use self::external_command::ExternalCommand;
|
pub use self::external_command::ExternalCommand;
|
||||||
@@ -63,44 +62,76 @@ impl PrettyDebugWithSource for Signature {
 #[derive(Debug, Clone, Eq, PartialEq, Getters, Serialize, Deserialize, new)]
 pub struct Call {
     #[get = "pub(crate)"]
-    pub head: Box<Expression>,
+    pub head: Box<SpannedExpression>,
     #[get = "pub(crate)"]
-    pub positional: Option<Vec<Expression>>,
+    pub positional: Option<Vec<SpannedExpression>>,
     #[get = "pub(crate)"]
     pub named: Option<NamedArguments>,
     pub span: Span,
 }
 
+impl Call {
+    pub fn switch_preset(&self, switch: &str) -> bool {
+        self.named
+            .as_ref()
+            .and_then(|n| n.get(switch))
+            .map(|t| match t {
+                NamedValue::PresentSwitch(_) => true,
+                _ => false,
+            })
+            .unwrap_or(false)
+    }
+}
+
 impl PrettyDebugWithSource for Call {
+    fn refined_pretty_debug(&self, refine: PrettyDebugRefineKind, source: &str) -> DebugDocBuilder {
+        match refine {
+            PrettyDebugRefineKind::ContextFree => self.pretty_debug(source),
+            PrettyDebugRefineKind::WithContext => {
+                self.head
+                    .refined_pretty_debug(PrettyDebugRefineKind::WithContext, source)
+                    + b::preceded_option(
+                        Some(b::space()),
+                        self.positional.as_ref().map(|pos| {
+                            b::intersperse(
+                                pos.iter().map(|expr| {
+                                    expr.refined_pretty_debug(
+                                        PrettyDebugRefineKind::WithContext,
+                                        source,
+                                    )
+                                }),
+                                b::space(),
+                            )
+                        }),
+                    )
+                    + b::preceded_option(
+                        Some(b::space()),
+                        self.named.as_ref().map(|named| {
+                            named.refined_pretty_debug(PrettyDebugRefineKind::WithContext, source)
+                        }),
+                    )
+            }
+        }
+    }
+
     fn pretty_debug(&self, source: &str) -> DebugDocBuilder {
-        b::delimit(
-            "(",
-            self.head.pretty_debug(source)
-                + b::preceded_option(
-                    Some(b::space()),
-                    self.positional.as_ref().map(|pos| {
-                        b::intersperse(pos.iter().map(|expr| expr.pretty_debug(source)), b::space())
-                    }),
-                )
-                + b::preceded_option(
-                    Some(b::space()),
-                    self.named.as_ref().map(|named| named.pretty_debug(source)),
-                ),
-            ")",
-        )
+        b::typed(
+            "call",
+            self.refined_pretty_debug(PrettyDebugRefineKind::WithContext, source),
+        )
     }
 }
 
 #[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)]
-pub enum RawExpression {
+pub enum Expression {
     Literal(Literal),
     ExternalWord,
     Synthetic(Synthetic),
     Variable(Variable),
     Binary(Box<Binary>),
     Range(Box<Range>),
-    Block(Vec<Expression>),
-    List(Vec<Expression>),
+    Block(Vec<SpannedExpression>),
+    List(Vec<SpannedExpression>),
     Path(Box<Path>),
 
     FilePath(PathBuf),
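The new `Call::switch_preset` above reports whether a named switch was actually passed on the command line. Here is a self-contained sketch of the same logic, using `std::collections::HashMap` in place of `IndexMap` and a `PresentSwitch` variant without its `Tag` payload; both are simplifications for illustration, not the real nushell types.

use std::collections::HashMap;

// Stand-in for the real NamedValue/Call pair; only what switch_preset needs.
enum NamedValue {
    AbsentSwitch,
    PresentSwitch,
}

struct Call {
    named: Option<HashMap<String, NamedValue>>,
}

impl Call {
    fn switch_preset(&self, switch: &str) -> bool {
        self.named
            .as_ref()
            .and_then(|n| n.get(switch))
            .map(|t| matches!(t, NamedValue::PresentSwitch))
            .unwrap_or(false)
    }
}

fn main() {
    let mut named = HashMap::new();
    named.insert("full".to_string(), NamedValue::PresentSwitch);
    named.insert("help".to_string(), NamedValue::AbsentSwitch);
    let call = Call { named: Some(named) };

    assert!(call.switch_preset("full"));   // switch was given
    assert!(!call.switch_preset("help"));  // declared but not passed
    assert!(!call.switch_preset("other")); // unknown switch
}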
@@ -110,22 +141,22 @@ pub enum RawExpression {
     Boolean(bool),
 }
 
-impl ShellTypeName for RawExpression {
+impl ShellTypeName for Expression {
     fn type_name(&self) -> &'static str {
         match self {
-            RawExpression::Literal(literal) => literal.type_name(),
-            RawExpression::Synthetic(synthetic) => synthetic.type_name(),
-            RawExpression::Command(..) => "command",
-            RawExpression::ExternalWord => "external word",
-            RawExpression::FilePath(..) => "file path",
-            RawExpression::Variable(..) => "variable",
-            RawExpression::List(..) => "list",
-            RawExpression::Binary(..) => "binary",
-            RawExpression::Range(..) => "range",
-            RawExpression::Block(..) => "block",
-            RawExpression::Path(..) => "variable path",
-            RawExpression::Boolean(..) => "boolean",
-            RawExpression::ExternalCommand(..) => "external",
+            Expression::Literal(literal) => literal.type_name(),
+            Expression::Synthetic(synthetic) => synthetic.type_name(),
+            Expression::Command(..) => "command",
+            Expression::ExternalWord => "external word",
+            Expression::FilePath(..) => "file path",
+            Expression::Variable(..) => "variable",
+            Expression::List(..) => "list",
+            Expression::Binary(..) => "binary",
+            Expression::Range(..) => "range",
+            Expression::Block(..) => "block",
+            Expression::Path(..) => "variable path",
+            Expression::Boolean(..) => "boolean",
+            Expression::ExternalCommand(..) => "external",
         }
     }
 }
@@ -143,16 +174,24 @@ impl ShellTypeName for Synthetic {
     }
 }
 
-impl RawExpression {
-    pub fn into_expr(self, span: impl Into<Span>) -> Expression {
-        Expression {
+impl IntoSpanned for Expression {
+    type Output = SpannedExpression;
+
+    fn into_spanned(self, span: impl Into<Span>) -> Self::Output {
+        SpannedExpression {
             expr: self,
             span: span.into(),
         }
     }
+}
 
-    pub fn into_unspanned_expr(self) -> Expression {
-        Expression {
+impl Expression {
+    pub fn into_expr(self, span: impl Into<Span>) -> SpannedExpression {
+        self.into_spanned(span)
+    }
+
+    pub fn into_unspanned_expr(self) -> SpannedExpression {
+        SpannedExpression {
             expr: self,
             span: Span::unknown(),
         }
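The hunk above replaces the old `RawExpression::into_expr` constructor with an `IntoSpanned` implementation, so the expression itself carries no span and a span is attached at the call site. Below is a toy, self-contained version of that pattern; the `Span`, `Expression`, `SpannedExpression`, and `IntoSpanned` definitions are reduced stand-ins (the real trait takes `impl Into<Span>` and lives in `nu_source`).

#[derive(Debug, Clone, Copy, PartialEq)]
struct Span {
    start: usize,
    end: usize,
}

#[derive(Debug, Clone, PartialEq)]
enum Expression {
    Boolean(bool),
}

#[derive(Debug, Clone, PartialEq)]
struct SpannedExpression {
    expr: Expression,
    span: Span,
}

trait IntoSpanned {
    type Output;
    fn into_spanned(self, span: Span) -> Self::Output;
}

impl IntoSpanned for Expression {
    type Output = SpannedExpression;

    // Attach a source span to a span-free expression.
    fn into_spanned(self, span: Span) -> SpannedExpression {
        SpannedExpression { expr: self, span }
    }
}

fn main() {
    let span = Span { start: 0, end: 4 };
    let spanned = Expression::Boolean(true).into_spanned(span);
    assert_eq!(spanned.span, span);
    assert_eq!(spanned.expr, Expression::Boolean(true));
}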
@@ -160,40 +199,93 @@ impl RawExpression {
 }
 
 #[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)]
-pub struct Expression {
-    pub expr: RawExpression,
+pub struct SpannedExpression {
+    pub expr: Expression,
     pub span: Span,
 }
 
-impl std::ops::Deref for Expression {
-    type Target = RawExpression;
+impl std::ops::Deref for SpannedExpression {
+    type Target = Expression;
 
-    fn deref(&self) -> &RawExpression {
+    fn deref(&self) -> &Expression {
         &self.expr
     }
 }
 
-impl HasSpan for Expression {
+impl HasSpan for SpannedExpression {
     fn span(&self) -> Span {
         self.span
     }
 }
 
-impl PrettyDebugWithSource for Expression {
+impl ShellTypeName for SpannedExpression {
+    fn type_name(&self) -> &'static str {
+        self.expr.type_name()
+    }
+}
+
+impl PrettyDebugWithSource for SpannedExpression {
+    fn refined_pretty_debug(&self, refine: PrettyDebugRefineKind, source: &str) -> DebugDocBuilder {
+        match refine {
+            PrettyDebugRefineKind::ContextFree => self.refined_pretty_debug(refine, source),
+            PrettyDebugRefineKind::WithContext => match &self.expr {
+                Expression::Literal(literal) => literal
+                    .clone()
+                    .into_spanned(self.span)
+                    .refined_pretty_debug(refine, source),
+                Expression::ExternalWord => {
+                    b::delimit("e\"", b::primitive(self.span.slice(source)), "\"").group()
+                }
+                Expression::Synthetic(s) => match s {
+                    Synthetic::String(_) => {
+                        b::delimit("s\"", b::primitive(self.span.slice(source)), "\"").group()
+                    }
+                },
+                Expression::Variable(Variable::Other(_)) => b::keyword(self.span.slice(source)),
+                Expression::Variable(Variable::It(_)) => b::keyword("$it"),
+                Expression::Binary(binary) => binary.pretty_debug(source),
+                Expression::Range(range) => range.pretty_debug(source),
+                Expression::Block(_) => b::opaque("block"),
+                Expression::List(list) => b::delimit(
+                    "[",
+                    b::intersperse(
+                        list.iter()
+                            .map(|item| item.refined_pretty_debug(refine, source)),
+                        b::space(),
+                    ),
+                    "]",
+                ),
+                Expression::Path(path) => path.pretty_debug(source),
+                Expression::FilePath(path) => b::typed("path", b::primitive(path.display())),
+                Expression::ExternalCommand(external) => {
+                    b::keyword("^") + b::keyword(external.name.slice(source))
+                }
+                Expression::Command(command) => b::keyword(command.slice(source)),
+                Expression::Boolean(boolean) => match boolean {
+                    true => b::primitive("$yes"),
+                    false => b::primitive("$no"),
+                },
+            },
+        }
+    }
+
     fn pretty_debug(&self, source: &str) -> DebugDocBuilder {
         match &self.expr {
-            RawExpression::Literal(literal) => literal.spanned(self.span).pretty_debug(source),
-            RawExpression::ExternalWord => {
+            Expression::Literal(literal) => {
+                literal.clone().into_spanned(self.span).pretty_debug(source)
+            }
+            Expression::ExternalWord => {
                 b::typed("external word", b::primitive(self.span.slice(source)))
             }
-            RawExpression::Synthetic(s) => match s {
+            Expression::Synthetic(s) => match s {
                 Synthetic::String(s) => b::typed("synthetic", b::primitive(format!("{:?}", s))),
             },
-            RawExpression::Variable(_) => b::keyword(self.span.slice(source)),
-            RawExpression::Binary(binary) => binary.pretty_debug(source),
-            RawExpression::Range(range) => range.pretty_debug(source),
-            RawExpression::Block(_) => b::opaque("block"),
-            RawExpression::List(list) => b::delimit(
+            Expression::Variable(Variable::Other(_)) => b::keyword(self.span.slice(source)),
+            Expression::Variable(Variable::It(_)) => b::keyword("$it"),
+            Expression::Binary(binary) => binary.pretty_debug(source),
+            Expression::Range(range) => range.pretty_debug(source),
+            Expression::Block(_) => b::opaque("block"),
+            Expression::List(list) => b::delimit(
                 "[",
                 b::intersperse(
                     list.iter().map(|item| item.pretty_debug(source)),
@@ -201,16 +293,16 @@ impl PrettyDebugWithSource for Expression {
                 ),
                 "]",
             ),
-            RawExpression::Path(path) => path.pretty_debug(source),
-            RawExpression::FilePath(path) => b::typed("path", b::primitive(path.display())),
-            RawExpression::ExternalCommand(external) => b::typed(
-                "external command",
-                b::primitive(external.name.slice(source)),
+            Expression::Path(path) => path.pretty_debug(source),
+            Expression::FilePath(path) => b::typed("path", b::primitive(path.display())),
+            Expression::ExternalCommand(external) => b::typed(
+                "command",
+                b::keyword("^") + b::primitive(external.name.slice(source)),
             ),
-            RawExpression::Command(command) => {
+            Expression::Command(command) => {
                 b::typed("command", b::primitive(command.slice(source)))
             }
-            RawExpression::Boolean(boolean) => match boolean {
+            Expression::Boolean(boolean) => match boolean {
                 true => b::primitive("$yes"),
                 false => b::primitive("$no"),
             },
@@ -219,117 +311,91 @@ impl PrettyDebugWithSource for Expression {
 }
 
 impl Expression {
-    pub fn number(i: impl Into<Number>, span: impl Into<Span>) -> Expression {
-        let span = span.into();
-
-        RawExpression::Literal(RawLiteral::Number(i.into()).into_literal(span)).into_expr(span)
+    pub fn number(i: impl Into<Number>) -> Expression {
+        Expression::Literal(Literal::Number(i.into()))
     }
 
-    pub fn size(i: impl Into<Number>, unit: impl Into<Unit>, span: impl Into<Span>) -> Expression {
-        let span = span.into();
-
-        RawExpression::Literal(RawLiteral::Size(i.into(), unit.into()).into_literal(span))
-            .into_expr(span)
+    pub fn size(i: impl Into<Number>, unit: impl Into<Unit>) -> Expression {
+        Expression::Literal(Literal::Size(i.into(), unit.into()))
     }
 
-    pub fn synthetic_string(s: impl Into<String>) -> Expression {
-        RawExpression::Synthetic(Synthetic::String(s.into())).into_unspanned_expr()
+    pub fn string(inner: impl Into<Span>) -> Expression {
+        Expression::Literal(Literal::String(inner.into()))
     }
 
-    pub fn string(inner: impl Into<Span>, outer: impl Into<Span>) -> Expression {
-        let outer = outer.into();
-
-        RawExpression::Literal(RawLiteral::String(inner.into()).into_literal(outer))
-            .into_expr(outer)
+    pub fn synthetic_string(string: impl Into<String>) -> Expression {
+        Expression::Synthetic(Synthetic::String(string.into()))
     }
 
-    pub fn column_path(members: Vec<Member>, span: impl Into<Span>) -> Expression {
-        let span = span.into();
-
-        RawExpression::Literal(RawLiteral::ColumnPath(members).into_literal(span)).into_expr(span)
+    pub fn column_path(members: Vec<Member>) -> Expression {
+        Expression::Literal(Literal::ColumnPath(members))
     }
 
-    pub fn path(
-        head: Expression,
-        tail: Vec<impl Into<PathMember>>,
-        span: impl Into<Span>,
-    ) -> Expression {
+    pub fn path(head: SpannedExpression, tail: Vec<impl Into<PathMember>>) -> Expression {
         let tail = tail.into_iter().map(|t| t.into()).collect();
-        RawExpression::Path(Box::new(Path::new(head, tail))).into_expr(span.into())
+        Expression::Path(Box::new(Path::new(head, tail)))
     }
 
-    pub fn dot_member(head: Expression, next: impl Into<PathMember>) -> Expression {
-        let Expression { expr: item, span } = head;
+    pub fn dot_member(head: SpannedExpression, next: impl Into<PathMember>) -> Expression {
+        let SpannedExpression { expr: item, span } = head;
         let next = next.into();
 
-        let new_span = head.span.until(next.span);
-
         match item {
-            RawExpression::Path(path) => {
+            Expression::Path(path) => {
                 let (head, mut tail) = path.parts();
 
                 tail.push(next);
-                Expression::path(head, tail, new_span)
+                Expression::path(head, tail)
             }
 
-            other => Expression::path(other.into_expr(span), vec![next], new_span),
+            other => Expression::path(other.into_expr(span), vec![next]),
         }
     }
 
     pub fn infix(
-        left: Expression,
+        left: SpannedExpression,
         op: Spanned<impl Into<CompareOperator>>,
-        right: Expression,
+        right: SpannedExpression,
     ) -> Expression {
-        let new_span = left.span.until(right.span);
-
-        RawExpression::Binary(Box::new(Binary::new(left, op.map(|o| o.into()), right)))
-            .into_expr(new_span)
+        Expression::Binary(Box::new(Binary::new(left, op.map(|o| o.into()), right)))
     }
 
-    pub fn range(left: Expression, op: Span, right: Expression) -> Expression {
-        let new_span = left.span.until(right.span);
-
-        RawExpression::Range(Box::new(Range::new(left, op, right))).into_expr(new_span)
+    pub fn range(left: SpannedExpression, op: Span, right: SpannedExpression) -> Expression {
+        Expression::Range(Box::new(Range::new(left, op, right)))
     }
 
-    pub fn file_path(path: impl Into<PathBuf>, outer: impl Into<Span>) -> Expression {
-        RawExpression::FilePath(path.into()).into_expr(outer)
+    pub fn file_path(path: impl Into<PathBuf>) -> Expression {
+        Expression::FilePath(path.into())
     }
 
-    pub fn list(list: Vec<Expression>, span: impl Into<Span>) -> Expression {
-        RawExpression::List(list).into_expr(span)
+    pub fn list(list: Vec<SpannedExpression>) -> Expression {
+        Expression::List(list)
     }
 
-    pub fn bare(span: impl Into<Span>) -> Expression {
-        let span = span.into();
-
-        RawExpression::Literal(RawLiteral::Bare.into_literal(span)).into_expr(span)
+    pub fn bare() -> Expression {
+        Expression::Literal(Literal::Bare)
    }
 
-    pub fn pattern(inner: impl Into<String>, outer: impl Into<Span>) -> Expression {
-        let outer = outer.into();
-
-        RawExpression::Literal(RawLiteral::GlobPattern(inner.into()).into_literal(outer))
-            .into_expr(outer)
+    pub fn pattern(inner: impl Into<String>) -> Expression {
+        Expression::Literal(Literal::GlobPattern(inner.into()))
     }
 
-    pub fn variable(inner: impl Into<Span>, outer: impl Into<Span>) -> Expression {
-        RawExpression::Variable(Variable::Other(inner.into())).into_expr(outer)
+    pub fn variable(inner: impl Into<Span>) -> Expression {
+        Expression::Variable(Variable::Other(inner.into()))
     }
 
-    pub fn external_command(inner: impl Into<Span>, outer: impl Into<Span>) -> Expression {
-        RawExpression::ExternalCommand(ExternalCommand::new(inner.into())).into_expr(outer)
+    pub fn external_command(inner: impl Into<Span>) -> Expression {
+        Expression::ExternalCommand(ExternalCommand::new(inner.into()))
     }
 
-    pub fn it_variable(inner: impl Into<Span>, outer: impl Into<Span>) -> Expression {
-        RawExpression::Variable(Variable::It(inner.into())).into_expr(outer)
+    pub fn it_variable(inner: impl Into<Span>) -> Expression {
+        Expression::Variable(Variable::It(inner.into()))
     }
 }
 
-impl From<Spanned<Path>> for Expression {
-    fn from(path: Spanned<Path>) -> Expression {
-        RawExpression::Path(Box::new(path.item)).into_expr(path.span)
+impl From<Spanned<Path>> for SpannedExpression {
+    fn from(path: Spanned<Path>) -> SpannedExpression {
+        Expression::Path(Box::new(path.item)).into_expr(path.span)
     }
 }
 
@@ -339,7 +405,7 @@ impl From<Spanned<Path>> for Expression {
 /// 2. Can be evaluated without additional context
 /// 3. Evaluation cannot produce an error
 #[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)]
-pub enum RawLiteral {
+pub enum Literal {
     Number(Number),
     Size(Number, Unit),
     String(Span),
|
|||||||
Bare,
|
Bare,
|
||||||
}
|
}
|
||||||
|
|
||||||
impl RawLiteral {
|
impl Literal {
|
||||||
pub fn into_literal(self, span: impl Into<Span>) -> Literal {
|
pub fn into_spanned(self, span: impl Into<Span>) -> SpannedLiteral {
|
||||||
Literal {
|
SpannedLiteral {
|
||||||
literal: self,
|
literal: self,
|
||||||
span: span.into(),
|
span: span.into(),
|
||||||
}
|
}
|
||||||
@ -358,36 +424,57 @@ impl RawLiteral {
|
|||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)]
|
#[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)]
|
||||||
pub struct Literal {
|
pub struct SpannedLiteral {
|
||||||
pub literal: RawLiteral,
|
pub literal: Literal,
|
||||||
pub span: Span,
|
pub span: Span,
|
||||||
}
|
}
|
||||||
|
|
||||||
impl ShellTypeName for Literal {
|
impl ShellTypeName for Literal {
|
||||||
fn type_name(&self) -> &'static str {
|
fn type_name(&self) -> &'static str {
|
||||||
match &self.literal {
|
match &self {
|
||||||
RawLiteral::Number(..) => "number",
|
Literal::Number(..) => "number",
|
||||||
RawLiteral::Size(..) => "size",
|
Literal::Size(..) => "size",
|
||||||
RawLiteral::String(..) => "string",
|
Literal::String(..) => "string",
|
||||||
RawLiteral::ColumnPath(..) => "column path",
|
Literal::ColumnPath(..) => "column path",
|
||||||
RawLiteral::Bare => "string",
|
Literal::Bare => "string",
|
||||||
RawLiteral::GlobPattern(_) => "pattern",
|
Literal::GlobPattern(_) => "pattern",
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl PrettyDebugWithSource for Literal {
|
impl PrettyDebugWithSource for SpannedLiteral {
|
||||||
|
fn refined_pretty_debug(&self, refine: PrettyDebugRefineKind, source: &str) -> DebugDocBuilder {
|
||||||
|
match refine {
|
||||||
|
PrettyDebugRefineKind::ContextFree => self.pretty_debug(source),
|
||||||
|
PrettyDebugRefineKind::WithContext => match &self.literal {
|
||||||
|
Literal::Number(number) => number.pretty(),
|
||||||
|
Literal::Size(number, unit) => (number.pretty() + unit.pretty()).group(),
|
||||||
|
Literal::String(string) => b::primitive(format!("{:?}", string.slice(source))),
|
||||||
|
Literal::GlobPattern(pattern) => b::primitive(pattern),
|
||||||
|
Literal::ColumnPath(path) => {
|
||||||
|
b::intersperse_with_source(path.iter(), b::space(), source)
|
||||||
|
}
|
||||||
|
Literal::Bare => b::delimit("b\"", b::primitive(self.span.slice(source)), "\""),
|
||||||
|
},
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
fn pretty_debug(&self, source: &str) -> DebugDocBuilder {
|
fn pretty_debug(&self, source: &str) -> DebugDocBuilder {
|
||||||
match &self.literal {
|
match &self.literal {
|
||||||
RawLiteral::Number(number) => number.pretty(),
|
Literal::Number(number) => number.pretty(),
|
||||||
RawLiteral::Size(number, unit) => (number.pretty() + unit.pretty()).group(),
|
Literal::Size(number, unit) => {
|
||||||
RawLiteral::String(string) => b::primitive(format!("{:?}", string.slice(source))),
|
b::typed("size", (number.pretty() + unit.pretty()).group())
|
||||||
RawLiteral::GlobPattern(pattern) => b::typed("pattern", b::primitive(pattern)),
|
}
|
||||||
RawLiteral::ColumnPath(path) => b::typed(
|
Literal::String(string) => b::typed(
|
||||||
|
"string",
|
||||||
|
b::primitive(format!("{:?}", string.slice(source))),
|
||||||
|
),
|
||||||
|
Literal::GlobPattern(pattern) => b::typed("pattern", b::primitive(pattern)),
|
||||||
|
Literal::ColumnPath(path) => b::typed(
|
||||||
"column path",
|
"column path",
|
||||||
b::intersperse_with_source(path.iter(), b::space(), source),
|
b::intersperse_with_source(path.iter(), b::space(), source),
|
||||||
),
|
),
|
||||||
RawLiteral::Bare => b::primitive(self.span.slice(source)),
|
Literal::Bare => b::typed("bare", b::primitive(self.span.slice(source))),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@@ -1,42 +1,99 @@
 use crate::commands::classified::{internal::InternalCommand, ClassifiedCommand};
-use crate::hir::TokensIterator;
-use crate::hir::{self, named::NamedValue, syntax_shape::*, NamedArguments};
+use crate::hir::expand_external_tokens::{ExternalTokensShape, ExternalTokensSyntax};
+use crate::hir::{
+    self, named::NamedValue, syntax_shape::*, Expression, NamedArguments, SpannedExpression,
+    TokensIterator,
+};
 use crate::parse::files::Files;
 use crate::parse::token_tree_builder::{CurriedToken, TokenTreeBuilder as b};
-use crate::TokenNode;
+use crate::SpannedToken;
 use derive_new::new;
 use indexmap::IndexMap;
-use nu_errors::ShellError;
-use nu_protocol::{PathMember, Signature, SyntaxShape};
-use nu_source::{HasSpan, Span, Tag, Text};
+use nu_errors::{ParseError, ShellError};
+use nu_protocol::{outln, PathMember, Signature, SyntaxShape};
+use nu_source::{HasSpan, PrettyDebugWithSource, Span, SpannedItem, Tag, Text};
 use pretty_assertions::assert_eq;
 use std::fmt::Debug;
 
 #[test]
-fn test_parse_string() {
-    parse_tokens(StringShape, vec![b::string("hello")], |tokens| {
-        hir::Expression::string(inner_string_span(tokens[0].span()), tokens[0].span())
-    });
-}
-
-#[test]
-fn test_parse_path() {
+fn test_parse_external() {
     parse_tokens(
-        VariablePathShape,
-        vec![b::var("it"), b::dot(), b::bare("cpu")],
+        fallible(ExternalTokensShape),
+        "5kb",
+        vec![b::bare("5kb")],
         |tokens| {
-            let (outer_var, inner_var) = tokens[0].expect_var();
-            let bare = tokens[2].expect_bare();
-            hir::Expression::path(
-                hir::Expression::it_variable(inner_var, outer_var),
-                vec![PathMember::string("cpu", bare)],
-                outer_var.until(bare),
+            ExternalTokensSyntax::new(
+                vec![format!("5kb").spanned(tokens[0].span())].spanned(tokens[0].span()),
             )
         },
     );
 
     parse_tokens(
+        fallible(ExternalTokensShape),
+        "cargo +nightly run -- --features all",
+        vec![
+            b::bare("cargo"),
+            b::sp(),
+            b::external_word("+nightly"),
+            b::sp(),
+            b::bare("run"),
+            b::sp(),
+            b::external_word("--"),
+            b::sp(),
+            b::flag("features"),
+            b::sp(),
+            b::bare("all"),
+        ],
+        |tokens| {
+            let cargo = format!("cargo").spanned(tokens[0].span());
+            let nightly = format!("+nightly").spanned(tokens[2].span());
+            let run = format!("run").spanned(tokens[4].span());
+            let dashdash = format!("--").spanned(tokens[6].span());
+            let features = format!("--features").spanned(tokens[8].span());
+            let all = format!("all").spanned(tokens[10].span());
+            let span = tokens[0].span().until(tokens[10].span());
+
+            ExternalTokensSyntax::new(
+                vec![cargo, nightly, run, dashdash, features, all].spanned(span),
+            )
+        },
+    );
+}
+
+#[test]
+fn test_parse_string() {
+    parse_tokens(
+        CoerceStringShape,
+        r#""hello""#,
+        vec![b::string("hello")],
+        |tokens| {
+            Expression::string(inner_string_span(tokens[0].span())).into_expr(tokens[0].span())
+        },
+    );
+}
+
+#[test]
+fn test_parse_path() {
+    let _ = pretty_env_logger::try_init();
+
+    parse_expr(
+        AnyExpressionShape,
+        "$it.cpu",
+        vec![b::it_var(), b::dot(), b::bare("cpu")],
+        |tokens| {
+            let (outer_var, inner_var) = tokens[0].expect_var();
+            let bare = tokens[2].expect_bare();
+            Expression::path(
+                Expression::it_variable(inner_var).into_expr(outer_var),
+                vec![PathMember::string("cpu", bare)],
+            )
+            .into_expr(outer_var.until(bare))
+        },
+    );
+
+    parse_expr(
         VariablePathShape,
+        r#"$cpu.amount."max ghz""#,
         vec![
             b::var("cpu"),
             b::dot(),
@@ -49,14 +106,14 @@ fn test_parse_path() {
             let amount = tokens[2].expect_bare();
             let (outer_max_ghz, _) = tokens[4].expect_string();
 
-            hir::Expression::path(
-                hir::Expression::variable(inner_var, outer_var),
+            Expression::path(
+                Expression::variable(inner_var).into_expr(outer_var),
                 vec![
                     PathMember::string("amount", amount),
                     PathMember::string("max ghz", outer_max_ghz),
                 ],
-                outer_var.until(outer_max_ghz),
             )
+            .into_expr(outer_var.until(outer_max_ghz))
         },
     );
 }
@@ -64,7 +121,8 @@ fn test_parse_path() {
 #[test]
 fn test_parse_command() {
     parse_tokens(
-        ClassifiedCommandShape,
+        fallible(ClassifiedCommandShape),
+        "ls *.txt",
         vec![b::bare("ls"), b::sp(), b::pattern("*.txt")],
         |tokens| {
             let bare = tokens[0].expect_bare();
@@ -72,6 +130,7 @@ fn test_parse_command() {
 
             let mut map = IndexMap::new();
             map.insert("full".to_string(), NamedValue::AbsentSwitch);
+            map.insert("help".to_string(), NamedValue::AbsentSwitch);
 
             ClassifiedCommand::Internal(InternalCommand::new(
                 "ls".to_string(),
@@ -80,8 +139,8 @@ fn test_parse_command() {
                     anchor: None,
                 },
                 hir::Call {
-                    head: Box::new(hir::RawExpression::Command(bare).into_expr(bare)),
-                    positional: Some(vec![hir::Expression::pattern("*.txt", pat)]),
+                    head: Box::new(Expression::Command(bare).into_expr(bare)),
+                    positional: Some(vec![Expression::pattern("*.txt").into_expr(pat)]),
                     named: Some(NamedArguments { named: map }),
                     span: bare.until(pat),
                 },
@@ -90,7 +149,7 @@ fn test_parse_command() {
     );
 }
 
-#[derive(new)]
+#[derive(Debug, Clone, new)]
 struct TestRegistry {
     #[new(default)]
     signatures: indexmap::IndexMap<String, Signature>,
@@ -103,11 +162,14 @@ impl TestRegistry {
 }
 
 impl SignatureRegistry for TestRegistry {
-    fn has(&self, name: &str) -> Result<bool, ShellError> {
-        Ok(self.signatures.contains_key(name))
+    fn has(&self, name: &str) -> bool {
+        self.signatures.contains_key(name)
     }
-    fn get(&self, name: &str) -> Result<Option<Signature>, ShellError> {
-        Ok(self.signatures.get(name).cloned())
+    fn get(&self, name: &str) -> Option<Signature> {
+        self.signatures.get(name).cloned()
+    }
+    fn clone_box(&self) -> Box<dyn SignatureRegistry> {
+        Box::new(self.clone())
     }
 }
 
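The `SignatureRegistry` methods above now return plain values instead of `Result`, and the trait gains a `clone_box` method so boxed registries can be duplicated. Below is a reduced, standalone sketch of that object-safe cloning pattern; `Signature` is collapsed to a `String` and `HashMap` stands in for `IndexMap`, so this illustrates the design rather than reproducing the actual nushell trait.

use std::collections::HashMap;

type Signature = String;

// Trait objects cannot require `Clone` directly, so cloning goes through a
// method that returns a boxed copy.
trait SignatureRegistry {
    fn has(&self, name: &str) -> bool;
    fn get(&self, name: &str) -> Option<Signature>;
    fn clone_box(&self) -> Box<dyn SignatureRegistry>;
}

#[derive(Clone, Default)]
struct TestRegistry {
    signatures: HashMap<String, Signature>,
}

impl SignatureRegistry for TestRegistry {
    fn has(&self, name: &str) -> bool {
        self.signatures.contains_key(name)
    }
    fn get(&self, name: &str) -> Option<Signature> {
        self.signatures.get(name).cloned()
    }
    fn clone_box(&self) -> Box<dyn SignatureRegistry> {
        Box::new(self.clone())
    }
}

fn main() {
    let mut registry = TestRegistry::default();
    registry.signatures.insert("ls".to_string(), "ls --full".to_string());

    let boxed: Box<dyn SignatureRegistry> = registry.clone_box();
    assert!(boxed.has("ls"));
    assert_eq!(boxed.get("ls"), Some("ls --full".to_string()));
}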
@@ -127,30 +189,91 @@ fn with_empty_context(source: &Text, callback: impl FnOnce(ExpandContext)) {
     callback(ExpandContext::new(Box::new(registry), source, None))
 }
 
-fn parse_tokens<T: Eq + HasSpan + Clone + Debug + 'static>(
-    shape: impl ExpandSyntax<Output = T>,
+trait Expand {}
+
+fn parse_tokens<T: Eq + HasSpan + PrettyDebugWithSource + Clone + Debug + 'static>(
+    shape: impl ExpandSyntax<Output = Result<T, ParseError>>,
+    syntax: &str,
     tokens: Vec<CurriedToken>,
-    expected: impl FnOnce(&[TokenNode]) -> T,
+    expected: impl FnOnce(&[SpannedToken]) -> T,
 ) {
+    // let parsed_tokens = parse(syntax);
     let tokens = b::token_list(tokens);
     let (tokens, source) = b::build(tokens);
-    let text = Text::from(source);
+    let text = Text::from(&source);
+
+    assert_eq!(syntax, source);
 
     with_empty_context(&text, |context| {
         let tokens = tokens.expect_list();
-        let mut iterator = TokensIterator::all(tokens.item, text.clone(), tokens.span);
+        let mut iterator = TokensIterator::new(&tokens.item, context, tokens.span);
 
-        let expr = expand_syntax(&shape, &mut iterator, &context);
+        let expr = iterator.expand_syntax(shape);
 
         let expr = match expr {
             Ok(expr) => expr,
             Err(err) => {
-                print_err(err.into(), &context.source().clone());
+                outln!("");
+                ptree::print_tree(&iterator.expand_tracer().print(text.clone())).unwrap();
+                outln!("");
+
+                print_err(err.into(), &iterator.context().source().clone());
                 panic!("Parse failed");
             }
         };
 
-        assert_eq!(expr, expected(tokens.item));
+        let expected = expected(&tokens.item);
+
+        if expr != expected {
+            outln!("");
+            ptree::print_tree(&iterator.expand_tracer().print(text.clone())).unwrap();
+            outln!("");
+
+            assert_eq!(expr, expected);
+        }
+    })
+}
+
+fn parse_expr(
+    shape: impl ExpandSyntax<Output = Result<SpannedExpression, ParseError>>,
+    syntax: &str,
+    tokens: Vec<CurriedToken>,
+    expected: impl FnOnce(&[SpannedToken]) -> SpannedExpression,
+) {
+    // let parsed_tokens = parse(syntax);
+    let tokens = b::token_list(tokens);
+    let (tokens, source) = b::build(tokens);
+    let text = Text::from(&source);
+
+    assert_eq!(syntax, source);
+
+    with_empty_context(&text, |context| {
+        let tokens = tokens.expect_list();
+        let mut iterator = TokensIterator::new(&tokens.item, context, tokens.span);
+
+        let expr = iterator.expand_syntax(shape);
+
+        let expr = match expr {
+            Ok(expr) => expr,
+            Err(err) => {
+                outln!("");
+                ptree::print_tree(&iterator.expand_tracer().print(text.clone())).unwrap();
+                outln!("");
+
+                print_err(err.into(), &iterator.source());
+                panic!("Parse failed");
+            }
+        };
+
+        let expected = expected(&tokens.item);
+
+        if expr != expected {
+            outln!("");
+            ptree::print_tree(&iterator.expand_tracer().print(text.clone())).unwrap();
+            outln!("");
+
+            assert_eq!(expr, expected);
+        }
     })
 }
 
@@ -1,4 +1,4 @@
-use crate::{hir::Expression, CompareOperator};
+use crate::{hir::SpannedExpression, CompareOperator};
 
 use derive_new::new;
 use getset::Getters;
@@ -10,9 +10,9 @@ use serde::{Deserialize, Serialize};
 )]
 #[get = "pub"]
 pub struct Binary {
-    left: Expression,
+    left: SpannedExpression,
     op: Spanned<CompareOperator>,
-    right: Expression,
+    right: SpannedExpression,
 }
 
 impl PrettyDebugWithSource for Binary {
@@ -1,17 +1,14 @@
+use crate::parse::token_tree::Token;
 use crate::{
-    hir::syntax_shape::{
-        color_syntax, expand_atom, expand_expr, expand_syntax, AtomicToken, ColorSyntax,
-        ExpandContext, ExpandExpression, ExpandSyntax, ExpansionRule, MaybeSpaceShape,
-        UnspannedAtomicToken,
-    },
-    hir::Expression,
+    hir::syntax_shape::{ExpandSyntax, FlatShape, MaybeSpaceShape},
     TokensIterator,
 };
+use derive_new::new;
 use nu_errors::ParseError;
 use nu_protocol::SpannedTypeName;
 use nu_source::{b, DebugDocBuilder, HasSpan, PrettyDebug, Span, Spanned, SpannedItem};
 
-#[derive(Debug, Clone)]
+#[derive(Debug, Eq, PartialEq, Clone, new)]
 pub struct ExternalTokensSyntax {
     pub tokens: Spanned<Vec<Spanned<String>>>,
 }
@@ -40,57 +37,25 @@ impl ExpandSyntax for ExternalTokensShape {
     type Output = ExternalTokensSyntax;
 
     fn name(&self) -> &'static str {
-        "external command"
+        "external tokens"
     }
 
-    fn expand_syntax<'a, 'b>(
-        &self,
-        token_nodes: &'b mut TokensIterator<'a>,
-        context: &ExpandContext,
-    ) -> Result<Self::Output, ParseError> {
+    fn expand<'a, 'b>(&self, token_nodes: &'b mut TokensIterator<'a>) -> ExternalTokensSyntax {
         let mut out: Vec<Spanned<String>> = vec![];
 
         let start = token_nodes.span_at_cursor();
 
         loop {
-            match expand_syntax(&ExternalExpressionShape, token_nodes, context) {
-                Err(_) | Ok(None) => break,
-                Ok(Some(span)) => out.push(span.spanned_string(context.source())),
+            match token_nodes.expand_syntax(ExternalExpressionShape) {
+                Err(_) => break,
+                Ok(span) => out.push(span.spanned_string(&token_nodes.source())),
             }
         }
 
         let end = token_nodes.span_at_cursor();
 
-        Ok(ExternalTokensSyntax {
+        ExternalTokensSyntax {
             tokens: out.spanned(start.until(end)),
-        })
-    }
-}
-
-impl ColorSyntax for ExternalTokensShape {
-    type Info = ();
-    type Input = ();
-
-    fn name(&self) -> &'static str {
-        "ExternalTokensShape"
-    }
-
-    fn color_syntax<'a, 'b>(
-        &self,
-        _input: &(),
-        token_nodes: &'b mut TokensIterator<'a>,
-        context: &ExpandContext,
-    ) -> Self::Info {
-        loop {
-            // Allow a space
-            color_syntax(&MaybeSpaceShape, token_nodes, context);
-
-            // Process an external expression. External expressions are mostly words, with a
-            // few exceptions (like $variables and path expansion rules)
-            match color_syntax(&ExternalExpressionShape, token_nodes, context).1 {
-                ExternalExpressionResult::Eof => break,
-                ExternalExpressionResult::Processed => continue,
-            }
         }
     }
 }
@@ -99,208 +64,112 @@ impl ColorSyntax for ExternalTokensShape {
 pub struct ExternalExpressionShape;
 
 impl ExpandSyntax for ExternalExpressionShape {
-    type Output = Option<Span>;
+    type Output = Result<Span, ParseError>;
 
     fn name(&self) -> &'static str {
         "external expression"
     }
 
-    fn expand_syntax<'a, 'b>(
-        &self,
-        token_nodes: &'b mut TokensIterator<'a>,
-        context: &ExpandContext,
-    ) -> Result<Self::Output, ParseError> {
-        expand_syntax(&MaybeSpaceShape, token_nodes, context)?;
-
-        let first = expand_atom(
-            token_nodes,
-            "external command",
-            context,
-            ExpansionRule::new().allow_external_word(),
-        )?
-        .span;
-
+    fn expand<'a, 'b>(&self, token_nodes: &'b mut TokensIterator<'a>) -> Result<Span, ParseError> {
+        token_nodes.expand_infallible(MaybeSpaceShape);
+        let first = token_nodes.expand_syntax(ExternalStartToken)?;
         let mut last = first;
 
         loop {
-            let continuation = expand_expr(&ExternalContinuationShape, token_nodes, context);
+            let continuation = token_nodes.expand_syntax(ExternalStartToken);
 
             if let Ok(continuation) = continuation {
-                last = continuation.span;
+                last = continuation;
             } else {
                 break;
             }
         }
 
-        Ok(Some(first.until(last)))
+        Ok(first.until(last))
     }
 }
 
 #[derive(Debug, Copy, Clone)]
-struct ExternalHeadShape;
+struct ExternalStartToken;
 
-impl ExpandExpression for ExternalHeadShape {
+impl ExpandSyntax for ExternalStartToken {
+    type Output = Result<Span, ParseError>;
+
     fn name(&self) -> &'static str {
-        "external argument"
+        "external start token"
     }
 
-    fn expand_expr<'a, 'b>(
-        &self,
-        token_nodes: &'b mut TokensIterator<'a>,
-        context: &ExpandContext,
-    ) -> Result<Expression, ParseError> {
-        let atom = expand_atom(
-            token_nodes,
-            "external argument",
-            context,
-            ExpansionRule::new()
-                .allow_external_word()
-                .treat_size_as_word(),
-        )?;
-
-        let span = atom.span;
-
-        Ok(match &atom.unspanned {
-            UnspannedAtomicToken::Eof { .. } => unreachable!("ExpansionRule doesn't allow EOF"),
-            UnspannedAtomicToken::Error { .. } => unreachable!("ExpansionRule doesn't allow Error"),
-            UnspannedAtomicToken::Size { .. } => unreachable!("ExpansionRule treats size as word"),
-            UnspannedAtomicToken::Whitespace { .. } => {
-                unreachable!("ExpansionRule doesn't allow Whitespace")
-            }
-            UnspannedAtomicToken::Separator { .. } => {
-                unreachable!("ExpansionRule doesn't allow Separator")
-            }
-            UnspannedAtomicToken::Comment { .. } => {
-                unreachable!("ExpansionRule doesn't allow Comment")
-            }
-            UnspannedAtomicToken::ShorthandFlag { .. }
-            | UnspannedAtomicToken::SquareDelimited { .. }
-            | UnspannedAtomicToken::RoundDelimited { .. } => {
-                return Err(ParseError::mismatch(
-                    "external command name",
-                    atom.spanned_type_name(),
-                ))
-            }
-            UnspannedAtomicToken::ExternalCommand { command } => {
-                Expression::external_command(*command, span)
-            }
-            UnspannedAtomicToken::Number { number } => {
-                Expression::number(number.to_number(context.source()), span)
-            }
-            UnspannedAtomicToken::String { body } => Expression::string(*body, span),
-            UnspannedAtomicToken::ItVariable { name } => Expression::it_variable(*name, span),
-            UnspannedAtomicToken::Variable { name } => Expression::variable(*name, span),
-            UnspannedAtomicToken::ExternalWord { .. }
-            | UnspannedAtomicToken::GlobPattern { .. }
-            | UnspannedAtomicToken::Word { .. }
-            | UnspannedAtomicToken::Dot { .. }
-            | UnspannedAtomicToken::DotDot { .. }
-            | UnspannedAtomicToken::CompareOperator { .. } => {
-                Expression::external_command(span, span)
-            }
+    fn expand<'a, 'b>(&self, token_nodes: &'b mut TokensIterator<'a>) -> Result<Span, ParseError> {
+        token_nodes.atomic_parse(|token_nodes| {
+            let mut span: Option<Span> = None;
+
+            loop {
+                let boundary = token_nodes.expand_infallible(PeekExternalBoundary);
+
+                if boundary {
+                    break;
+                }
+
+                let peeked = token_nodes.peek().not_eof("external start token")?;
+                let node = peeked.node;
+
+                let new_span = match node.unspanned() {
+                    Token::Comment(_)
+                    | Token::Separator
+                    | Token::Whitespace
+                    | Token::Pipeline(_) => {
+                        return Err(ParseError::mismatch(
+                            "external start token",
+                            node.spanned_type_name(),
+                        ))
+                    }
+
+                    _ => {
+                        let node = peeked.commit();
+                        node.span()
+                    }
+                };
+
+                span = match span {
+                    None => Some(new_span),
+                    Some(before) => Some(before.until(new_span)),
+                };
+            }
+
+            match span {
+                None => Err(token_nodes.err_next_token("external start token")),
+                Some(span) => {
+                    token_nodes.color_shape(FlatShape::ExternalWord.spanned(span));
+                    Ok(span)
+                }
+            }
         })
     }
 }
 
 #[derive(Debug, Copy, Clone)]
-struct ExternalContinuationShape;
+struct PeekExternalBoundary;
 
-impl ExpandExpression for ExternalContinuationShape {
-    fn name(&self) -> &'static str {
-        "external argument"
-    }
-
-    fn expand_expr<'a, 'b>(
-        &self,
-        token_nodes: &'b mut TokensIterator<'a>,
-        context: &ExpandContext,
-    ) -> Result<Expression, ParseError> {
-        let atom = expand_atom(
-            token_nodes,
-            "external argument",
-            context,
-            ExpansionRule::new()
-                .allow_external_word()
-                .treat_size_as_word(),
-        )?;
-
-        let span = atom.span;
-
-        Ok(match &atom.unspanned {
-            UnspannedAtomicToken::Eof { .. } => unreachable!("ExpansionRule doesn't allow EOF"),
-            UnspannedAtomicToken::Error { .. } => unreachable!("ExpansionRule doesn't allow Error"),
-            UnspannedAtomicToken::Number { number } => {
-                Expression::number(number.to_number(context.source()), span)
-            }
-            UnspannedAtomicToken::Size { .. } => unreachable!("ExpansionRule treats size as word"),
-            UnspannedAtomicToken::ExternalCommand { .. } => {
-                unreachable!("ExpansionRule doesn't allow ExternalCommand")
-            }
-            UnspannedAtomicToken::Whitespace { .. } => {
-                unreachable!("ExpansionRule doesn't allow Whitespace")
-            }
-            UnspannedAtomicToken::Separator { .. } => {
-                unreachable!("ExpansionRule doesn't allow Separator")
-            }
-            UnspannedAtomicToken::Comment { .. } => {
-                unreachable!("ExpansionRule doesn't allow Comment")
-            }
-            UnspannedAtomicToken::String { body } => Expression::string(*body, span),
-            UnspannedAtomicToken::ItVariable { name } => Expression::it_variable(*name, span),
-            UnspannedAtomicToken::Variable { name } => Expression::variable(*name, span),
-            UnspannedAtomicToken::ExternalWord { .. }
-            | UnspannedAtomicToken::GlobPattern { .. }
-            | UnspannedAtomicToken::Word { .. }
-            | UnspannedAtomicToken::ShorthandFlag { .. }
-            | UnspannedAtomicToken::Dot { .. }
-            | UnspannedAtomicToken::DotDot { .. }
-            | UnspannedAtomicToken::CompareOperator { .. } => Expression::bare(span),
-            UnspannedAtomicToken::SquareDelimited { .. }
-            | UnspannedAtomicToken::RoundDelimited { .. } => {
-                return Err(ParseError::mismatch(
-                    "external argument",
-                    atom.spanned_type_name(),
-                ))
-            }
-        })
-    }
-}
-
-impl ColorSyntax for ExternalExpressionShape {
-    type Info = ExternalExpressionResult;
-    type Input = ();
+impl ExpandSyntax for PeekExternalBoundary {
+    type Output = bool;
 
     fn name(&self) -> &'static str {
-        "ExternalExpressionShape"
+        "external boundary"
     }
 
-    fn color_syntax<'a, 'b>(
-        &self,
-        _input: &(),
-        token_nodes: &'b mut TokensIterator<'a>,
-        context: &ExpandContext,
-    ) -> ExternalExpressionResult {
-        let atom = match expand_atom(
-            token_nodes,
-            "external word",
-            context,
-            ExpansionRule::permissive(),
-        ) {
-            Err(_) => unreachable!("TODO: separate infallible expand_atom"),
-            Ok(AtomicToken {
-                unspanned: UnspannedAtomicToken::Eof { .. },
-                ..
-            }) => return ExternalExpressionResult::Eof,
-            Ok(atom) => atom,
-        };
-
-        token_nodes.mutate_shapes(|shapes| atom.color_tokens(shapes));
-        ExternalExpressionResult::Processed
+    fn expand<'a, 'b>(&self, token_nodes: &'b mut TokensIterator<'a>) -> Self::Output {
+        let next = token_nodes.peek();
+
+        match next.node {
+            None => true,
+            Some(node) => match node.unspanned() {
+                Token::Delimited(_) => true,
+                Token::Whitespace => true,
+                Token::Comment(_) => true,
+                Token::Separator => true,
+                Token::Call(_) => true,
+                _ => false,
+            },
+        }
     }
 }
 
-#[must_use]
-pub enum ExternalExpressionResult {
-    Eof,
-    Processed,
-}
@@ -1,8 +1,8 @@
-use crate::hir::Expression;
+use crate::hir::SpannedExpression;
 use crate::Flag;
 use indexmap::IndexMap;
 use log::trace;
-use nu_source::{b, DebugDocBuilder, PrettyDebugWithSource, Tag};
+use nu_source::{b, DebugDocBuilder, PrettyDebugRefineKind, PrettyDebugWithSource, Tag};
 use serde::{Deserialize, Serialize};
 
 #[derive(Debug, Clone, Eq, PartialEq, Serialize, Deserialize)]
@@ -10,7 +10,7 @@ pub enum NamedValue {
     AbsentSwitch,
     PresentSwitch(Tag),
     AbsentValue,
-    Value(Expression),
+    Value(SpannedExpression),
 }
 
 impl PrettyDebugWithSource for NamedValue {
@@ -22,6 +22,18 @@ impl PrettyDebugWithSource for NamedValue {
             NamedValue::Value(value) => value.pretty_debug(source),
         }
     }
+
+    fn refined_pretty_debug(&self, refine: PrettyDebugRefineKind, source: &str) -> DebugDocBuilder {
+        match refine {
+            PrettyDebugRefineKind::ContextFree => self.pretty_debug(source),
+            PrettyDebugRefineKind::WithContext => match self {
+                NamedValue::AbsentSwitch => b::value("absent"),
+                NamedValue::PresentSwitch(_) => b::value("present"),
+                NamedValue::AbsentValue => b::value("absent"),
+                NamedValue::Value(value) => value.refined_pretty_debug(refine, source),
+            },
+        }
+    }
 }
 
 #[derive(Debug, Default, Clone, Eq, PartialEq, Serialize, Deserialize)]
@@ -37,6 +49,10 @@ impl NamedArguments {
     pub fn iter(&self) -> impl Iterator<Item = (&String, &NamedValue)> {
         self.named.iter()
     }
+
+    pub fn get(&self, name: &str) -> Option<&NamedValue> {
+        self.named.get(name)
+    }
 }
 
 impl NamedArguments {
@@ -56,28 +72,37 @@ impl NamedArguments {
         };
     }
 
-    pub fn insert_optional(&mut self, name: impl Into<String>, expr: Option<Expression>) {
+    pub fn insert_optional(&mut self, name: impl Into<String>, expr: Option<SpannedExpression>) {
         match expr {
             None => self.named.insert(name.into(), NamedValue::AbsentValue),
             Some(expr) => self.named.insert(name.into(), NamedValue::Value(expr)),
         };
     }
 
-    pub fn insert_mandatory(&mut self, name: impl Into<String>, expr: Expression) {
+    pub fn insert_mandatory(&mut self, name: impl Into<String>, expr: SpannedExpression) {
         self.named.insert(name.into(), NamedValue::Value(expr));
     }
 }
 
 impl PrettyDebugWithSource for NamedArguments {
+    fn refined_pretty_debug(&self, refine: PrettyDebugRefineKind, source: &str) -> DebugDocBuilder {
+        match refine {
+            PrettyDebugRefineKind::ContextFree => self.pretty_debug(source),
+            PrettyDebugRefineKind::WithContext => b::intersperse(
+                self.named.iter().map(|(key, value)| {
+                    b::key(key)
+                        + b::equals()
+                        + value.refined_pretty_debug(PrettyDebugRefineKind::WithContext, source)
+                }),
+                b::space(),
+            ),
+        }
+    }
+
     fn pretty_debug(&self, source: &str) -> DebugDocBuilder {
         b::delimit(
             "(",
-            b::intersperse(
-                self.named
-                    .iter()
-                    .map(|(key, value)| b::key(key) + b::equals() + value.pretty_debug(source)),
-                b::space(),
-            ),
+            self.refined_pretty_debug(PrettyDebugRefineKind::WithContext, source),
             ")",
         )
     }
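The named-argument hunks above add a `get` accessor and switch the stored expression type to `SpannedExpression`. The sketch below mirrors `get`/`insert_optional` on a reduced stand-in (`BTreeMap` instead of `IndexMap`, a plain `String` in place of the expression type, and `PresentSwitch` without its `Tag`), just to show how absent and present named values are distinguished by lookups such as `switch_preset`.

use std::collections::BTreeMap;

#[derive(Debug, PartialEq)]
enum NamedValue {
    PresentSwitch,
    AbsentValue,
    Value(String),
}

#[derive(Debug, Default)]
struct NamedArguments {
    named: BTreeMap<String, NamedValue>,
}

impl NamedArguments {
    fn get(&self, name: &str) -> Option<&NamedValue> {
        self.named.get(name)
    }

    // A declared optional argument is recorded even when no value was given.
    fn insert_optional(&mut self, name: impl Into<String>, expr: Option<String>) {
        match expr {
            None => self.named.insert(name.into(), NamedValue::AbsentValue),
            Some(expr) => self.named.insert(name.into(), NamedValue::Value(expr)),
        };
    }
}

fn main() {
    let mut named = NamedArguments::default();
    named.insert_optional("name", Some("*.txt".to_string()));
    named.insert_optional("pattern", None);
    named.named.insert("full".to_string(), NamedValue::PresentSwitch);

    assert_eq!(named.get("name"), Some(&NamedValue::Value("*.txt".to_string())));
    assert_eq!(named.get("pattern"), Some(&NamedValue::AbsentValue));
    assert_eq!(named.get("full"), Some(&NamedValue::PresentSwitch));
    assert_eq!(named.get("missing"), None);
}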
@@ -1,4 +1,4 @@
-use crate::hir::Expression;
+use crate::hir::SpannedExpression;
 use derive_new::new;
 use getset::{Getters, MutGetters};
 use nu_protocol::PathMember;
@@ -21,7 +21,7 @@ use serde::{Deserialize, Serialize};
 )]
 #[get = "pub"]
 pub struct Path {
-    head: Expression,
+    head: SpannedExpression,
     #[get_mut = "pub(crate)"]
     tail: Vec<PathMember>,
 }
@@ -35,7 +35,7 @@ impl PrettyDebugWithSource for Path {
 }
 
 impl Path {
-    pub(crate) fn parts(self) -> (Expression, Vec<PathMember>) {
+    pub(crate) fn parts(self) -> (SpannedExpression, Vec<PathMember>) {
         (self.head, self.tail)
     }
 }
@@ -1,4 +1,4 @@
-use crate::hir::Expression;
+use crate::hir::SpannedExpression;
 
 use derive_new::new;
 use getset::Getters;
@@ -10,11 +10,11 @@ use serde::{Deserialize, Serialize};
 )]
 pub struct Range {
     #[get = "pub"]
-    left: Expression,
+    left: SpannedExpression,
     #[get = "pub"]
     dotdot: Span,
     #[get = "pub"]
-    right: Expression,
+    right: SpannedExpression,
 }
 
 impl PrettyDebugWithSource for Range {
(File diff suppressed because it is too large.)
@@ -1,141 +1,82 @@
+use crate::hir::Expression;
 use crate::{
     hir,
     hir::syntax_shape::{
-        color_fallible_syntax, color_syntax_with, continue_expression, expand_expr, expand_syntax,
-        DelimitedShape, ExpandContext, ExpandExpression, ExpressionContinuationShape,
-        ExpressionListShape, FallibleColorSyntax, MemberShape, PathTailShape, PathTailSyntax,
+        ExpandSyntax, ExpressionContinuationShape, MemberShape, PathTailShape, PathTailSyntax,
         VariablePathShape,
     },
     hir::tokens_iterator::TokensIterator,
-    parse::token_tree::Delimiter,
 };
-use nu_errors::{ParseError, ShellError};
+use hir::SpannedExpression;
+use nu_errors::ParseError;
 use nu_source::Span;

 #[derive(Debug, Copy, Clone)]
-pub struct AnyBlockShape;
+pub struct CoerceBlockShape;

-impl FallibleColorSyntax for AnyBlockShape {
-    type Info = ();
-    type Input = ();
-
-    fn name(&self) -> &'static str {
-        "AnyBlockShape"
-    }
-
-    fn color_syntax<'a, 'b>(
-        &self,
-        _input: &(),
-        token_nodes: &'b mut TokensIterator<'a>,
-        context: &ExpandContext,
-    ) -> Result<(), ShellError> {
-        let block = token_nodes.peek_non_ws().not_eof("block");
-
-        let block = match block {
-            Err(_) => return Ok(()),
-            Ok(block) => block,
-        };
-
-        // is it just a block?
-        let block = block.node.as_block();
-
-        if let Some((children, spans)) = block {
-            token_nodes.child(children, context.source.clone(), |token_nodes| {
-                color_syntax_with(
-                    &DelimitedShape,
-                    &(Delimiter::Brace, spans.0, spans.1),
-                    token_nodes,
-                    context,
-                );
-            });
-
-            return Ok(());
-        }
-
-        // Otherwise, look for a shorthand block. If none found, fail
-        color_fallible_syntax(&ShorthandBlock, token_nodes, context)
-    }
-}
-
-impl ExpandExpression for AnyBlockShape {
+impl ExpandSyntax for CoerceBlockShape {
+    type Output = Result<SpannedExpression, ParseError>;
+
     fn name(&self) -> &'static str {
         "any block"
     }

-    fn expand_expr<'a, 'b>(
+    fn expand<'a, 'b>(
         &self,
         token_nodes: &mut TokensIterator<'_>,
-        context: &ExpandContext,
-    ) -> Result<hir::Expression, ParseError> {
-        let block = token_nodes.peek_non_ws().not_eof("block")?;
-
+    ) -> Result<SpannedExpression, ParseError> {
         // is it just a block?
-        let block = block.node.as_block();
-
-        if let Some((block, _tags)) = block {
-            let mut iterator =
-                TokensIterator::new(&block.item, block.span, context.source.clone(), false);
-
-            let exprs = expand_syntax(&ExpressionListShape, &mut iterator, context)?.exprs;
-
-            return Ok(hir::RawExpression::Block(exprs.item).into_expr(block.span));
-        }
-
-        expand_syntax(&ShorthandBlock, token_nodes, context)
+        token_nodes
+            .expand_syntax(BlockShape)
+            .or_else(|_| token_nodes.expand_syntax(ShorthandBlockShape))
     }
 }

 #[derive(Debug, Copy, Clone)]
-pub struct ShorthandBlock;
+pub struct BlockShape;

-impl FallibleColorSyntax for ShorthandBlock {
-    type Info = ();
-    type Input = ();
+impl ExpandSyntax for BlockShape {
+    type Output = Result<SpannedExpression, ParseError>;

     fn name(&self) -> &'static str {
-        "ShorthandBlock"
+        "block"
     }

-    fn color_syntax<'a, 'b>(
+    fn expand<'a, 'b>(
         &self,
-        _input: &(),
         token_nodes: &'b mut TokensIterator<'a>,
-        context: &ExpandContext,
-    ) -> Result<(), ShellError> {
-        // Try to find a shorthand head. If none found, fail
-        color_fallible_syntax(&ShorthandPath, token_nodes, context)?;
-
-        loop {
-            // Check to see whether there's any continuation after the head expression
-            let result = color_fallible_syntax(&ExpressionContinuationShape, token_nodes, context);
-
-            match result {
-                // if no continuation was found, we're done
-                Err(_) => break,
-                // if a continuation was found, look for another one
-                Ok(_) => continue,
-            }
-        }
-
-        Ok(())
+    ) -> Result<SpannedExpression, ParseError> {
+        let exprs = token_nodes.block()?;
+
+        Ok(hir::Expression::Block(exprs.item).into_expr(exprs.span))
     }
 }

-impl ExpandExpression for ShorthandBlock {
+#[derive(Debug, Copy, Clone)]
+pub struct ShorthandBlockShape;
+
+impl ExpandSyntax for ShorthandBlockShape {
+    type Output = Result<SpannedExpression, ParseError>;
+
     fn name(&self) -> &'static str {
         "shorthand block"
     }

-    fn expand_expr<'a, 'b>(
+    fn expand<'a, 'b>(
         &self,
         token_nodes: &'b mut TokensIterator<'a>,
-        context: &ExpandContext,
-    ) -> Result<hir::Expression, ParseError> {
-        let path = expand_expr(&ShorthandPath, token_nodes, context)?;
-        let start = path.span;
-        let expr = continue_expression(path, token_nodes, context);
-        let end = expr.span;
-        let block = hir::RawExpression::Block(vec![expr]).into_expr(start.until(end));
+    ) -> Result<SpannedExpression, ParseError> {
+        let mut current = token_nodes.expand_syntax(ShorthandPath)?;
+        loop {
+            match token_nodes.expand_syntax(ExpressionContinuationShape) {
+                Result::Err(_) => break,
+                Result::Ok(continuation) => current = continuation.append_to(current),
+            }
+        }
+
+        let span = current.span;
+
+        let block = hir::Expression::Block(vec![current]).into_expr(span);

         Ok(block)
     }
@@ -145,74 +86,40 @@ impl ExpandExpression for ShorthandBlock {
 #[derive(Debug, Copy, Clone)]
 pub struct ShorthandPath;

-impl FallibleColorSyntax for ShorthandPath {
-    type Info = ();
-    type Input = ();
-
-    fn name(&self) -> &'static str {
-        "ShorthandPath"
-    }
-
-    fn color_syntax<'a, 'b>(
-        &self,
-        _input: &(),
-        token_nodes: &'b mut TokensIterator<'a>,
-        context: &ExpandContext,
-    ) -> Result<(), ShellError> {
-        token_nodes.atomic(|token_nodes| {
-            let variable = color_fallible_syntax(&VariablePathShape, token_nodes, context);
-
-            if variable.is_ok() {
-                // if it's a variable path, that's the head part
-                return Ok(());
-            }
-
-            // otherwise, we'll try to find a member path
-
-            // look for a member (`<member>` -> `$it.<member>`)
-            color_fallible_syntax(&MemberShape, token_nodes, context)?;
-
-            // Now that we've synthesized the head, of the path, proceed to expand the tail of the path
-            // like any other path.
-            // It's ok if there's no path tail; a single member is sufficient
-            let _ = color_fallible_syntax(&PathTailShape, token_nodes, context);
-
-            Ok(())
-        })
-    }
-}
-
-impl ExpandExpression for ShorthandPath {
+impl ExpandSyntax for ShorthandPath {
+    type Output = Result<SpannedExpression, ParseError>;
+
     fn name(&self) -> &'static str {
         "shorthand path"
     }

-    fn expand_expr<'a, 'b>(
+    fn expand<'a, 'b>(
         &self,
         token_nodes: &'b mut TokensIterator<'a>,
-        context: &ExpandContext,
-    ) -> Result<hir::Expression, ParseError> {
+    ) -> Result<SpannedExpression, ParseError> {
         // if it's a variable path, that's the head part
-        let path = expand_expr(&VariablePathShape, token_nodes, context);
+        let path = token_nodes.expand_syntax(VariablePathShape);

         if let Ok(path) = path {
             return Ok(path);
         }

         // Synthesize the head of the shorthand path (`<member>` -> `$it.<member>`)
-        let mut head = expand_expr(&ShorthandHeadShape, token_nodes, context)?;
+        let mut head = token_nodes.expand_syntax(ShorthandHeadShape)?;

         // Now that we've synthesized the head, of the path, proceed to expand the tail of the path
         // like any other path.
-        let tail = expand_syntax(&PathTailShape, token_nodes, context);
+        let tail = token_nodes.expand_syntax(PathTailShape);

         match tail {
             Err(_) => Ok(head),
-            Ok(PathTailSyntax { tail, .. }) => {
+            Ok(PathTailSyntax { tail, span }) => {
+                let span = head.span.until(span);
+
                 // For each member that `PathTailShape` expanded, join it onto the existing expression
                 // to form a new path
                 for member in tail {
-                    head = hir::Expression::dot_member(head, member);
+                    head = Expression::dot_member(head, member).into_expr(span);
                 }

                 Ok(head)
@@ -225,27 +132,28 @@ impl ExpandExpression for ShorthandPath {
 #[derive(Debug, Copy, Clone)]
 pub struct ShorthandHeadShape;

-impl ExpandExpression for ShorthandHeadShape {
+impl ExpandSyntax for ShorthandHeadShape {
+    type Output = Result<SpannedExpression, ParseError>;
+
     fn name(&self) -> &'static str {
         "shorthand head"
     }

-    fn expand_expr<'a, 'b>(
+    fn expand<'a, 'b>(
         &self,
         token_nodes: &'b mut TokensIterator<'a>,
-        context: &ExpandContext,
-    ) -> Result<hir::Expression, ParseError> {
-        let head = expand_syntax(&MemberShape, token_nodes, context)?;
-        let head = head.to_path_member(context.source);
+    ) -> Result<SpannedExpression, ParseError> {
+        let head = token_nodes.expand_syntax(MemberShape)?;
+        let head = head.to_path_member(&token_nodes.source());

         // Synthesize an `$it` expression
         let it = synthetic_it();
         let span = head.span;

-        Ok(hir::Expression::path(it, vec![head], span))
+        Ok(Expression::path(it, vec![head]).into_expr(span))
     }
 }

-fn synthetic_it() -> hir::Expression {
-    hir::Expression::it_variable(Span::unknown(), Span::unknown())
+fn synthetic_it() -> hir::SpannedExpression {
+    Expression::it_variable(Span::unknown()).into_expr(Span::unknown())
 }
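
The pattern in the rewritten block shapes above repeats throughout the rest of this diff: every shape now implements `ExpandSyntax` with `type Output = Result<SpannedExpression, ParseError>`, alternatives are tried with `or_else`, and continuations are folded onto the current expression in a loop. The following is a minimal, self-contained sketch of that combinator style; the `Cursor`, `attempt`, `variable`, `bare_word`, and `dot_continuation` names are invented stand-ins for illustration, not the real `TokensIterator` API.

```rust
// Toy model: each "shape" is a fallible parser over a shared cursor that
// rewinds on failure; alternatives chain with `or_else`, continuations
// are folded onto the current expression until none applies.
#[derive(Debug, Clone)]
struct Expr(String);

struct Cursor<'a> {
    tokens: &'a [&'a str],
    pos: usize,
}

impl<'a> Cursor<'a> {
    /// Hypothetical stand-in for `expand_syntax`: run a parser, rewinding
    /// the cursor if it fails so the next alternative starts from the same spot.
    fn attempt<T>(&mut self, parse: fn(&mut Self) -> Result<T, ()>) -> Result<T, ()> {
        let checkpoint = self.pos;
        parse(self).map_err(|err| {
            self.pos = checkpoint;
            err
        })
    }
}

fn variable(cursor: &mut Cursor) -> Result<Expr, ()> {
    match cursor.tokens.get(cursor.pos) {
        Some(tok) if tok.starts_with('$') => {
            cursor.pos += 1;
            Ok(Expr((*tok).to_string()))
        }
        _ => Err(()),
    }
}

fn bare_word(cursor: &mut Cursor) -> Result<Expr, ()> {
    let tok = cursor.tokens.get(cursor.pos).ok_or(())?;
    cursor.pos += 1;
    Ok(Expr((*tok).to_string()))
}

/// A `.member` continuation in the spirit of ExpressionContinuationShape:
/// it either extends the head expression or fails without consuming input.
fn dot_continuation(cursor: &mut Cursor, head: &Expr) -> Result<Expr, ()> {
    match (cursor.tokens.get(cursor.pos), cursor.tokens.get(cursor.pos + 1)) {
        (Some(&"."), Some(member)) => {
            cursor.pos += 2;
            Ok(Expr(format!("{}.{}", head.0, member)))
        }
        _ => Err(()),
    }
}

fn main() {
    let tokens = ["$it", ".", "name", ".", "first"];
    let mut cursor = Cursor { tokens: &tokens, pos: 0 };

    // start: first alternative that succeeds wins, as with the `or_else` chains above
    let mut current = cursor
        .attempt(variable)
        .or_else(|_| cursor.attempt(bare_word))
        .expect("an expression start");

    // continuations: keep appending until no continuation applies
    while let Ok(next) = dot_continuation(&mut cursor, &current) {
        current = next;
    }

    println!("{:?}", current); // Expr("$it.name.first")
}
```

In the real code the same two moves show up as `expand_syntax(...).or_else(|_| ...)` for alternatives and `continuation.append_to(current)` inside a loop for continuations.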
 72    crates/nu-parser/src/hir/syntax_shape/design.md    Normal file
@@ -0,0 +1,72 @@
+# Meaningful Primitive Tokens
+
+- `int`
+- `decimal`
+- `op::name`
+- `dot`
+- `dotdot`
+- `string`
+- `var::it`
+- `var::other`
+- `external-command`
+- `pattern::glob`
+- `word`
+- `comment`
+- `whitespace`
+- `separator`
+- `longhand-flag`
+- `shorthand-flag`
+
+# Grouped Tokens
+
+- `(call head ...tail)`
+- `(list ...nodes)`
+- `(paren ...nodes)`
+- `(square ...nodes)`
+- `(curly ...nodes)`
+- `(pipeline ...elements) where elements: pipeline-element`
+- `(pipeline-element pipe? token)`
+
+# Atomic Tokens
+
+- `(unit number unit) where number: number, unit: unit`
+
+# Expression
+
+```
+start(ExpressionStart) continuation(ExpressionContinuation)* ->
+```
+
+## ExpressionStart
+
+```
+word -> String
+unit -> Unit
+number -> Number
+string -> String
+var::it -> Var::It
+var::other -> Var::Other
+pattern::glob -> Pattern::Glob
+square -> Array
+```
+
+## TightExpressionContinuation
+
+```
+dot AnyExpression -> Member
+dodot AnyExpression -> RangeContinuation
+```
+
+## InfixExpressionContinuation
+
+```
+whitespace op whitespace AnyExpression -> InfixContinuation
+```
+
+## Member
+
+```
+int -> Member::Int
+word -> Member::Word
+string -> Member::String
+```
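
The grammar notes in design.md are easiest to read next to a concrete data model. The sketch below encodes the three token layers the note lists (primitive tokens, the atomic `unit` token, and grouped tokens) as plain Rust enums and builds `ls | where size > 10kb` by hand; the `Primitive`, `Node`, and `PipelineElement` types here are hypothetical illustrations of the note, not the parser's actual token types.

```rust
// Sketch of the token taxonomy from design.md (illustration only).
#[derive(Debug)]
enum Primitive {
    Int(i64),
    Decimal(f64),
    Op(String), // op::name
    Str(String),
    ItVar,             // var::it
    OtherVar(String),  // var::other
    GlobPattern(String),
    Word(String),
}

#[derive(Debug)]
enum Node {
    Primitive(Primitive),
    // atomic token: a number immediately followed by a unit, e.g. `10kb`
    Unit { number: Box<Node>, unit: String },
    // grouped tokens
    Call { head: Box<Node>, tail: Vec<Node> },
    Square(Vec<Node>),
    Pipeline(Vec<PipelineElement>),
}

#[derive(Debug)]
struct PipelineElement {
    pipe: bool, // was this element preceded by `|`?
    token: Node,
}

fn main() {
    // `ls | where size > 10kb` sketched as a pipeline of two calls
    let pipeline = Node::Pipeline(vec![
        PipelineElement {
            pipe: false,
            token: Node::Call {
                head: Box::new(Node::Primitive(Primitive::Word("ls".into()))),
                tail: vec![],
            },
        },
        PipelineElement {
            pipe: true,
            token: Node::Call {
                head: Box::new(Node::Primitive(Primitive::Word("where".into()))),
                tail: vec![
                    Node::Primitive(Primitive::Word("size".into())),
                    Node::Primitive(Primitive::Op(">".into())),
                    Node::Unit {
                        number: Box::new(Node::Primitive(Primitive::Int(10))),
                        unit: "kb".into(),
                    },
                ],
            },
        },
    ]);

    println!("{:#?}", pipeline);
}
```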
@@ -1,4 +1,3 @@
-pub(crate) mod atom;
 pub(crate) mod delimited;
 pub(crate) mod file_path;
 pub(crate) mod list;
@@ -10,311 +9,64 @@ pub(crate) mod unit;
 pub(crate) mod variable_path;

 use crate::hir::syntax_shape::{
-    color_delimited_square, color_fallible_syntax, color_fallible_syntax_with, expand_atom,
-    expand_delimited_square, expand_expr, expand_syntax, BareShape, ColorableDotShape, DotShape,
-    ExpandContext, ExpandExpression, ExpandSyntax, ExpansionRule, ExpressionContinuation,
-    ExpressionContinuationShape, FallibleColorSyntax, FlatShape, UnspannedAtomicToken,
+    BareExpressionShape, DelimitedSquareShape, ExpandContext, ExpandSyntax,
+    ExpressionContinuationShape, NumberExpressionShape, PatternExpressionShape,
+    StringExpressionShape, UnitExpressionShape, VariableShape,
 };
-use crate::{
-    hir,
-    hir::{Expression, TokensIterator},
-};
-use nu_errors::{ParseError, ShellError};
-use nu_source::{HasSpan, Span, Spanned, SpannedItem, Tag};
+use crate::hir::{SpannedExpression, TokensIterator};
+use nu_errors::ParseError;
 use std::path::PathBuf;

 #[derive(Debug, Copy, Clone)]
 pub struct AnyExpressionShape;

-impl ExpandExpression for AnyExpressionShape {
+impl ExpandSyntax for AnyExpressionShape {
+    type Output = Result<SpannedExpression, ParseError>;
+
     fn name(&self) -> &'static str {
         "any expression"
     }

-    fn expand_expr<'a, 'b>(
+    fn expand<'a, 'b>(
         &self,
         token_nodes: &mut TokensIterator<'_>,
-        context: &ExpandContext,
-    ) -> Result<hir::Expression, ParseError> {
-        // Look for an expression at the cursor
-        let head = expand_expr(&AnyExpressionStartShape, token_nodes, context)?;
-
-        Ok(continue_expression(head, token_nodes, context))
-    }
-}
-
-impl FallibleColorSyntax for AnyExpressionShape {
-    type Info = ();
-    type Input = ();
-
-    fn name(&self) -> &'static str {
-        "AnyExpressionShape"
-    }
-
-    fn color_syntax<'a, 'b>(
-        &self,
-        _input: &(),
-        token_nodes: &'b mut TokensIterator<'a>,
-        context: &ExpandContext,
-    ) -> Result<(), ShellError> {
-        // Look for an expression at the cursor
-        color_fallible_syntax(&AnyExpressionStartShape, token_nodes, context)?;
-
-        match continue_coloring_expression(token_nodes, context) {
-            Err(_) => {
-                // it's fine for there to be no continuation
-            }
-            Ok(()) => {}
-        }
-
-        Ok(())
-    }
-}
-
-pub(crate) fn continue_expression(
-    mut head: hir::Expression,
-    token_nodes: &mut TokensIterator<'_>,
-    context: &ExpandContext,
-) -> hir::Expression {
-    loop {
-        // Check to see whether there's any continuation after the head expression
-        let continuation = expand_syntax(&ExpressionContinuationShape, token_nodes, context);
-
-        match continuation {
-            // If there's no continuation, return the head
-            Err(_) => return head,
-            // Otherwise, form a new expression by combining the head with the continuation
-            Ok(continuation) => match continuation {
-                // If the continuation is a `.member`, form a path with the new member
-                ExpressionContinuation::DotSuffix(_dot, member) => {
-                    head = Expression::dot_member(head, member);
-                }
-
-                // Otherwise, if the continuation is an infix suffix, form an infix expression
-                ExpressionContinuation::InfixSuffix(op, expr) => {
-                    head = Expression::infix(head, op, expr);
-                }
-            },
-        }
-    }
-}
-
-pub(crate) fn continue_coloring_expression(
-    token_nodes: &mut TokensIterator<'_>,
-    context: &ExpandContext,
-) -> Result<(), ShellError> {
-    // if there's not even one expression continuation, fail
-    color_fallible_syntax(&ExpressionContinuationShape, token_nodes, context)?;
-
-    loop {
-        // Check to see whether there's any continuation after the head expression
-        let result = color_fallible_syntax(&ExpressionContinuationShape, token_nodes, context);
-
-        if result.is_err() {
-            // We already saw one continuation, so just return
-            return Ok(());
-        }
+    ) -> Result<SpannedExpression, ParseError> {
+        token_nodes.atomic_parse(|token_nodes| {
+            // Look for an atomic expression at the cursor
+            let mut current = token_nodes.expand_syntax(AnyExpressionStartShape)?;
+
+            loop {
+                match token_nodes.expand_syntax(ExpressionContinuationShape) {
+                    Err(_) => return Ok(current),
+                    Ok(continuation) => current = continuation.append_to(current),
+                }
+            }
+        })
     }
 }

 #[derive(Debug, Copy, Clone)]
 pub struct AnyExpressionStartShape;

-impl ExpandExpression for AnyExpressionStartShape {
+impl ExpandSyntax for AnyExpressionStartShape {
+    type Output = Result<SpannedExpression, ParseError>;
+
     fn name(&self) -> &'static str {
         "any expression start"
     }

-    fn expand_expr<'a, 'b>(
+    fn expand<'a, 'b>(
         &self,
         token_nodes: &mut TokensIterator<'_>,
-        context: &ExpandContext,
-    ) -> Result<hir::Expression, ParseError> {
-        let atom = expand_atom(token_nodes, "expression", context, ExpansionRule::new())?;
-
-        match atom.unspanned {
-            UnspannedAtomicToken::Size { number, unit } => Ok(hir::Expression::size(
-                number.to_number(context.source),
-                unit.item,
-                Tag {
-                    span: atom.span,
-                    anchor: None,
-                },
-            )),
-
-            UnspannedAtomicToken::SquareDelimited { nodes, .. } => {
-                expand_delimited_square(&nodes, atom.span, context)
-            }
-
-            UnspannedAtomicToken::Word { .. } => {
-                let end = expand_syntax(&BareTailShape, token_nodes, context)?;
-                Ok(hir::Expression::bare(atom.span.until_option(end)))
-            }
-
-            other => other
-                .into_atomic_token(atom.span)
-                .to_hir(context, "expression"),
-        }
-    }
-}
-
-impl FallibleColorSyntax for AnyExpressionStartShape {
-    type Info = ();
-    type Input = ();
-
-    fn name(&self) -> &'static str {
-        "AnyExpressionStartShape"
-    }
-
-    fn color_syntax<'a, 'b>(
-        &self,
-        _input: &(),
-        token_nodes: &'b mut TokensIterator<'a>,
-        context: &ExpandContext,
-    ) -> Result<(), ShellError> {
-        let atom = token_nodes.spanned(|token_nodes| {
-            expand_atom(
-                token_nodes,
-                "expression",
-                context,
-                ExpansionRule::permissive(),
-            )
-        });
-
-        let atom = match atom {
-            Spanned {
-                item: Err(_err),
-                span,
-            } => {
-                token_nodes.color_shape(FlatShape::Error.spanned(span));
-                return Ok(());
-            }
-
-            Spanned {
-                item: Ok(value), ..
-            } => value,
-        };
-
-        match atom.unspanned {
-            UnspannedAtomicToken::Size { number, unit } => token_nodes.color_shape(
-                FlatShape::Size {
-                    number: number.span(),
-                    unit: unit.span,
-                }
-                .spanned(atom.span),
-            ),
-
-            UnspannedAtomicToken::SquareDelimited { nodes, spans } => {
-                token_nodes.child(
-                    (&nodes[..]).spanned(atom.span),
-                    context.source.clone(),
-                    |tokens| {
-                        color_delimited_square(spans, tokens, atom.span, context);
-                    },
-                );
-            }
-
-            UnspannedAtomicToken::Word { .. } | UnspannedAtomicToken::Dot { .. } => {
-                token_nodes.color_shape(FlatShape::Word.spanned(atom.span));
-            }
-
-            _ => token_nodes.mutate_shapes(|shapes| atom.color_tokens(shapes)),
-        }
-
-        Ok(())
-    }
-}
-
-#[derive(Debug, Copy, Clone)]
-pub struct BareTailShape;
-
-impl FallibleColorSyntax for BareTailShape {
-    type Info = ();
-    type Input = ();
-
-    fn name(&self) -> &'static str {
-        "BareTailShape"
-    }
-
-    fn color_syntax<'a, 'b>(
-        &self,
-        _input: &(),
-        token_nodes: &'b mut TokensIterator<'a>,
-        context: &ExpandContext,
-    ) -> Result<(), ShellError> {
-        let len = token_nodes.state().shapes().len();
-
-        loop {
-            let word =
-                color_fallible_syntax_with(&BareShape, &FlatShape::Word, token_nodes, context);
-
-            if word.is_ok() {
-                // if a word was found, continue
-                continue;
-            }
-
-            // if a word wasn't found, try to find a dot
-
-            // try to find a dot
-            let dot = color_fallible_syntax_with(
-                &ColorableDotShape,
-                &FlatShape::Word,
-                token_nodes,
-                context,
-            );
-
-            match dot {
-                // if a dot was found, try to find another word
-                Ok(_) => continue,
-                // otherwise, we're done
-                Err(_) => break,
-            }
-        }
-
-        if token_nodes.state().shapes().len() > len {
-            Ok(())
-        } else {
-            Err(ShellError::syntax_error(
-                "No tokens matched BareTailShape".spanned_unknown(),
-            ))
-        }
-    }
-}
-
-impl ExpandSyntax for BareTailShape {
-    fn name(&self) -> &'static str {
-        "word continuation"
-    }
-
-    type Output = Option<Span>;
-
-    fn expand_syntax<'a, 'b>(
-        &self,
-        token_nodes: &'b mut TokensIterator<'a>,
-        context: &ExpandContext,
-    ) -> Result<Option<Span>, ParseError> {
-        let mut end: Option<Span> = None;
-
-        loop {
-            match expand_syntax(&BareShape, token_nodes, context) {
-                Ok(bare) => {
-                    end = Some(bare.span);
-                    continue;
-                }
-
-                Err(_) => match expand_syntax(&DotShape, token_nodes, context) {
-                    Ok(dot) => {
-                        end = Some(dot);
-                        continue;
-                    }
-
-                    Err(_) => break,
-                },
-            }
-        }
-
-        Ok(end)
+    ) -> Result<SpannedExpression, ParseError> {
+        token_nodes
+            .expand_syntax(VariableShape)
+            .or_else(|_| token_nodes.expand_syntax(UnitExpressionShape))
+            .or_else(|_| token_nodes.expand_syntax(BareExpressionShape))
+            .or_else(|_| token_nodes.expand_syntax(PatternExpressionShape))
+            .or_else(|_| token_nodes.expand_syntax(NumberExpressionShape))
+            .or_else(|_| token_nodes.expand_syntax(StringExpressionShape))
+            .or_else(|_| token_nodes.expand_syntax(DelimitedSquareShape))
     }
 }
@@ -1,55 +1,24 @@
-use crate::hir::syntax_shape::{
-    color_syntax, expand_syntax, ColorSyntax, ExpandContext, ExpressionListShape, TokenNode,
-};
-use crate::{hir, hir::TokensIterator, Delimiter, FlatShape};
+use crate::hir::syntax_shape::ExpandSyntax;
+use crate::hir::SpannedExpression;
+use crate::{hir, hir::TokensIterator};
 use nu_errors::ParseError;
-use nu_source::{Span, SpannedItem, Tag};
-
-pub fn expand_delimited_square(
-    children: &[TokenNode],
-    span: Span,
-    context: &ExpandContext,
-) -> Result<hir::Expression, ParseError> {
-    let mut tokens = TokensIterator::new(&children, span, context.source.clone(), false);
-
-    let list = expand_syntax(&ExpressionListShape, &mut tokens, context);
-
-    Ok(hir::Expression::list(
-        list?.exprs.item,
-        Tag { span, anchor: None },
-    ))
-}
-
-pub fn color_delimited_square(
-    (open, close): (Span, Span),
-    token_nodes: &mut TokensIterator,
-    _span: Span,
-    context: &ExpandContext,
-) {
-    token_nodes.color_shape(FlatShape::OpenDelimiter(Delimiter::Square).spanned(open));
-    let _list = color_syntax(&ExpressionListShape, token_nodes, context);
-    token_nodes.color_shape(FlatShape::CloseDelimiter(Delimiter::Square).spanned(close));
-}

 #[derive(Debug, Copy, Clone)]
-pub struct DelimitedShape;
+pub struct DelimitedSquareShape;

-impl ColorSyntax for DelimitedShape {
-    type Info = ();
-    type Input = (Delimiter, Span, Span);
+impl ExpandSyntax for DelimitedSquareShape {
+    type Output = Result<SpannedExpression, ParseError>;

     fn name(&self) -> &'static str {
-        "DelimitedShape"
+        "delimited square"
     }

-    fn color_syntax<'a, 'b>(
+    fn expand<'a, 'b>(
         &self,
-        (delimiter, open, close): &(Delimiter, Span, Span),
         token_nodes: &'b mut TokensIterator<'a>,
-        context: &ExpandContext,
-    ) -> Self::Info {
-        token_nodes.color_shape(FlatShape::OpenDelimiter(*delimiter).spanned(*open));
-        color_syntax(&ExpressionListShape, token_nodes, context);
-        token_nodes.color_shape(FlatShape::CloseDelimiter(*delimiter).spanned(*close));
+    ) -> Result<SpannedExpression, ParseError> {
+        let exprs = token_nodes.square()?;
+
+        Ok(hir::Expression::list(exprs.item).into_expr(exprs.span))
     }
 }
@@ -1,88 +1,62 @@
-use crate::hir::syntax_shape::expression::atom::{
-    expand_atom, ExpansionRule, UnspannedAtomicToken,
-};
 use crate::hir::syntax_shape::{
-    expression::expand_file_path, ExpandContext, ExpandExpression, FallibleColorSyntax, FlatShape,
+    expression::expand_file_path, BarePathShape, DecimalShape, ExpandContext, ExpandSyntax,
+    FlatShape, IntShape, StringShape,
 };
-use crate::{hir, hir::TokensIterator};
-use nu_errors::{ParseError, ShellError};
-use nu_source::SpannedItem;
+use crate::hir::{Expression, SpannedExpression, TokensIterator};
+use crate::parse::token_tree::ExternalWordType;
+use nu_errors::ParseError;
+use nu_source::{HasSpan, Span};

 #[derive(Debug, Copy, Clone)]
 pub struct FilePathShape;

-impl FallibleColorSyntax for FilePathShape {
-    type Info = ();
-    type Input = ();
-
-    fn name(&self) -> &'static str {
-        "FilePathShape"
-    }
-
-    fn color_syntax<'a, 'b>(
-        &self,
-        _input: &(),
-        token_nodes: &'b mut TokensIterator<'a>,
-        context: &ExpandContext,
-    ) -> Result<(), ShellError> {
-        let atom = expand_atom(
-            token_nodes,
-            "file path",
-            context,
-            ExpansionRule::permissive(),
-        );
-
-        let atom = match atom {
-            Err(_) => return Ok(()),
-            Ok(atom) => atom,
-        };
-
-        match atom.unspanned {
-            UnspannedAtomicToken::Word { .. }
-            | UnspannedAtomicToken::String { .. }
-            | UnspannedAtomicToken::Number { .. }
-            | UnspannedAtomicToken::Size { .. } => {
-                token_nodes.color_shape(FlatShape::Path.spanned(atom.span));
-            }
-
-            _ => token_nodes.mutate_shapes(|shapes| atom.color_tokens(shapes)),
-        }
-
-        Ok(())
-    }
-}
-
-impl ExpandExpression for FilePathShape {
+impl ExpandSyntax for FilePathShape {
+    type Output = Result<SpannedExpression, ParseError>;
+
     fn name(&self) -> &'static str {
         "file path"
     }

-    fn expand_expr<'a, 'b>(
+    fn expand<'a, 'b>(
         &self,
         token_nodes: &mut TokensIterator<'_>,
-        context: &ExpandContext,
-    ) -> Result<hir::Expression, ParseError> {
-        let atom = expand_atom(
-            token_nodes,
-            "file path",
-            context,
-            ExpansionRule::new().allow_external_word(),
-        )?;
-
-        match atom.unspanned {
-            UnspannedAtomicToken::Word { text: body }
-            | UnspannedAtomicToken::ExternalWord { text: body }
-            | UnspannedAtomicToken::String { body } => {
-                let path = expand_file_path(body.slice(context.source), context);
-                Ok(hir::Expression::file_path(path, atom.span))
-            }
-
-            UnspannedAtomicToken::Number { .. } | UnspannedAtomicToken::Size { .. } => {
-                let path = atom.span.slice(context.source);
-                Ok(hir::Expression::file_path(path, atom.span))
-            }
-
-            _ => atom.to_hir(context, "file path"),
-        }
+    ) -> Result<SpannedExpression, ParseError> {
+        token_nodes
+            .expand_syntax(BarePathShape)
+            .or_else(|_| token_nodes.expand_syntax(ExternalWordShape))
+            .map(|span| file_path(span, token_nodes.context()).into_expr(span))
+            .or_else(|_| {
+                token_nodes.expand_syntax(StringShape).map(|syntax| {
+                    file_path(syntax.inner, token_nodes.context()).into_expr(syntax.span)
+                })
+            })
+            .or_else(|_| {
+                token_nodes
+                    .expand_syntax(IntShape)
+                    .or_else(|_| token_nodes.expand_syntax(DecimalShape))
+                    .map(|number| {
+                        file_path(number.span(), token_nodes.context()).into_expr(number.span())
+                    })
+            })
+            .map_err(|_| token_nodes.err_next_token("file path"))
     }
 }

+fn file_path(text: Span, context: &ExpandContext) -> Expression {
+    Expression::FilePath(expand_file_path(text.slice(context.source), context))
+}
+
+#[derive(Debug, Copy, Clone)]
+pub struct ExternalWordShape;
+
+impl ExpandSyntax for ExternalWordShape {
+    type Output = Result<Span, ParseError>;
+
+    fn name(&self) -> &'static str {
+        "external word"
+    }
+
+    fn expand<'a, 'b>(&self, token_nodes: &'b mut TokensIterator<'a>) -> Result<Span, ParseError> {
+        token_nodes.expand_token(ExternalWordType, |span| Ok((FlatShape::ExternalWord, span)))
+    }
+}
@@ -1,18 +1,15 @@
+use crate::hir::syntax_shape::flat_shape::FlatShape;
 use crate::{
     hir,
-    hir::syntax_shape::{
-        color_fallible_syntax, color_syntax, expand_atom, expand_expr, maybe_spaced, spaced,
-        AnyExpressionShape, ColorSyntax, ExpandContext, ExpandSyntax, ExpansionRule,
-        MaybeSpaceShape, SpaceShape,
-    },
+    hir::syntax_shape::{AnyExpressionShape, ExpandSyntax, MaybeSpaceShape},
     hir::TokensIterator,
 };
-use nu_errors::ParseError;
+use derive_new::new;
 use nu_source::{b, DebugDocBuilder, HasSpan, PrettyDebugWithSource, Span, Spanned, SpannedItem};

 #[derive(Debug, Clone)]
 pub struct ExpressionListSyntax {
-    pub exprs: Spanned<Vec<hir::Expression>>,
+    pub exprs: Spanned<Vec<hir::SpannedExpression>>,
 }

 impl HasSpan for ExpressionListSyntax {
@@ -40,99 +37,60 @@ impl ExpandSyntax for ExpressionListShape {
         "expression list"
     }

-    fn expand_syntax<'a, 'b>(
-        &self,
-        token_nodes: &mut TokensIterator<'_>,
-        context: &ExpandContext,
-    ) -> Result<ExpressionListSyntax, ParseError> {
-        let mut exprs = vec![];
-
-        let start = token_nodes.span_at_cursor();
-
-        if token_nodes.at_end_possible_ws() {
-            return Ok(ExpressionListSyntax {
-                exprs: exprs.spanned(start),
-            });
-        }
-
-        let expr = expand_expr(&maybe_spaced(AnyExpressionShape), token_nodes, context)?;
-
-        exprs.push(expr);
-
-        loop {
-            if token_nodes.at_end_possible_ws() {
-                let end = token_nodes.span_at_cursor();
-                return Ok(ExpressionListSyntax {
-                    exprs: exprs.spanned(start.until(end)),
-                });
-            }
-
-            let expr = expand_expr(&spaced(AnyExpressionShape), token_nodes, context)?;
-
-            exprs.push(expr);
-        }
-    }
-}
-
-impl ColorSyntax for ExpressionListShape {
-    type Info = ();
-    type Input = ();
-
-    fn name(&self) -> &'static str {
-        "ExpressionListShape"
-    }
-
-    /// The intent of this method is to fully color an expression list shape infallibly.
-    /// This means that if we can't expand a token into an expression, we fall back to
-    /// a simpler coloring strategy.
-    ///
-    /// This would apply to something like `where x >`, which includes an incomplete
-    /// binary operator. Since we will fail to process it as a binary operator, we'll
-    /// fall back to a simpler coloring and move on.
-    fn color_syntax<'a, 'b>(
-        &self,
-        _input: &(),
-        token_nodes: &'b mut TokensIterator<'a>,
-        context: &ExpandContext,
-    ) {
+    fn expand<'a, 'b>(&self, token_nodes: &mut TokensIterator<'_>) -> ExpressionListSyntax {
         // We encountered a parsing error and will continue with simpler coloring ("backoff
         // coloring mode")
         let mut backoff = false;

-        // Consume any leading whitespace
-        color_syntax(&MaybeSpaceShape, token_nodes, context);
+        let mut exprs = vec![];
+
+        let start = token_nodes.span_at_cursor();
+
+        token_nodes.expand_infallible(MaybeSpaceShape);
+
+        if token_nodes.at_end() {
+            return ExpressionListSyntax {
+                exprs: exprs.spanned(start),
+            };
+        }
+
+        let expr = token_nodes.expand_syntax(AnyExpressionShape);
+
+        match expr {
+            Ok(expr) => exprs.push(expr),
+            Err(_) => backoff = true,
+        }

         loop {
-            // If we reached the very end of the token stream, we're done
             if token_nodes.at_end() {
-                return;
+                let end = token_nodes.span_at_cursor();
+                return ExpressionListSyntax {
+                    exprs: exprs.spanned(start.until(end)),
+                };
             }

             if backoff {
                 let len = token_nodes.state().shapes().len();

                 // If we previously encountered a parsing error, use backoff coloring mode
-                color_syntax(&SimplestExpression, token_nodes, context);
+                token_nodes
+                    .expand_infallible(SimplestExpression::new(vec!["expression".to_string()]));

                 if len == token_nodes.state().shapes().len() && !token_nodes.at_end() {
                     // This should never happen, but if it does, a panic is better than an infinite loop
                     panic!("Unexpected tokens left that couldn't be colored even with SimplestExpression")
                 }
             } else {
-                // Try to color the head of the stream as an expression
-                if color_fallible_syntax(&AnyExpressionShape, token_nodes, context).is_err() {
-                    // If no expression was found, switch to backoff coloring mode
-
-                    backoff = true;
-                    continue;
-                }
-
-                // If an expression was found, consume a space
-                if color_fallible_syntax(&SpaceShape, token_nodes, context).is_err() {
-                    // If no space was found, we're either at the end or there's an error.
-                    // Either way, switch to backoff coloring mode. If we're at the end
-                    // it won't have any consequences.
-                    backoff = true;
-                }
+                let expr = token_nodes.atomic_parse(|token_nodes| {
+                    token_nodes.expand_infallible(MaybeSpaceShape);
+                    token_nodes.expand_syntax(AnyExpressionShape)
+                });
+
+                match expr {
+                    Ok(expr) => exprs.push(expr),
+                    Err(_) => {
+                        backoff = true;
+                    }
+                }
             }
             // Otherwise, move on to the next expression
         }
@@ -141,69 +99,72 @@ impl ColorSyntax for ExpressionListShape {
     }
 }

 /// BackoffColoringMode consumes all of the remaining tokens in an infallible way
-#[derive(Debug, Copy, Clone)]
-pub struct BackoffColoringMode;
+#[derive(Debug, Clone, new)]
+pub struct BackoffColoringMode {
+    allowed: Vec<String>,
+}

-impl ColorSyntax for BackoffColoringMode {
-    type Info = ();
-    type Input = ();
+impl ExpandSyntax for BackoffColoringMode {
+    type Output = Option<Span>;

     fn name(&self) -> &'static str {
         "BackoffColoringMode"
     }

-    fn color_syntax<'a, 'b>(
-        &self,
-        _input: &Self::Input,
-        token_nodes: &'b mut TokensIterator<'a>,
-        context: &ExpandContext,
-    ) -> Self::Info {
+    fn expand<'a, 'b>(&self, token_nodes: &'b mut TokensIterator<'a>) -> Option<Span> {
         loop {
             if token_nodes.at_end() {
                 break;
             }

             let len = token_nodes.state().shapes().len();
-            color_syntax(&SimplestExpression, token_nodes, context);
+            token_nodes.expand_infallible(SimplestExpression::new(self.allowed.clone()));

             if len == token_nodes.state().shapes().len() && !token_nodes.at_end() {
                 // This shouldn't happen, but if it does, a panic is better than an infinite loop
                 panic!("SimplestExpression failed to consume any tokens, but it's not at the end. This is unexpected\n== token nodes==\n{:#?}\n\n== shapes ==\n{:#?}", token_nodes, token_nodes.state().shapes());
             }
         }

+        None
     }
 }

 /// The point of `SimplestExpression` is to serve as an infallible base case for coloring.
 /// As a last ditch effort, if we can't find any way to parse the head of the stream as an
 /// expression, fall back to simple coloring.
-#[derive(Debug, Copy, Clone)]
-pub struct SimplestExpression;
+#[derive(Debug, Clone, new)]
+pub struct SimplestExpression {
+    valid_shapes: Vec<String>,
+}

-impl ColorSyntax for SimplestExpression {
-    type Info = ();
-    type Input = ();
+impl ExpandSyntax for SimplestExpression {
+    type Output = Span;

     fn name(&self) -> &'static str {
         "SimplestExpression"
     }

-    fn color_syntax<'a, 'b>(
-        &self,
-        _input: &(),
-        token_nodes: &'b mut TokensIterator<'a>,
-        context: &ExpandContext,
-    ) {
-        let atom = expand_atom(
-            token_nodes,
-            "any token",
-            context,
-            ExpansionRule::permissive(),
-        );
-
-        match atom {
-            Err(_) => {}
-            Ok(atom) => token_nodes.mutate_shapes(|shapes| atom.color_tokens(shapes)),
+    fn expand<'a, 'b>(&self, token_nodes: &'b mut TokensIterator<'a>) -> Span {
+        if token_nodes.at_end() {
+            return Span::unknown();
+        }
+
+        let source = token_nodes.source();
+
+        let peeked = token_nodes.peek();
+
+        match peeked.not_eof("simplest expression") {
+            Err(_) => token_nodes.span_at_cursor(),
+            Ok(peeked) => {
+                let token = peeked.commit();
+
+                for shape in FlatShape::shapes(token, &source) {
+                    token_nodes.color_err(shape, self.valid_shapes.clone())
+                }
+
+                token.span()
+            }
         }
     }
 }
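
The doc comments above describe the "backoff coloring" strategy: once the head of the stream fails to parse as an expression (for example `where x >` with a dangling operator), the colorer stops trying to parse and infallibly sweeps up the remaining tokens so highlighting never stalls. Below is a rough, self-contained model of that control flow; the `Shape` enum and `color` function are invented for illustration and are not the `ExpressionListShape`/`SimplestExpression` implementations.

```rust
// Minimal sketch of backoff coloring: parse while possible, then fall back
// to an infallible per-token pass for whatever is left.
#[derive(Debug)]
enum Shape {
    Expression(String),
    Garbage(String), // what a SimplestExpression-style fallback would emit
}

fn color(tokens: &[&str]) -> Vec<Shape> {
    let mut shapes = Vec::new();
    let mut backoff = false;
    let mut pos = 0;

    while pos < tokens.len() {
        if backoff {
            // Backoff mode: infallibly consume one token and mark it.
            shapes.push(Shape::Garbage(tokens[pos].to_string()));
            pos += 1;
            continue;
        }

        // Stand-in for AnyExpressionShape: only bare words "parse" here.
        let token = tokens[pos];
        if token.chars().all(|c| c.is_alphanumeric()) {
            shapes.push(Shape::Expression(token.to_string()));
            pos += 1;
        } else {
            // Parsing failed; switch to backoff mode for the rest of the line.
            backoff = true;
        }
    }

    shapes
}

fn main() {
    // `where size >` with a dangling operator: the first two tokens color as
    // expressions, the trailing `>` is swept up by the backoff pass.
    println!("{:?}", color(&["where", "size", ">"]));
}
```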
@@ -1,169 +1,109 @@
-use crate::hir::syntax_shape::{
-    expand_atom, parse_single_node, ExpandContext, ExpandExpression, ExpansionRule,
-    FallibleColorSyntax, FlatShape, TestSyntax,
-};
-use crate::hir::tokens_iterator::Peeked;
-use crate::parse::tokens::UnspannedToken;
-use crate::{
-    hir,
-    hir::{RawNumber, TokensIterator},
-};
-use nu_errors::{ParseError, ShellError};
-use nu_source::{Spanned, SpannedItem};
+use crate::hir::syntax_shape::{ExpandSyntax, FlatShape};
+use crate::hir::{Expression, SpannedExpression};
+use crate::hir::{RawNumber, TokensIterator};
+use crate::parse::token_tree::{DecimalType, IntType};
+use nu_errors::ParseError;
+use nu_source::HasSpan;

 #[derive(Debug, Copy, Clone)]
-pub struct NumberShape;
+pub struct NumberExpressionShape;
+
+impl ExpandSyntax for NumberExpressionShape {
+    type Output = Result<SpannedExpression, ParseError>;

-impl ExpandExpression for NumberShape {
     fn name(&self) -> &'static str {
         "number"
     }

-    fn expand_expr<'a, 'b>(
+    fn expand<'a, 'b>(
         &self,
         token_nodes: &mut TokensIterator<'_>,
-        context: &ExpandContext,
-    ) -> Result<hir::Expression, ParseError> {
-        parse_single_node(token_nodes, "Number", |token, token_span, err| {
-            Ok(match token {
-                UnspannedToken::GlobPattern
-                | UnspannedToken::CompareOperator(..)
-                | UnspannedToken::EvaluationOperator(..) => return Err(err.error()),
-                UnspannedToken::Variable(tag) if tag.slice(context.source) == "it" => {
-                    hir::Expression::it_variable(tag, token_span)
-                }
-                UnspannedToken::ExternalCommand(tag) => {
-                    hir::Expression::external_command(tag, token_span)
-                }
-                UnspannedToken::ExternalWord => {
-                    return Err(ParseError::mismatch(
-                        "number",
-                        "syntax error".spanned(token_span),
-                    ))
-                }
-                UnspannedToken::Variable(tag) => hir::Expression::variable(tag, token_span),
-                UnspannedToken::Number(number) => {
-                    hir::Expression::number(number.to_number(context.source), token_span)
-                }
-                UnspannedToken::Bare => hir::Expression::bare(token_span),
-                UnspannedToken::String(tag) => hir::Expression::string(tag, token_span),
-            })
-        })
+    ) -> Result<SpannedExpression, ParseError> {
+        let source = token_nodes.source();
+
+        token_nodes
+            .expand_syntax(NumberShape)
+            .map(|number| Expression::number(number.to_number(&source)).into_expr(number.span()))
     }
 }

-impl FallibleColorSyntax for NumberShape {
-    type Info = ();
-    type Input = ();
+#[derive(Debug, Copy, Clone)]
+pub struct IntExpressionShape;
+
+impl ExpandSyntax for IntExpressionShape {
+    type Output = Result<SpannedExpression, ParseError>;

     fn name(&self) -> &'static str {
-        "NumberShape"
+        "integer"
     }

-    fn color_syntax<'a, 'b>(
+    fn expand<'a, 'b>(
         &self,
-        _input: &(),
-        token_nodes: &'b mut TokensIterator<'a>,
-        context: &ExpandContext,
-    ) -> Result<(), ShellError> {
-        let atom = token_nodes.spanned(|token_nodes| {
-            expand_atom(token_nodes, "number", context, ExpansionRule::permissive())
-        });
-
-        let atom = match atom {
-            Spanned { item: Err(_), span } => {
-                token_nodes.color_shape(FlatShape::Error.spanned(span));
-                return Ok(());
-            }
-            Spanned { item: Ok(atom), .. } => atom,
-        };
-
-        token_nodes.mutate_shapes(|shapes| atom.color_tokens(shapes));
-
-        Ok(())
+        token_nodes: &mut TokensIterator<'_>,
+    ) -> Result<SpannedExpression, ParseError> {
+        let source = token_nodes.source();
+
+        token_nodes.expand_token(IntType, |number| {
+            Ok((
+                FlatShape::Int,
+                Expression::number(number.to_number(&source)),
+            ))
+        })
     }
 }

 #[derive(Debug, Copy, Clone)]
 pub struct IntShape;

-impl ExpandExpression for IntShape {
+impl ExpandSyntax for IntShape {
+    type Output = Result<RawNumber, ParseError>;
+
     fn name(&self) -> &'static str {
         "integer"
     }

-    fn expand_expr<'a, 'b>(
+    fn expand<'a, 'b>(
         &self,
-        token_nodes: &mut TokensIterator<'_>,
-        context: &ExpandContext,
-    ) -> Result<hir::Expression, ParseError> {
-        parse_single_node(token_nodes, "Integer", |token, token_span, err| {
-            Ok(match token {
-                UnspannedToken::GlobPattern
-                | UnspannedToken::CompareOperator(..)
-                | UnspannedToken::EvaluationOperator(..)
-                | UnspannedToken::ExternalWord => return Err(err.error()),
-                UnspannedToken::Variable(span) if span.slice(context.source) == "it" => {
-                    hir::Expression::it_variable(span, token_span)
-                }
-                UnspannedToken::ExternalCommand(span) => {
-                    hir::Expression::external_command(span, token_span)
-                }
-                UnspannedToken::Variable(span) => hir::Expression::variable(span, token_span),
-                UnspannedToken::Number(number @ RawNumber::Int(_)) => {
-                    hir::Expression::number(number.to_number(context.source), token_span)
-                }
-                UnspannedToken::Number(_) => return Err(err.error()),
-                UnspannedToken::Bare => hir::Expression::bare(token_span),
-                UnspannedToken::String(span) => hir::Expression::string(span, token_span),
-            })
-        })
+        token_nodes: &'b mut TokensIterator<'a>,
+    ) -> Result<RawNumber, ParseError> {
+        token_nodes.expand_token(IntType, |number| Ok((FlatShape::Int, number)))
     }
 }

-impl FallibleColorSyntax for IntShape {
-    type Info = ();
-    type Input = ();
+#[derive(Debug, Copy, Clone)]
+pub struct DecimalShape;
+
+impl ExpandSyntax for DecimalShape {
+    type Output = Result<RawNumber, ParseError>;

     fn name(&self) -> &'static str {
-        "IntShape"
+        "decimal"
     }

-    fn color_syntax<'a, 'b>(
+    fn expand<'a, 'b>(
         &self,
-        _input: &(),
         token_nodes: &'b mut TokensIterator<'a>,
-        context: &ExpandContext,
-    ) -> Result<(), ShellError> {
-        let atom = token_nodes.spanned(|token_nodes| {
-            expand_atom(token_nodes, "integer", context, ExpansionRule::permissive())
-        });
-
-        let atom = match atom {
-            Spanned { item: Err(_), span } => {
-                token_nodes.color_shape(FlatShape::Error.spanned(span));
-                return Ok(());
-            }
-            Spanned { item: Ok(atom), .. } => atom,
-        };
-
-        token_nodes.mutate_shapes(|shapes| atom.color_tokens(shapes));
-
-        Ok(())
+    ) -> Result<RawNumber, ParseError> {
+        token_nodes.expand_token(DecimalType, |number| Ok((FlatShape::Decimal, number)))
     }
 }

-impl TestSyntax for NumberShape {
-    fn test<'a, 'b>(
+#[derive(Debug, Copy, Clone)]
+pub struct NumberShape;
+
+impl ExpandSyntax for NumberShape {
+    type Output = Result<RawNumber, ParseError>;
+
+    fn name(&self) -> &'static str {
+        "decimal"
+    }
+
+    fn expand<'a, 'b>(
         &self,
         token_nodes: &'b mut TokensIterator<'a>,
-        _context: &ExpandContext,
-    ) -> Option<Peeked<'a, 'b>> {
-        let peeked = token_nodes.peek_any();
-
-        match peeked.node {
-            Some(token) if token.is_number() => Some(peeked),
-            _ => None,
-        }
+    ) -> Result<RawNumber, ParseError> {
+        token_nodes
+            .expand_syntax(IntShape)
+            .or_else(|_| token_nodes.expand_syntax(DecimalShape))
     }
 }
@ -1,77 +1,66 @@ (glob pattern shapes)
Imports move from the atom-based machinery (expand_atom, ExpandContext, ExpandExpression, ExpansionRule, FallibleColorSyntax, FlatShape, UnspannedAtomicToken, Token/UnspannedToken, TokenNode, ShellError, ShellTypeName, SpannedItem) to the shape-based API: expand_bare, expression::expand_file_path, BarePathShape, ExpandContext, ExpandSyntax, ExternalWordShape, StringShape, hir::{Expression, SpannedExpression}, hir::TokensIterator, Token, nu_errors::ParseError and nu_source::Span.

PatternShape loses its separate FallibleColorSyntax impl (which expanded a permissive atom and colored GlobPattern/Word atoms as FlatShape::GlobPattern) and its ExpandExpression impl (expand_atom with allow_external_word, mapping Word/String/ExternalWord/GlobPattern atoms through expand_file_path into hir::Expression::pattern). It now has a single ExpandSyntax impl with type Output = Result<SpannedExpression, ParseError> and name "glob pattern": expand collects an (inner, outer) span pair from BarePatternShape, BarePathShape or ExternalWordShape (span, span), falls back to StringShape (syntax.inner, syntax.span), reports failure with err_next_token("glob pattern"), and returns file_pattern(inner, outer, token_nodes.context()). The new file_pattern(body, outer, context) helper runs expand_file_path and returns Expression::pattern(path.to_string_lossy()).into_expr(outer). A new PatternExpressionShape (ExpandSyntax, name "pattern") expands BarePatternShape and builds the same pattern expression from the bare span.

@ -79,30 +68,17 @@ impl ExpandExpression for PatternShape
BarePatternShape's ExpandSyntax impl changes its Output from Span to Result<Span, ParseError> and its expand_syntax(&self, token_nodes, context) method to expand(&self, token_nodes). The expand_bare predicate now matches the flattened Token::Bare | Token::EvaluationOperator(EvaluationOperator::Dot) | Token::GlobPattern instead of the old TokenNode::Token { unspanned: UnspannedToken::... } forms.
@ -1,103 +1,47 @@ (range shape)
RangeShape switches from ExpandExpression (expand the left side via AnyExpressionShape, expand a ".." atom with ExpansionRule::new().allow_eval_operator(), check it was UnspannedAtomicToken::DotDot, expand the right side, return hir::Expression::range(left, span, right)) to ExpandSyntax with type Output = Result<SpannedExpression, ParseError>. The new expand is an atomic_parse that expands AnyExpressionStartShape for the left side, DotDotShape for the operator, AnyExpressionStartShape for the right side, and returns Expression::range(left, dotdot, right).into_expr(left.span.until(right.span)).

The separate FallibleColorSyntax impls for RangeShape and DotDotShape are removed. DotDotShape now implements ExpandSyntax with type Output = Result<Span, ParseError> and name "dotdot"; its expand is token_nodes.expand_token(DotDotType, |token| Ok((FlatShape::DotDot, token.span()))), replacing the old peek/commit/color_shape handling of UnspannedToken::EvaluationOperator(EvaluationOperator::DotDot). Imports change accordingly (AnyExpressionStartShape, ExpandSyntax, FlatShape, TokensIterator, Expression/SpannedExpression, DotDotType, ParseError, HasSpan/Span; the ShellError, SpannedTypeName and token-tree imports go away).
@ -1,90 +1,103 @@ (string shapes)
The old StringShape (a FallibleColorSyntax keyed on an input FlatShape, an ExpandExpression impl built on parse_single_node that rejected GlobPattern/CompareOperator/EvaluationOperator/ExternalWord tokens and converted Variable, ExternalCommand, Number, Bare and String tokens into hir expressions, and a TestSyntax impl that peeked for token.is_string()) is split into three ExpandSyntax shapes:

CoerceStringShape (name "StringShape", Output = Result<SpannedExpression, ParseError>) expands a StringType token into Expression::string(inner).into_expr(outer), falls back to a BareType token as Expression::string(span).into_expr(span), and finally to NumberShape, stringifying the number's span.

StringExpressionShape (name "string") expands VariableShape, or else a StringType token into a string expression.

A new StringSyntax { inner: Span, span: Span } value type gets HasSpan and PrettyDebugWithSource impls, and the new StringShape (name "string", Output = Result<StringSyntax, ParseError>) expands a StringType token into (FlatShape::String, StringSyntax { inner, span: outer }).
@ -1,16 +1,19 @@ (unit shape)
Imports switch from ExpandContext/ExpandSyntax plus parse::tokens::{RawNumber, Token, UnspannedToken} and TokenNode to flat_shape::FlatShape, ExpandSyntax, TokensIterator, Expression/SpannedExpression, parse::number::RawNumber and parse::token_tree::BareType; nu_source now also provides Text.

@ -18,6 +21,17 @@ pub struct UnitSyntax {
UnitSyntax gains an into_expr(self, source: &Text) -> SpannedExpression method that destructures unit: (number, unit) and returns Expression::size(number.to_number(source), *unit).into_expr(span).

@ -33,42 +47,60 @@ impl HasSpan for UnitSyntax {
A new UnitExpressionShape (name "unit expression", Output = Result<SpannedExpression, ParseError>) expands UnitShape and converts it with into_expr(&token_nodes.source()). UnitShape's Output becomes Result<UnitSyntax, ParseError>, and its expand no longer peeks a bare TokenNode against an ExpandContext; it calls token_nodes.expand_token(BareType, |span| ...), runs unit_size(span.slice(&source), span), maps a parse failure to ParseError::mismatch("unit", "word".spanned(span)), and returns (FlatShape::Size { number: number.span(), unit: unit.span }, UnitSyntax { unit: (number, unit), span }).
File diff suppressed because it is too large
@ -1,13 +1,16 @@ (flat shapes)
Imports: RawNumber now comes from parse::number, the token types from parse::token_tree::{Delimiter, SpannedToken, Token} (instead of TokenNode plus parse::tokens::{RawNumber, UnspannedToken}), and nu_protocol::ShellTypeName plus nu_source::{DebugDocBuilder, PrettyDebug} are added.

@ -21,88 +24,170 @@ pub enum FlatShape {
FlatShape gains Type, Identifier, Keyword and Garbage variants and loses Error. A new ShapeResult enum is introduced: Success(Spanned<FlatShape>) or Fallback { shape: Spanned<FlatShape>, allowed: Vec<String> }, with a HasSpan impl returning the inner shape's span. A new TraceShape { shape, span } carries ShellTypeName, PrettyDebug and HasSpan impls; FlatShape itself gets a PrettyDebug stub and a full ShellTypeName impl naming every variant, from "open brace" through "size". FlatShape::from is rewritten against &SpannedToken: it matches the flattened Token variants directly (Number Int/Decimal, EvaluationOperator Dot/DotDot, CompareOperator, String, Variable / ItVariable, ExternalCommand, ExternalWord, GlobPattern, Bare, Call, Delimited, Pipeline, Flag longhand/shorthand, Garbage, Whitespace, Separator, Comment) instead of the old TokenNode wrappers, and two helpers are added: into_trace_shape(self, span) -> TraceShape and shapes(token, source) -> Vec<Spanned<FlatShape>>.
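The Success/Fallback split above is the core of the new coloring model. The following standalone sketch only restates that distinction; the types are simplified stand-ins (the real ShapeResult wraps Spanned<FlatShape> from nu-source rather than carrying its own span fields).

// Simplified stand-ins for nu_source::Span and the parser's FlatShape.
#[derive(Debug, Clone, Copy)]
struct Span {
    start: usize,
    end: usize,
}

#[derive(Debug, Clone, Copy)]
enum FlatShape {
    Int,
    Garbage,
}

// ShapeResult-style distinction: a shape is either a confirmed Success or a
// Fallback that remembers which shapes would have been acceptable here.
#[derive(Debug, Clone)]
enum ShapeResult {
    Success {
        shape: FlatShape,
        span: Span,
    },
    Fallback {
        shape: FlatShape,
        span: Span,
        allowed: Vec<String>,
    },
}

impl ShapeResult {
    // Both variants expose their span, mirroring the HasSpan impl in the diff.
    fn span(&self) -> Span {
        match self {
            ShapeResult::Success { span, .. } | ShapeResult::Fallback { span, .. } => *span,
        }
    }
}

fn main() {
    let ok = ShapeResult::Success {
        shape: FlatShape::Int,
        span: Span { start: 0, end: 2 },
    };
    let fell_back = ShapeResult::Fallback {
        shape: FlatShape::Garbage,
        span: Span { start: 3, end: 7 },
        allowed: vec!["int".to_string(), "word".to_string()],
    };
    println!("{:?} {:?}", ok.span(), fell_back.span());
}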
File diff suppressed because it is too large
@ -1,7 +1,7 @@ (color trace)
The colorizer's trace now works over ShapeResult instead of Spanned<FlatShape>, and over ParseError instead of ShellError, across several hunks. FrameChild::Shape holds a ShapeResult, and colored_leaf_description renders Success and Fallback shapes as separate cases. ColorFrame.error becomes Option<ParseError>, add_shape takes a ShapeResult (dropping its #[allow(unused)]), and any_child_shape's predicate now takes &ShapeResult. TreeChild::Shape likewise carries a ShapeResult with Success and Fallback rendered separately, and ColorTracer::add_shape / ColorTracer::failed are updated to the new types.
@ -1,26 +1,44 @@ (expansion trace)
The expansion tracer becomes generic over the value it returns. FrameChild and ExprFrame are now FrameChild<T: SpannedTypeName> and ExprFrame<T>; FrameChild gains a Shape(Result<TraceShape, TraceShape>) variant and boxes its Frame variant, and a new err_desc(&ParseError) helper maps ParseErrorReason (ExtraTokens, Mismatch, ArgumentError, Eof, InternalError) to short labels. ExprFrame records the token: Option<SpannedToken> that opened it and exposes token_desc() ("EOF" when there is none); error leaves now carry (description, err_desc) pairs together with that token description.

TreeFrame gains a token_desc field which leaf_description prints alongside the frame description. TreeChild::OkExpr becomes a struct variant { source, desc, text }, a new TreeChild::OkShape { source, desc, text, fallback } is rendered with a "paints" label, and ErrorLeaf carries the (description, err_desc) list plus the token description; to_tree_child builds these from the expression's or shape's spanned_type_name().

ExpandTracer becomes ExpandTracer<T: SpannedTypeName> with a desc field: new(desc, source) takes the description, start(description, token) records the current token, add_expr is replaced by add_return(value: T) plus add_shape/add_err_shape(TraceShape), and new finish() and eof_frame() methods fold remaining frames back into their parents (frames are now pushed as FrameChild::Frame(Box::new(...))). PrintTracer gains the desc and write_self prints it instead of the fixed "Expansion Trace" label.
crates/nu-parser/src/hir/tokens_iterator/into_shapes.rs (new file, 56 lines)
@ -0,0 +1,56 @@
use crate::hir::syntax_shape::flat_shape::{FlatShape, ShapeResult};
use nu_source::{Span, Spanned, SpannedItem};

pub struct FlatShapes {
    shapes: Vec<ShapeResult>,
}

impl<'a> IntoIterator for &'a FlatShapes {
    type Item = &'a ShapeResult;
    type IntoIter = std::slice::Iter<'a, ShapeResult>;

    fn into_iter(self) -> Self::IntoIter {
        self.shapes.iter()
    }
}

pub trait IntoShapes: 'static {
    fn into_shapes(self, span: Span) -> FlatShapes;
}

impl IntoShapes for FlatShape {
    fn into_shapes(self, span: Span) -> FlatShapes {
        FlatShapes {
            shapes: vec![ShapeResult::Success(self.spanned(span))],
        }
    }
}

impl IntoShapes for Vec<Spanned<FlatShape>> {
    fn into_shapes(self, _span: Span) -> FlatShapes {
        FlatShapes {
            shapes: self.into_iter().map(ShapeResult::Success).collect(),
        }
    }
}

impl IntoShapes for Vec<ShapeResult> {
    fn into_shapes(self, _span: Span) -> FlatShapes {
        FlatShapes { shapes: self }
    }
}

impl IntoShapes for () {
    fn into_shapes(self, _span: Span) -> FlatShapes {
        FlatShapes { shapes: vec![] }
    }
}

impl IntoShapes for Option<FlatShape> {
    fn into_shapes(self, span: Span) -> FlatShapes {
        match self {
            Option::None => ().into_shapes(span),
            Option::Some(shape) => shape.into_shapes(span),
        }
    }
}
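The point of into_shapes.rs is that an expand_token callback may report one shape, several shapes, or none, and the iterator normalizes whatever comes back into a single collection. The sketch below restates that idea in a self-contained form; Span, FlatShape and FlatShapes here are simplified stand-ins for the nu-parser/nu-source types, not the crate's API.

// Simplified stand-ins for Span, FlatShape and the FlatShapes collection.
#[derive(Debug, Clone, Copy)]
struct Span(usize, usize);

#[derive(Debug, Clone, Copy)]
enum FlatShape {
    Int,
}

#[derive(Debug)]
struct FlatShapes {
    shapes: Vec<(FlatShape, Span)>,
}

// IntoShapes normalizes "one shape", "no shape" or "maybe a shape" into FlatShapes.
trait IntoShapes {
    fn into_shapes(self, span: Span) -> FlatShapes;
}

impl IntoShapes for FlatShape {
    fn into_shapes(self, span: Span) -> FlatShapes {
        FlatShapes { shapes: vec![(self, span)] }
    }
}

impl IntoShapes for () {
    fn into_shapes(self, _span: Span) -> FlatShapes {
        FlatShapes { shapes: vec![] }
    }
}

impl IntoShapes for Option<FlatShape> {
    fn into_shapes(self, span: Span) -> FlatShapes {
        match self {
            None => ().into_shapes(span),
            Some(shape) => shape.into_shapes(span),
        }
    }
}

fn main() {
    let span = Span(0, 2);
    println!("{:?}", FlatShape::Int.into_shapes(span));
    println!("{:?}", Option::<FlatShape>::None.into_shapes(span));
}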
new file: crates/nu-parser/src/hir/tokens_iterator/pattern.rs (30 lines)
@@ -0,0 +1,30 @@
+use crate::parse::token_tree::{ParseErrorFn, SpannedToken, TokenType};
+use nu_errors::ParseError;
+use std::borrow::Cow;
+
+pub struct Pattern<T> {
+    parts: Vec<Box<dyn TokenType<Output = T>>>,
+}
+
+impl<T> TokenType for Pattern<T> {
+    type Output = T;
+
+    fn desc(&self) -> Cow<'static, str> {
+        Cow::Borrowed("pattern")
+    }
+
+    fn extract_token_value(
+        &self,
+        token: &SpannedToken,
+        err: ParseErrorFn<Self::Output>,
+    ) -> Result<Self::Output, ParseError> {
+        for part in &self.parts {
+            match part.extract_token_value(token, err) {
+                Err(_) => {}
+                Ok(result) => return Ok(result),
+            }
+        }
+
+        err()
+    }
+}
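`Pattern<T>` tries each boxed part in order and returns the first successful extraction, only falling back to the error callback when every part fails. A self-contained sketch of that first-success dispatch, with an invented `Extract` trait standing in for `TokenType`:

// Self-contained sketch of the "first success wins" dispatch that Pattern<T>
// performs over its boxed parts. `Extract` and `Exact` are stand-ins, not the
// nu-parser trait or tokens.

trait Extract {
    type Output;
    fn extract(&self, token: &str) -> Result<Self::Output, String>;
}

struct Exact(&'static str);

impl Extract for Exact {
    type Output = usize;
    fn extract(&self, token: &str) -> Result<usize, String> {
        if token == self.0 {
            Ok(self.0.len())
        } else {
            Err(format!("expected {}", self.0))
        }
    }
}

struct Pattern<T> {
    parts: Vec<Box<dyn Extract<Output = T>>>,
}

impl<T> Extract for Pattern<T> {
    type Output = T;
    fn extract(&self, token: &str) -> Result<T, String> {
        // Try each alternative in order; fall through on errors.
        for part in &self.parts {
            if let Ok(result) = part.extract(token) {
                return Ok(result);
            }
        }
        Err("no alternative matched".to_string())
    }
}

fn main() {
    let pattern: Pattern<usize> = Pattern {
        parts: vec![Box::new(Exact("foo")), Box::new(Exact("barbaz"))],
    };
    assert_eq!(pattern.extract("barbaz"), Ok(6));
    assert!(pattern.extract("nope").is_err());
}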
new file: crates/nu-parser/src/hir/tokens_iterator/state.rs (105 lines)
@@ -0,0 +1,105 @@
+use crate::hir::syntax_shape::flat_shape::ShapeResult;
+use crate::hir::syntax_shape::ExpandContext;
+use crate::hir::tokens_iterator::TokensIterator;
+use crate::parse::token_tree::SpannedToken;
+
+use getset::Getters;
+use nu_errors::ParseError;
+use nu_protocol::SpannedTypeName;
+use nu_source::Span;
+use std::sync::Arc;
+
+#[derive(Getters, Debug, Clone)]
+pub struct TokensIteratorState<'content> {
+    pub(crate) tokens: &'content [SpannedToken],
+    pub(crate) span: Span,
+    pub(crate) index: usize,
+    pub(crate) seen: indexmap::IndexSet<usize>,
+    #[get = "pub"]
+    pub(crate) shapes: Vec<ShapeResult>,
+    pub(crate) errors: indexmap::IndexMap<Span, Vec<String>>,
+    pub(crate) context: Arc<ExpandContext<'content>>,
+}
+
+#[derive(Debug)]
+pub struct Peeked<'content, 'me> {
+    pub(crate) node: Option<&'content SpannedToken>,
+    pub(crate) iterator: &'me mut TokensIterator<'content>,
+    pub(crate) from: usize,
+    pub(crate) to: usize,
+}
+
+impl<'content, 'me> Peeked<'content, 'me> {
+    pub fn commit(&mut self) -> Option<&'content SpannedToken> {
+        let Peeked {
+            node,
+            iterator,
+            from,
+            to,
+        } = self;
+
+        let node = (*node)?;
+        iterator.commit(*from, *to);
+        Some(node)
+    }
+
+    pub fn rollback(self) {}
+
+    pub fn not_eof(self, expected: &str) -> Result<PeekedNode<'content, 'me>, ParseError> {
+        match self.node {
+            None => Err(ParseError::unexpected_eof(
+                expected.to_string(),
+                self.iterator.eof_span(),
+            )),
+            Some(node) => Ok(PeekedNode {
+                node,
+                iterator: self.iterator,
+                from: self.from,
+                to: self.to,
+            }),
+        }
+    }
+
+    pub fn type_error(&self, expected: &'static str) -> ParseError {
+        peek_error(self.node, self.iterator.eof_span(), expected)
+    }
+}
+
+#[derive(Debug)]
+pub struct PeekedNode<'content, 'me> {
+    pub(crate) node: &'content SpannedToken,
+    pub(crate) iterator: &'me mut TokensIterator<'content>,
+    from: usize,
+    to: usize,
+}
+
+impl<'content, 'me> PeekedNode<'content, 'me> {
+    pub fn commit(self) -> &'content SpannedToken {
+        let PeekedNode {
+            node,
+            iterator,
+            from,
+            to,
+        } = self;
+
+        iterator.commit(from, to);
+        node
+    }
+
+    pub fn rollback(self) {}
+
+    pub fn type_error(&self, expected: &'static str) -> ParseError {
+        peek_error(Some(self.node), self.iterator.eof_span(), expected)
+    }
+}
+
+pub fn peek_error(
+    node: Option<&SpannedToken>,
+    eof_span: Span,
+    expected: &'static str,
+) -> ParseError {
+    match node {
+        None => ParseError::unexpected_eof(expected, eof_span),
+        Some(node) => ParseError::mismatch(expected, node.spanned_type_name()),
+    }
+}
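`Peeked` and `PeekedNode` implement a checkpoint protocol: peeking computes a window over the token stream, but the iterator only advances when `commit` is called, and dropping the peek (or calling `rollback`) leaves it untouched. A minimal standalone sketch of that protocol over a plain slice, with simplified stand-in types:

// Standalone sketch of the peek/commit/rollback protocol. `Cursor` and `Peek`
// are simplified stand-ins for TokensIterator and Peeked, not the real types.

struct Cursor<'a, T> {
    items: &'a [T],
    index: usize,
}

struct Peek<'a, 'me, T> {
    node: Option<&'a T>,
    cursor: &'me mut Cursor<'a, T>,
    to: usize,
}

impl<'a, T> Cursor<'a, T> {
    fn peek<'me>(&'me mut self) -> Peek<'a, 'me, T> {
        // Copy the &'a [T] out so the peeked item borrows the data, not the cursor.
        let items = self.items;
        let node = items.get(self.index);
        let to = (self.index + 1).min(items.len());
        Peek { node, cursor: self, to }
    }
}

impl<'a, 'me, T> Peek<'a, 'me, T> {
    // Only `commit` advances the underlying cursor.
    fn commit(self) -> Option<&'a T> {
        let node = self.node?;
        self.cursor.index = self.to;
        Some(node)
    }

    // Dropping the peek without committing leaves the cursor where it was.
    fn rollback(self) {}
}

fn main() {
    let data = [10, 20, 30];
    let mut cursor = Cursor { items: &data, index: 0 };

    cursor.peek().rollback();
    assert_eq!(cursor.index, 0);

    assert_eq!(cursor.peek().commit(), Some(&10));
    assert_eq!(cursor.index, 1);
}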
@@ -3,12 +3,17 @@ use crate::parse::token_tree_builder::TokenTreeBuilder as b;
 use crate::Span;

 #[test]
+<<<<<<< HEAD
+fn supplies_tokens() {
+    let tokens = b::token_list(vec![b::it_var(), b::op("."), b::bare("cpu")]);
+=======
 fn supplies_tokens() -> Result<(), Box<dyn std::error::Error>> {
     let tokens = b::token_list(vec![b::var("it"), b::op("."), b::bare("cpu")]);
+>>>>>>> master
     let (tokens, _) = b::build(tokens);

     let tokens = tokens.expect_list();
-    let mut iterator = TokensIterator::all(tokens, Span::unknown());
+    let mut iterator = TokensIterator::new(tokens, Span::unknown());

     iterator.next()?.expect_var();
     iterator.next()?.expect_dot();
@@ -1,4 +1,5 @@
-#![allow(clippy::large_enum_variant, clippy::type_complexity)]
+#[macro_use]
+pub mod macros;

 pub mod commands;
 pub mod hir;
@@ -8,23 +9,64 @@ pub mod parse_command;
 pub use crate::commands::classified::{
     external::ExternalCommand, internal::InternalCommand, ClassifiedCommand, ClassifiedPipeline,
 };
-pub use crate::hir::syntax_shape::flat_shape::FlatShape;
-pub use crate::hir::syntax_shape::{
-    expand_syntax, ExpandContext, ExpandSyntax, PipelineShape, SignatureRegistry,
-};
+pub use crate::hir::syntax_shape::flat_shape::{FlatShape, ShapeResult};
+pub use crate::hir::syntax_shape::{ExpandContext, ExpandSyntax, PipelineShape, SignatureRegistry};
 pub use crate::hir::tokens_iterator::TokensIterator;
 pub use crate::parse::files::Files;
 pub use crate::parse::flag::Flag;
 pub use crate::parse::operator::{CompareOperator, EvaluationOperator};
 pub use crate::parse::parser::Number;
 pub use crate::parse::parser::{module, pipeline};
-pub use crate::parse::token_tree::{Delimiter, TokenNode};
+pub use crate::parse::token_tree::{Delimiter, SpannedToken, Token};
 pub use crate::parse::token_tree_builder::TokenTreeBuilder;

+use log::log_enabled;
 use nu_errors::ShellError;
-use nu_source::nom_input;
+use nu_protocol::{errln, outln};
+use nu_source::{nom_input, HasSpan, Text};

-pub fn parse(input: &str) -> Result<TokenNode, ShellError> {
+pub fn pipeline_shapes(line: &str, expand_context: ExpandContext) -> Vec<ShapeResult> {
+    let tokens = parse_pipeline(line);
+
+    match tokens {
+        Err(_) => vec![],
+        Ok(v) => {
+            let pipeline = match v.as_pipeline() {
+                Err(_) => return vec![],
+                Ok(v) => v,
+            };
+
+            let tokens = vec![Token::Pipeline(pipeline).into_spanned(v.span())];
+            let mut tokens = TokensIterator::new(&tokens[..], expand_context, v.span());
+
+            let shapes = {
+                // We just constructed a token list that only contains a pipeline, so it can't fail
+                let result = tokens.expand_infallible(PipelineShape);
+
+                if let Some(failure) = result.failed {
+                    errln!(
+                        "BUG: PipelineShape didn't find a pipeline :: {:#?}",
+                        failure
+                    );
+                }
+
+                tokens.finish_tracer();
+
+                tokens.state().shapes()
+            };
+
+            if log_enabled!(target: "nu::expand_syntax", log::Level::Debug) {
+                outln!("");
+                let _ = ptree::print_tree(&tokens.expand_tracer().clone().print(Text::from(line)));
+                outln!("");
+            }
+
+            shapes.clone()
+        }
+    }
+}
+
+pub fn parse_pipeline(input: &str) -> Result<SpannedToken, ShellError> {
     let _ = pretty_env_logger::try_init();

     match pipeline(nom_input(input)) {
@@ -33,7 +75,9 @@ pub fn parse(input: &str) -> Result<TokenNode, ShellError> {
     }
 }

-pub fn parse_script(input: &str) -> Result<TokenNode, ShellError> {
+pub use parse_pipeline as parse;
+
+pub fn parse_script(input: &str) -> Result<SpannedToken, ShellError> {
     let _ = pretty_env_logger::try_init();

     match module(nom_input(input)) {
new file: crates/nu-parser/src/macros.rs (9 lines)
@@ -0,0 +1,9 @@
+#[macro_export]
+macro_rules! return_ok {
+    ($expr:expr) => {
+        match $expr {
+            Ok(val) => return Ok(val),
+            Err(_) => {}
+        }
+    };
+}
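`return_ok!` early-returns on the first `Ok` and falls through on `Err`, which keeps chains of alternative parsers flat. A runnable demo; the macro body is copied from the file above, and the `parse_*` helpers are invented for illustration:

// Demo of return_ok!-style early return. The parse_int/parse_bool helpers are
// made up for this sketch; only the macro mirrors the file above.

macro_rules! return_ok {
    ($expr:expr) => {
        match $expr {
            Ok(val) => return Ok(val),
            Err(_) => {}
        }
    };
}

fn parse_int(s: &str) -> Result<i64, ()> {
    s.parse::<i64>().map_err(|_| ())
}

fn parse_bool(s: &str) -> Result<i64, ()> {
    match s {
        "true" => Ok(1),
        "false" => Ok(0),
        _ => Err(()),
    }
}

fn parse_any(s: &str) -> Result<i64, ()> {
    return_ok!(parse_int(s)); // first alternative
    return_ok!(parse_bool(s)); // second alternative
    Err(())
}

fn main() {
    assert_eq!(parse_any("42"), Ok(42));
    assert_eq!(parse_any("true"), Ok(1));
    assert!(parse_any("nope").is_err());
}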
@@ -2,11 +2,11 @@ pub(crate) mod call_node;
 pub(crate) mod comment;
 pub(crate) mod files;
 pub(crate) mod flag;
+pub(crate) mod number;
 pub(crate) mod operator;
 pub(crate) mod parser;
 pub(crate) mod pipeline;
 pub(crate) mod token_tree;
 pub(crate) mod token_tree_builder;
-pub(crate) mod tokens;
 pub(crate) mod unit;
 pub(crate) mod util;
@@ -1,13 +1,13 @@
-use crate::TokenNode;
+use crate::parse::token_tree::SpannedToken;
 use getset::Getters;
 use nu_source::{b, DebugDocBuilder, PrettyDebugWithSource};

 #[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Getters)]
 pub struct CallNode {
     #[get = "pub(crate)"]
-    head: Box<TokenNode>,
+    head: Box<SpannedToken>,
     #[get = "pub(crate)"]
-    children: Option<Vec<TokenNode>>,
+    children: Option<Vec<SpannedToken>>,
 }

 impl PrettyDebugWithSource for CallNode {
@@ -29,7 +29,7 @@ impl PrettyDebugWithSource for CallNode {
 }

 impl CallNode {
-    pub fn new(head: Box<TokenNode>, children: Vec<TokenNode>) -> CallNode {
+    pub fn new(head: Box<SpannedToken>, children: Vec<SpannedToken>) -> CallNode {
         if children.is_empty() {
             CallNode {
                 head,
@@ -1,6 +1,6 @@
 use derive_new::new;
 use getset::Getters;
-use nu_source::{b, DebugDocBuilder, HasSpan, PrettyDebugWithSource, Span};
+use nu_source::{b, DebugDocBuilder, PrettyDebugWithSource, Span};
 use serde::{Deserialize, Serialize};

 #[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Deserialize, Serialize)]
@@ -12,15 +12,13 @@ pub enum CommentKind {
 pub struct Comment {
     pub(crate) kind: CommentKind,
     pub(crate) text: Span,
-    pub(crate) span: Span,
 }

 impl Comment {
-    pub fn line(text: impl Into<Span>, outer: impl Into<Span>) -> Comment {
+    pub fn line(text: impl Into<Span>) -> Comment {
         Comment {
             kind: CommentKind::Line,
             text: text.into(),
-            span: outer.into(),
         }
     }
 }
@@ -34,9 +32,3 @@ impl PrettyDebugWithSource for Comment {
         prefix + b::description(self.text.slice(source))
     }
 }
-
-impl HasSpan for Comment {
-    fn span(&self) -> Span {
-        self.span
-    }
-}
@@ -15,7 +15,6 @@ pub enum FlagKind {
 pub struct Flag {
     pub(crate) kind: FlagKind,
     pub(crate) name: Span,
-    pub(crate) span: Span,
 }

 impl PrettyDebugWithSource for Flag {
@@ -30,10 +29,10 @@ impl PrettyDebugWithSource for Flag {
 }

 impl Flag {
-    pub fn color(&self) -> Spanned<FlatShape> {
+    pub fn color(&self, span: impl Into<Span>) -> Spanned<FlatShape> {
         match self.kind {
-            FlagKind::Longhand => FlatShape::Flag.spanned(self.span),
-            FlagKind::Shorthand => FlatShape::ShorthandFlag.spanned(self.span),
+            FlagKind::Longhand => FlatShape::Flag.spanned(span.into()),
+            FlagKind::Shorthand => FlatShape::ShorthandFlag.spanned(span.into()),
         }
     }
 }
new file: crates/nu-parser/src/parse/number.rs (70 lines)
@@ -0,0 +1,70 @@
+use crate::hir::syntax_shape::FlatShape;
+use crate::parse::parser::Number;
+use bigdecimal::BigDecimal;
+use nu_source::{b, DebugDocBuilder, HasSpan, PrettyDebugWithSource, Span, Text};
+use num_bigint::BigInt;
+use std::str::FromStr;
+
+#[derive(Debug, Clone, Copy, Eq, PartialEq, Ord, PartialOrd, Hash)]
+pub enum RawNumber {
+    Int(Span),
+    Decimal(Span),
+}
+
+impl HasSpan for RawNumber {
+    fn span(&self) -> Span {
+        match self {
+            RawNumber::Int(span) => *span,
+            RawNumber::Decimal(span) => *span,
+        }
+    }
+}
+
+impl PrettyDebugWithSource for RawNumber {
+    fn pretty_debug(&self, source: &str) -> DebugDocBuilder {
+        match self {
+            RawNumber::Int(span) => b::primitive(span.slice(source)),
+            RawNumber::Decimal(span) => b::primitive(span.slice(source)),
+        }
+    }
+}
+
+impl RawNumber {
+    pub fn as_flat_shape(&self) -> FlatShape {
+        match self {
+            RawNumber::Int(_) => FlatShape::Int,
+            RawNumber::Decimal(_) => FlatShape::Decimal,
+        }
+    }
+
+    pub fn int(span: impl Into<Span>) -> RawNumber {
+        let span = span.into();
+
+        RawNumber::Int(span)
+    }
+
+    pub fn decimal(span: impl Into<Span>) -> RawNumber {
+        let span = span.into();
+
+        RawNumber::Decimal(span)
+    }
+
+    pub(crate) fn to_number(self, source: &Text) -> Number {
+        match self {
+            RawNumber::Int(tag) => {
+                if let Ok(big_int) = BigInt::from_str(tag.slice(source)) {
+                    Number::Int(big_int)
+                } else {
+                    unreachable!("Internal error: could not parse text as BigInt as expected")
+                }
+            }
+            RawNumber::Decimal(tag) => {
+                if let Ok(big_decimal) = BigDecimal::from_str(tag.slice(source)) {
+                    Number::Decimal(big_decimal)
+                } else {
+                    unreachable!("Internal error: could not parse text as BigDecimal as expected")
+                }
+            }
+        }
+    }
+}
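`RawNumber::to_number` defers the actual numeric conversion to `FromStr`: the span's text is parsed into a `BigInt` or a `BigDecimal`. A small sketch of that step, assuming the `num-bigint` and `bigdecimal` crates from the imports above; here `text` stands in for `span.slice(source)` and `Number` for the nu-parser enum:

// Sketch only: `Number` and `to_number` are local stand-ins; the big-number
// types come from the num-bigint and bigdecimal crates used in the diff.

use bigdecimal::BigDecimal;
use num_bigint::BigInt;
use std::str::FromStr;

enum Number {
    Int(BigInt),
    Decimal(BigDecimal),
}

fn to_number(text: &str, is_int: bool) -> Option<Number> {
    if is_int {
        BigInt::from_str(text).ok().map(Number::Int)
    } else {
        BigDecimal::from_str(text).ok().map(Number::Decimal)
    }
}

fn main() {
    assert!(matches!(to_number("123", true), Some(Number::Int(_))));
    assert!(matches!(to_number("1.5", false), Some(Number::Decimal(_))));
    assert!(to_number("abc", true).is_none());
}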
@@ -1,8 +1,8 @@
 #![allow(unused)]

 use crate::parse::{
-    call_node::*, flag::*, operator::*, pipeline::*, token_tree::*, token_tree_builder::*,
-    tokens::*, unit::*,
+    call_node::*, flag::*, number::*, operator::*, pipeline::*, token_tree::*,
+    token_tree_builder::*, unit::*,
 };
 use nom;
 use nom::branch::*;
@@ -36,7 +36,7 @@ use std::str::FromStr;
 macro_rules! cmp_operator {
     ($name:tt : $token:tt ) => {
         #[tracable_parser]
-        pub fn $name(input: NomSpan) -> IResult<NomSpan, TokenNode> {
+        pub fn $name(input: NomSpan) -> IResult<NomSpan, $crate::parse::token_tree::SpannedToken> {
             let start = input.offset;
             let (input, tag) = tag($token)(input)?;
             let end = input.offset;
@@ -52,7 +52,7 @@ macro_rules! cmp_operator {
 macro_rules! eval_operator {
     ($name:tt : $token:tt ) => {
         #[tracable_parser]
-        pub fn $name(input: NomSpan) -> IResult<NomSpan, TokenNode> {
+        pub fn $name(input: NomSpan) -> IResult<NomSpan, $crate::parse::token_tree::SpannedToken> {
             let start = input.offset;
             let (input, tag) = tag($token)(input)?;
             let end = input.offset;
@@ -209,7 +209,7 @@ impl Into<Number> for BigInt {
 }

 #[tracable_parser]
-pub fn number(input: NomSpan) -> IResult<NomSpan, TokenNode> {
+pub fn number(input: NomSpan) -> IResult<NomSpan, SpannedToken> {
     let (input, number) = raw_number(input)?;

     Ok((
@@ -218,12 +218,36 @@ pub fn number(input: NomSpan) -> IResult<NomSpan, TokenNode> {
     ))
 }

+#[tracable_parser]
+pub fn int_member(input: NomSpan) -> IResult<NomSpan, SpannedToken> {
+    let start = input.offset;
+    let (input, head) = digit1(input)?;
+
+    match input.fragment.chars().next() {
+        None | Some('.') => Ok((
+            input,
+            Token::Number(RawNumber::int((start, input.offset)))
+                .into_spanned((start, input.offset)),
+        )),
+        other if is_boundary(other) => Ok((
+            input,
+            Token::Number(RawNumber::int((start, input.offset)))
+                .into_spanned((start, input.offset)),
+        )),
+        _ => Err(nom::Err::Error(nom::error::make_error(
+            input,
+            nom::error::ErrorKind::Tag,
+        ))),
+    }
+}
+
 #[tracable_parser]
 pub fn raw_number(input: NomSpan) -> IResult<NomSpan, RawNumber> {
     let anchoral = input;
     let start = input.offset;
     let (input, neg) = opt(tag("-"))(input)?;
     let (input, head) = digit1(input)?;
+    let after_int_head = input;

     match input.fragment.chars().next() {
         None => return Ok((input, RawNumber::int(Span::new(start, input.offset)))),
@@ -255,7 +279,17 @@ pub fn raw_number(input: NomSpan) -> IResult<NomSpan, RawNumber> {
         Err(_) => return Ok((input, RawNumber::int(Span::new(start, input.offset)))),
     };

-    let (input, tail) = digit1(input)?;
+    let tail_digits_result: IResult<NomSpan, _> = digit1(input);
+
+    let (input, tail) = match tail_digits_result {
+        Ok((input, tail)) => (input, tail),
+        Err(_) => {
+            return Ok((
+                after_int_head,
+                RawNumber::int((start, after_int_head.offset)),
+            ))
+        }
+    };

     let end = input.offset;

@@ -272,14 +306,14 @@ pub fn raw_number(input: NomSpan) -> IResult<NomSpan, RawNumber> {
 }

 #[tracable_parser]
-pub fn operator(input: NomSpan) -> IResult<NomSpan, TokenNode> {
+pub fn operator(input: NomSpan) -> IResult<NomSpan, SpannedToken> {
     let (input, operator) = alt((gte, lte, neq, gt, lt, eq, cont, ncont))(input)?;

     Ok((input, operator))
 }

 #[tracable_parser]
-pub fn dq_string(input: NomSpan) -> IResult<NomSpan, TokenNode> {
+pub fn dq_string(input: NomSpan) -> IResult<NomSpan, SpannedToken> {
     let start = input.offset;
     let (input, _) = char('"')(input)?;
     let start1 = input.offset;
@@ -294,7 +328,7 @@ pub fn dq_string(input: NomSpan) -> IResult<NomSpan, TokenNode> {
 }

 #[tracable_parser]
-pub fn sq_string(input: NomSpan) -> IResult<NomSpan, TokenNode> {
+pub fn sq_string(input: NomSpan) -> IResult<NomSpan, SpannedToken> {
     let start = input.offset;
     let (input, _) = char('\'')(input)?;
     let start1 = input.offset;
@@ -310,12 +344,12 @@ pub fn sq_string(input: NomSpan) -> IResult<NomSpan, TokenNode> {
 }

 #[tracable_parser]
-pub fn string(input: NomSpan) -> IResult<NomSpan, TokenNode> {
+pub fn string(input: NomSpan) -> IResult<NomSpan, SpannedToken> {
     alt((sq_string, dq_string))(input)
 }

 #[tracable_parser]
-pub fn external(input: NomSpan) -> IResult<NomSpan, TokenNode> {
+pub fn external(input: NomSpan) -> IResult<NomSpan, SpannedToken> {
     let start = input.offset;
     let (input, _) = tag("^")(input)?;
     let (input, bare) = take_while(is_file_char)(input)?;
@@ -373,7 +407,7 @@ pub fn matches(cond: fn(char) -> bool) -> impl Fn(NomSpan) -> IResult<NomSpan, N
 }

 #[tracable_parser]
-pub fn pattern(input: NomSpan) -> IResult<NomSpan, TokenNode> {
+pub fn pattern(input: NomSpan) -> IResult<NomSpan, SpannedToken> {
     word(
         start_pattern,
         matches(is_glob_char),
@@ -387,7 +421,7 @@ pub fn start_pattern(input: NomSpan) -> IResult<NomSpan, NomSpan> {
 }

 #[tracable_parser]
-pub fn filename(input: NomSpan) -> IResult<NomSpan, TokenNode> {
+pub fn filename(input: NomSpan) -> IResult<NomSpan, SpannedToken> {
     let start_pos = input.offset;

     let (mut input, mut saw_special) = match start_file_char(input) {
@@ -495,7 +529,7 @@ pub fn start_filename(input: NomSpan) -> IResult<NomSpan, NomSpan> {
 }

 #[tracable_parser]
-pub fn member(input: NomSpan) -> IResult<NomSpan, TokenNode> {
+pub fn bare_member(input: NomSpan) -> IResult<NomSpan, SpannedToken> {
     word(
         matches(is_start_member_char),
         matches(is_member_char),
@@ -503,13 +537,22 @@ pub fn member(input: NomSpan) -> IResult<NomSpan, TokenNode> {
     )(input)
 }

+#[tracable_parser]
+pub fn garbage_member(input: NomSpan) -> IResult<NomSpan, SpannedToken> {
+    word(
+        matches(is_garbage_member_char),
+        matches(is_garbage_member_char),
+        TokenTreeBuilder::spanned_garbage,
+    )(input)
+}
+
 #[tracable_parser]
 pub fn ident(input: NomSpan) -> IResult<NomSpan, Tag> {
     word(matches(is_id_start), matches(is_id_continue), Tag::from)(input)
 }

 #[tracable_parser]
-pub fn external_word(input: NomSpan) -> IResult<NomSpan, TokenNode> {
+pub fn external_word(input: NomSpan) -> IResult<NomSpan, SpannedToken> {
     let start = input.offset;
     let (input, _) = take_while1(is_external_word_char)(input)?;
     let end = input.offset;
@@ -517,22 +560,48 @@ pub fn external_word(input: NomSpan) -> IResult<NomSpan, TokenNode> {
     Ok((input, TokenTreeBuilder::spanned_external_word((start, end))))
 }

+enum OneOf<T, U> {
+    First(T),
+    Second(U),
+}
+
+trait SubParser<'a, T>: Sized + Fn(NomSpan<'a>) -> IResult<NomSpan<'a>, T> {}
+
+impl<'a, T, U> SubParser<'a, U> for T where T: Fn(NomSpan<'a>) -> IResult<NomSpan<'a>, U> {}
+
+fn one_of<'a, T, U>(
+    first: impl SubParser<'a, T>,
+    second: impl SubParser<'a, U>,
+) -> impl SubParser<'a, OneOf<T, U>> {
+    move |input: NomSpan<'a>| -> IResult<NomSpan, OneOf<T, U>> {
+        let first_result = first(input);
+
+        match first_result {
+            Ok((input, val)) => Ok((input, OneOf::First(val))),
+            Err(_) => {
+                let (input, val) = second(input)?;
+                Ok((input, OneOf::Second(val)))
+            }
+        }
+    }
+}
+
 #[tracable_parser]
-pub fn var(input: NomSpan) -> IResult<NomSpan, TokenNode> {
+pub fn var(input: NomSpan) -> IResult<NomSpan, SpannedToken> {
     let start = input.offset;
     let (input, _) = tag("$")(input)?;
-    let (input, bare) = ident(input)?;
+    let (input, name) = one_of(tag("it"), ident)(input)?;
     let end = input.offset;

-    Ok((
-        input,
-        TokenTreeBuilder::spanned_var(bare, Span::new(start, end)),
-    ))
+    match name {
+        OneOf::First(it) => Ok((input, TokenTreeBuilder::spanned_it_var(it, (start, end)))),
+        OneOf::Second(name) => Ok((input, TokenTreeBuilder::spanned_var(name, (start, end)))),
+    }
 }

 fn tight<'a>(
-    parser: impl Fn(NomSpan<'a>) -> IResult<NomSpan<'a>, Vec<TokenNode>>,
-) -> impl Fn(NomSpan<'a>) -> IResult<NomSpan<'a>, Vec<TokenNode>> {
+    parser: impl Fn(NomSpan<'a>) -> IResult<NomSpan<'a>, Vec<SpannedToken>>,
+) -> impl Fn(NomSpan<'a>) -> IResult<NomSpan<'a>, Vec<SpannedToken>> {
     move |input: NomSpan| {
         let mut result = vec![];
         let (input, head) = parser(input)?;
@@ -560,7 +629,7 @@ fn tight<'a>(
 }

 #[tracable_parser]
-pub fn flag(input: NomSpan) -> IResult<NomSpan, TokenNode> {
+pub fn flag(input: NomSpan) -> IResult<NomSpan, SpannedToken> {
     let start = input.offset;
     let (input, _) = tag("--")(input)?;
     let (input, bare) = filename(input)?;
@@ -573,7 +642,7 @@ pub fn flag(input: NomSpan) -> IResult<NomSpan, TokenNode> {
 }

 #[tracable_parser]
-pub fn shorthand(input: NomSpan) -> IResult<NomSpan, TokenNode> {
+pub fn shorthand(input: NomSpan) -> IResult<NomSpan, SpannedToken> {
     let start = input.offset;
     let (input, _) = tag("-")(input)?;
     let (input, bare) = filename(input)?;
@@ -586,14 +655,14 @@ pub fn shorthand(input: NomSpan) -> IResult<NomSpan, TokenNode> {
 }

 #[tracable_parser]
-pub fn leaf(input: NomSpan) -> IResult<NomSpan, TokenNode> {
+pub fn leaf(input: NomSpan) -> IResult<NomSpan, SpannedToken> {
     let (input, node) = alt((number, string, operator, flag, shorthand, var, external))(input)?;

     Ok((input, node))
 }

 #[tracable_parser]
-pub fn token_list(input: NomSpan) -> IResult<NomSpan, Spanned<Vec<TokenNode>>> {
+pub fn token_list(input: NomSpan) -> IResult<NomSpan, Spanned<Vec<SpannedToken>>> {
     let start = input.offset;
     let mut node_list = vec![];

@@ -658,7 +727,7 @@ pub fn token_list(input: NomSpan) -> IResult<NomSpan, Spanned<Vec<TokenNode>>> {
 }

 #[tracable_parser]
-pub fn spaced_token_list(input: NomSpan) -> IResult<NomSpan, Spanned<Vec<TokenNode>>> {
+pub fn spaced_token_list(input: NomSpan) -> IResult<NomSpan, Spanned<Vec<SpannedToken>>> {
     let start = input.offset;
     let (input, pre_ws) = opt(any_space)(input)?;
     let (input, items) = token_list(input)?;
@@ -679,10 +748,10 @@ pub fn spaced_token_list(input: NomSpan) -> IResult<NomSpan, Spanned<Vec<TokenNo
 }

 fn make_token_list(
-    first: Vec<TokenNode>,
-    list: Vec<(Vec<TokenNode>, Vec<TokenNode>)>,
-    sp_right: Option<TokenNode>,
-) -> Vec<TokenNode> {
+    first: Vec<SpannedToken>,
+    list: Vec<(Vec<SpannedToken>, Vec<SpannedToken>)>,
+    sp_right: Option<SpannedToken>,
+) -> Vec<SpannedToken> {
     let mut nodes = vec![];

     nodes.extend(first);
@@ -700,7 +769,7 @@ fn make_token_list(
 }

 #[tracable_parser]
-pub fn separator(input: NomSpan) -> IResult<NomSpan, TokenNode> {
+pub fn separator(input: NomSpan) -> IResult<NomSpan, SpannedToken> {
     let left = input.offset;
     let (input, ws1) = alt((tag(";"), tag("\n")))(input)?;
     let right = input.offset;
@@ -709,7 +778,7 @@ pub fn separator(input: NomSpan) -> IResult<NomSpan, TokenNode> {
 }

 #[tracable_parser]
-pub fn whitespace(input: NomSpan) -> IResult<NomSpan, TokenNode> {
+pub fn whitespace(input: NomSpan) -> IResult<NomSpan, SpannedToken> {
     let left = input.offset;
     let (input, ws1) = space1(input)?;
     let right = input.offset;
@@ -718,7 +787,7 @@ pub fn whitespace(input: NomSpan) -> IResult<NomSpan, TokenNode> {
 }

 #[tracable_parser]
-pub fn any_space(input: NomSpan) -> IResult<NomSpan, Vec<TokenNode>> {
+pub fn any_space(input: NomSpan) -> IResult<NomSpan, Vec<SpannedToken>> {
     let left = input.offset;
     let (input, tokens) = many1(alt((whitespace, separator, comment)))(input)?;
     let right = input.offset;
@@ -727,7 +796,7 @@ pub fn any_space(input: NomSpan) -> IResult<NomSpan, Vec<TokenNode>> {
 }

 #[tracable_parser]
-pub fn comment(input: NomSpan) -> IResult<NomSpan, TokenNode> {
+pub fn comment(input: NomSpan) -> IResult<NomSpan, SpannedToken> {
     let left = input.offset;
     let (input, start) = tag("#")(input)?;
     let (input, rest) = not_line_ending(input)?;
@@ -744,7 +813,7 @@ pub fn comment(input: NomSpan) -> IResult<NomSpan, TokenNode> {
 pub fn delimited(
     input: NomSpan,
     delimiter: Delimiter,
-) -> IResult<NomSpan, (Span, Span, Spanned<Vec<TokenNode>>)> {
+) -> IResult<NomSpan, (Span, Span, Spanned<Vec<SpannedToken>>)> {
     let left = input.offset;
     let (input, open_span) = tag(delimiter.open())(input)?;
     let (input, inner_items) = opt(spaced_token_list)(input)?;
@@ -768,7 +837,7 @@ pub fn delimited(
 }

 #[tracable_parser]
-pub fn delimited_paren(input: NomSpan) -> IResult<NomSpan, TokenNode> {
+pub fn delimited_paren(input: NomSpan) -> IResult<NomSpan, SpannedToken> {
     let (input, (left, right, tokens)) = delimited(input, Delimiter::Paren)?;

     Ok((
@@ -778,7 +847,7 @@ pub fn delimited_paren(input: NomSpan) -> IResult<NomSpan, TokenNode> {
 }

 #[tracable_parser]
-pub fn delimited_square(input: NomSpan) -> IResult<NomSpan, TokenNode> {
+pub fn delimited_square(input: NomSpan) -> IResult<NomSpan, SpannedToken> {
     let (input, (left, right, tokens)) = delimited(input, Delimiter::Square)?;

     Ok((
@@ -788,7 +857,7 @@ pub fn delimited_square(input: NomSpan) -> IResult<NomSpan, TokenNode> {
 }

 #[tracable_parser]
-pub fn delimited_brace(input: NomSpan) -> IResult<NomSpan, TokenNode> {
+pub fn delimited_brace(input: NomSpan) -> IResult<NomSpan, SpannedToken> {
     let (input, (left, right, tokens)) = delimited(input, Delimiter::Brace)?;

     Ok((
@@ -810,7 +879,7 @@ pub fn raw_call(input: NomSpan) -> IResult<NomSpan, Spanned<CallNode>> {
 }

 #[tracable_parser]
-pub fn range_continuation(input: NomSpan) -> IResult<NomSpan, Vec<TokenNode>> {
+pub fn range_continuation(input: NomSpan) -> IResult<NomSpan, Vec<SpannedToken>> {
     let original = input;

     let mut result = vec![];
@@ -824,7 +893,7 @@ pub fn range_continuation(input: NomSpan) -> IResult<NomSpan, Vec<TokenNode>> {
 }

 #[tracable_parser]
-pub fn dot_member(input: NomSpan) -> IResult<NomSpan, Vec<TokenNode>> {
+pub fn dot_member(input: NomSpan) -> IResult<NomSpan, Vec<SpannedToken>> {
     let (input, dot_result) = dot(input)?;
     let (input, member_result) = any_member(input)?;

@@ -832,12 +901,12 @@ pub fn dot_member(input: NomSpan) -> IResult<NomSpan, Vec<TokenNode>> {
 }

 #[tracable_parser]
-pub fn any_member(input: NomSpan) -> IResult<NomSpan, TokenNode> {
-    alt((number, string, member))(input)
+pub fn any_member(input: NomSpan) -> IResult<NomSpan, SpannedToken> {
+    alt((int_member, string, bare_member, garbage_member))(input)
 }

 #[tracable_parser]
-pub fn tight_node(input: NomSpan) -> IResult<NomSpan, Vec<TokenNode>> {
+pub fn tight_node(input: NomSpan) -> IResult<NomSpan, Vec<SpannedToken>> {
     alt((
         tight(to_list(leaf)),
         tight(to_list(filename)),
@@ -851,8 +920,8 @@ pub fn tight_node(input: NomSpan) -> IResult<NomSpan, Vec<TokenNode>> {
 }

 fn to_list(
-    parser: impl Fn(NomSpan) -> IResult<NomSpan, TokenNode>,
-) -> impl Fn(NomSpan) -> IResult<NomSpan, Vec<TokenNode>> {
+    parser: impl Fn(NomSpan) -> IResult<NomSpan, SpannedToken>,
+) -> impl Fn(NomSpan) -> IResult<NomSpan, Vec<SpannedToken>> {
     move |input| {
         let (input, next) = parser(input)?;

@@ -861,17 +930,18 @@ fn to_list(
 }

 #[tracable_parser]
-pub fn nodes(input: NomSpan) -> IResult<NomSpan, TokenNode> {
+pub fn nodes(input: NomSpan) -> IResult<NomSpan, SpannedToken> {
     let (input, tokens) = token_list(input)?;
+    let span = tokens.span;

     Ok((
         input,
-        TokenTreeBuilder::spanned_token_list(tokens.item, tokens.span),
+        TokenTreeBuilder::spanned_pipeline(vec![PipelineElement::new(None, tokens)], span),
     ))
 }

 #[tracable_parser]
-pub fn pipeline(input: NomSpan) -> IResult<NomSpan, TokenNode> {
+pub fn pipeline(input: NomSpan) -> IResult<NomSpan, SpannedToken> {
     let start = input.offset;
     let (input, head) = spaced_token_list(input)?;
     let (input, items) = many0(tuple((tag("|"), spaced_token_list)))(input)?;
@@ -900,7 +970,7 @@ pub fn pipeline(input: NomSpan) -> IResult<NomSpan, TokenNode> {
 }

 #[tracable_parser]
-pub fn module(input: NomSpan) -> IResult<NomSpan, TokenNode> {
+pub fn module(input: NomSpan) -> IResult<NomSpan, SpannedToken> {
     let (input, tokens) = spaced_token_list(input)?;

     if input.input_len() != 0 {
@@ -938,8 +1008,7 @@ fn is_boundary(c: Option<char>) -> bool {

 fn is_external_word_char(c: char) -> bool {
     match c {
-        ';' | '|' | '#' | '-' | '"' | '\'' | '$' | '(' | ')' | '[' | ']' | '{' | '}' | '`'
-        | '.' => false,
+        ';' | '|' | '"' | '\'' | '$' | '(' | ')' | '[' | ']' | '{' | '}' | '`' => false,
         other if other.is_whitespace() => false,
         _ => true,
     }
@@ -999,9 +1068,17 @@ fn is_file_char(c: char) -> bool {
     }
 }

+fn is_garbage_member_char(c: char) -> bool {
+    match c {
+        c if c.is_whitespace() => false,
+        '.' => false,
+        _ => true,
+    }
+}
+
 fn is_start_member_char(c: char) -> bool {
     match c {
-        _ if c.is_alphanumeric() => true,
+        _ if c.is_alphabetic() => true,
         '_' => true,
         '-' => true,
         _ => false,
@@ -1263,7 +1340,7 @@ mod tests {
     fn test_variable() {
         equal_tokens! {
             <nodes>
-            "$it" -> b::token_list(vec![b::var("it")])
+            "$it" -> b::token_list(vec![b::it_var()])
         }

         equal_tokens! {
@@ -1354,12 +1431,33 @@ mod tests {

         equal_tokens! {
             <nodes>
-            "$it.print" -> b::token_list(vec![b::var("it"), b::dot(), b::bare("print")])
+            "$it.print" -> b::token_list(vec![b::it_var(), b::dot(), b::bare("print")])
         }

         equal_tokens! {
             <nodes>
-            "$it.0" -> b::token_list(vec![b::var("it"), b::dot(), b::int(0)])
+            r#"nu.0xATYKARNU.baz"# -> b::token_list(vec![
+                b::bare("nu"),
+                b::dot(),
+                b::garbage("0xATYKARNU"),
+                b::dot(),
+                b::bare("baz")
+            ])
+        }
+
+        equal_tokens! {
+            <nodes>
+            "1.b" -> b::token_list(vec![b::int(1), b::dot(), b::bare("b")])
+        }
+
+        equal_tokens! {
+            <nodes>
+            "$it.0" -> b::token_list(vec![b::it_var(), b::dot(), b::int(0)])
+        }
+
+        equal_tokens! {
+            <nodes>
+            "fortune_tellers.2.name" -> b::token_list(vec![b::bare("fortune_tellers"), b::dot(), b::int(2), b::dot(), b::bare("name")])
         }

         equal_tokens! {
@@ -1386,7 +1484,7 @@ mod tests {
             vec![
                 b::parens(vec![
                     b::sp(),
-                    b::var("it"),
+                    b::it_var(),
                     b::dot(),
                     b::bare("is"),
                     b::dot(),
@@ -1407,7 +1505,7 @@ mod tests {
             <nodes>
             r#"$it."are PAS".0"# -> b::token_list(
                 vec![
-                    b::var("it"),
+                    b::it_var(),
                     b::dot(),
                     b::string("are PAS"),
                     b::dot(),
@@ -1445,7 +1543,7 @@ mod tests {
     fn test_smoke_single_command_it() {
         equal_tokens! {
             <nodes>
-            "echo $it" -> b::token_list(vec![b::bare("echo"), b::sp(), b::var("it")])
+            "echo $it" -> b::token_list(vec![b::bare("echo"), b::sp(), b::it_var()])
         }
     }

@@ -1533,6 +1631,17 @@ mod tests {
                 ]
             ])
         }
+
+        equal_tokens! {
+            "^echo 1 | ^cat" -> b::pipeline(vec![
+                vec![
+                    b::external_command("echo"), b::sp(), b::int(1), b::sp()
+                ],
+                vec![
+                    b::sp(), b::external_command("cat")
+                ]
+            ])
+        }
     }

     #[test]
@@ -1631,7 +1740,7 @@ mod tests {
     // b::bare("where"),
     // vec![
    // b::sp(),
-    // b::var("it"),
+    // b::it_var(),
     // b::sp(),
     // b::op("!="),
     // b::sp(),
@@ -1654,7 +1763,7 @@ mod tests {
     // vec![
     // b::sp(),
     // b::braced(vec![
-    // b::path(b::var("it"), vec![b::member("size")]),
+    // b::path(b::it_var(), vec![b::member("size")]),
     // b::sp(),
     // b::op(">"),
     // b::sp(),
@@ -1669,10 +1778,13 @@ mod tests {
     // }

     fn apply(
-        f: impl Fn(NomSpan) -> Result<(NomSpan, TokenNode), nom::Err<(NomSpan, nom::error::ErrorKind)>>,
+        f: impl Fn(
+            NomSpan,
+        )
+            -> Result<(NomSpan, SpannedToken), nom::Err<(NomSpan, nom::error::ErrorKind)>>,
         desc: &str,
         string: &str,
-    ) -> TokenNode {
+    ) -> SpannedToken {
         let result = f(nom_input(string));

         match result {
@@ -1693,20 +1805,15 @@ mod tests {

     fn delimited(
         delimiter: Spanned<Delimiter>,
-        children: Vec<TokenNode>,
+        children: Vec<SpannedToken>,
         left: usize,
         right: usize,
-    ) -> TokenNode {
+    ) -> SpannedToken {
         let start = Span::for_char(left);
         let end = Span::for_char(right);

         let node = DelimitedNode::new(delimiter.item, (start, end), children);
-        let spanned = node.spanned(Span::new(left, right));
-        TokenNode::Delimited(spanned)
-    }
-
-    fn token(token: UnspannedToken, left: usize, right: usize) -> TokenNode {
-        TokenNode::Token(token.into_token(Span::new(left, right)))
+        Token::Delimited(node).into_spanned((left, right))
     }

     fn build<T>(block: CurriedNode<T>) -> T {
@@ -1714,7 +1821,7 @@ mod tests {
         block(&mut builder)
     }

-    fn build_token(block: CurriedToken) -> TokenNode {
+    fn build_token(block: CurriedToken) -> SpannedToken {
         TokenTreeBuilder::build(block).0
     }
 }
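The `one_of` helper introduced in the parser.rs hunk above is an ordered choice: run the first sub-parser, and only if it fails run the second, tagging the result so the caller knows which branch matched (this is how `var` distinguishes `$it` from ordinary variables). A self-contained sketch with a simplified parser signature instead of the nom types:

// Sketch only: the &str -> Result<(rest, value), ()> signature and this toy
// `tag` are stand-ins for the nom-based signature used in the diff.

enum OneOf<T, U> {
    First(T),
    Second(U),
}

fn one_of<'a, T, U>(
    first: impl Fn(&'a str) -> Result<(&'a str, T), ()>,
    second: impl Fn(&'a str) -> Result<(&'a str, U), ()>,
) -> impl Fn(&'a str) -> Result<(&'a str, OneOf<T, U>), ()> {
    move |input: &'a str| -> Result<(&'a str, OneOf<T, U>), ()> {
        match first(input) {
            Ok((rest, val)) => Ok((rest, OneOf::First(val))),
            Err(_) => {
                let (rest, val) = second(input)?;
                Ok((rest, OneOf::Second(val)))
            }
        }
    }
}

fn tag<'a>(t: &'static str) -> impl Fn(&'a str) -> Result<(&'a str, &'a str), ()> {
    move |input: &'a str| -> Result<(&'a str, &'a str), ()> {
        input.strip_prefix(t).map(|rest| (rest, t)).ok_or(())
    }
}

fn main() {
    // Mirrors the `$it`-versus-identifier split in `var`: try "it" first.
    let it_or_word = one_of(tag("it"), tag("hello"));
    assert!(matches!(it_or_word("it rest"), Ok((_, OneOf::First(_)))));
    assert!(matches!(it_or_word("hello!"), Ok((_, OneOf::Second(_)))));
    assert!(it_or_word("nope").is_err());
}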
@@ -1,23 +1,32 @@
-use crate::TokenNode;
+use crate::{SpannedToken, Token};
 use derive_new::new;
 use getset::Getters;
-use nu_source::{b, DebugDocBuilder, HasSpan, PrettyDebugWithSource, Span, Spanned};
+use nu_source::{
+    b, DebugDocBuilder, HasSpan, IntoSpanned, PrettyDebugWithSource, Span, Spanned, SpannedItem,
+};

 #[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Getters, new)]
 pub struct Pipeline {
     #[get = "pub"]
     pub(crate) parts: Vec<PipelineElement>,
-    pub(crate) span: Span,
 }
+
+impl IntoSpanned for Pipeline {
+    type Output = Spanned<Pipeline>;
+
+    fn into_spanned(self, span: impl Into<Span>) -> Self::Output {
+        self.spanned(span.into())
+    }
+}

 #[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Getters, new)]
 pub struct Tokens {
-    pub(crate) tokens: Vec<TokenNode>,
+    pub(crate) tokens: Vec<SpannedToken>,
     pub(crate) span: Span,
 }

 impl Tokens {
-    pub fn iter(&self) -> impl Iterator<Item = &TokenNode> {
+    pub fn iter(&self) -> impl Iterator<Item = &SpannedToken> {
         self.tokens.iter()
     }
 }
@@ -38,7 +47,7 @@ impl HasSpan for PipelineElement {
 }

 impl PipelineElement {
-    pub fn new(pipe: Option<Span>, tokens: Spanned<Vec<TokenNode>>) -> PipelineElement {
+    pub fn new(pipe: Option<Span>, tokens: Spanned<Vec<SpannedToken>>) -> PipelineElement {
         PipelineElement {
             pipe,
             tokens: Tokens {
@@ -48,7 +57,7 @@ impl PipelineElement {
         }
     }

-    pub fn tokens(&self) -> &[TokenNode] {
+    pub fn tokens(&self) -> &[SpannedToken] {
         &self.tokens.tokens
     }
 }
@@ -65,9 +74,9 @@ impl PrettyDebugWithSource for Pipeline {
 impl PrettyDebugWithSource for PipelineElement {
     fn pretty_debug(&self, source: &str) -> DebugDocBuilder {
         b::intersperse(
-            self.tokens.iter().map(|token| match token {
-                TokenNode::Whitespace(_) => b::blank(),
-                token => token.pretty_debug(source),
+            self.tokens.iter().map(|token| match token.unspanned() {
+                Token::Whitespace => b::blank(),
+                _ => token.pretty_debug(source),
             }),
             b::space(),
         )
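The pipeline.rs change leans on the `Spanned<T>` / `IntoSpanned` pattern: a parsed value is paired with the source range it came from, and `into_spanned` is the uniform way to attach that range. A standalone sketch of the pattern with stand-in `Span` and `Spanned` types (the real ones live in nu_source):

// Sketch only: Span, Spanned and IntoSpanned here are simplified stand-ins.

#[derive(Debug, Clone, Copy, PartialEq)]
struct Span {
    start: usize,
    end: usize,
}

#[derive(Debug, Clone, PartialEq)]
struct Spanned<T> {
    item: T,
    span: Span,
}

trait IntoSpanned: Sized {
    type Output;
    fn into_spanned(self, span: Span) -> Self::Output;
}

// Blanket impl for this sketch: any value can be wrapped with its span.
impl<T> IntoSpanned for T {
    type Output = Spanned<T>;
    fn into_spanned(self, span: Span) -> Spanned<T> {
        Spanned { item: self, span }
    }
}

fn main() {
    let pipeline = vec!["ls", "where size > 10"];
    let spanned = pipeline.into_spanned(Span { start: 0, end: 22 });
    assert_eq!(spanned.span, Span { start: 0, end: 22 });
    assert_eq!(spanned.item.len(), 2);
}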
@ -1,162 +1,275 @@
|
|||||||
use crate::parse::{call_node::*, comment::*, flag::*, operator::*, pipeline::*, tokens::*};
|
#![allow(clippy::type_complexity)]
|
||||||
|
use crate::parse::{call_node::*, comment::*, flag::*, number::*, operator::*, pipeline::*};
|
||||||
use derive_new::new;
|
use derive_new::new;
|
||||||
use getset::Getters;
|
use getset::Getters;
|
||||||
use nu_errors::{ParseError, ShellError};
|
use nu_errors::{ParseError, ShellError};
|
||||||
use nu_protocol::ShellTypeName;
|
use nu_protocol::{ShellTypeName, SpannedTypeName};
|
||||||
use nu_source::{
|
use nu_source::{
|
||||||
b, DebugDocBuilder, HasSpan, PrettyDebugWithSource, Span, Spanned, SpannedItem, Tagged,
|
b, DebugDocBuilder, HasSpan, PrettyDebugWithSource, Span, Spanned, SpannedItem, Text,
|
||||||
TaggedItem, Text,
|
|
||||||
};
|
};
|
||||||
use std::fmt;
|
use std::borrow::Cow;
|
||||||
|
use std::ops::Deref;
|
||||||
|
|
||||||
#[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd)]
|
#[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd)]
|
||||||
pub enum TokenNode {
|
pub enum Token {
|
||||||
Token(Token),
|
Number(RawNumber),
|
||||||
|
CompareOperator(CompareOperator),
|
||||||
|
EvaluationOperator(EvaluationOperator),
|
||||||
|
String(Span),
|
||||||
|
Variable(Span),
|
||||||
|
ItVariable(Span),
|
||||||
|
ExternalCommand(Span),
|
||||||
|
ExternalWord,
|
||||||
|
GlobPattern,
|
||||||
|
Bare,
|
||||||
|
Garbage,
|
||||||
|
|
||||||
Call(Spanned<CallNode>),
|
Call(CallNode),
|
||||||
Nodes(Spanned<Vec<TokenNode>>),
|
Delimited(DelimitedNode),
|
||||||
Delimited(Spanned<DelimitedNode>),
|
|
||||||
Pipeline(Pipeline),
|
Pipeline(Pipeline),
|
||||||
Flag(Flag),
|
Flag(Flag),
|
||||||
Comment(Comment),
|
Comment(Comment),
|
||||||
Whitespace(Span),
|
Whitespace,
|
||||||
Separator(Span),
|
Separator,
|
||||||
|
|
||||||
Error(Spanned<ShellError>),
|
|
||||||
}
|
}
|
||||||
|
|
||||||
impl PrettyDebugWithSource for TokenNode {
|
macro_rules! token_type {
|
||||||
|
(struct $name:tt (desc: $desc:tt) -> $out:ty { |$span:ident, $pat:pat| => $do:expr }) => {
|
||||||
|
pub struct $name;
|
||||||
|
|
||||||
|
impl TokenType for $name {
|
||||||
|
type Output = $out;
|
||||||
|
|
||||||
|
fn desc(&self) -> Cow<'static, str> {
|
||||||
|
Cow::Borrowed($desc)
|
||||||
|
}
|
||||||
|
|
||||||
|
fn extract_token_value(
|
||||||
|
&self,
|
||||||
|
token: &SpannedToken,
|
||||||
|
err: ParseErrorFn<$out>,
|
||||||
|
) -> Result<$out, ParseError> {
|
||||||
|
let $span = token.span();
|
||||||
|
|
||||||
|
match *token.unspanned() {
|
||||||
|
$pat => Ok($do),
|
||||||
|
_ => err(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
(struct $name:tt (desc: $desc:tt) -> $out:ty { $pat:pat => $do:expr }) => {
|
||||||
|
pub struct $name;
|
||||||
|
|
+        impl TokenType for $name {
+            type Output = $out;
+
+            fn desc(&self) -> Cow<'static, str> {
+                Cow::Borrowed($desc)
+            }
+
+            fn extract_token_value(
+                &self,
+                token: &SpannedToken,
+                err: ParseErrorFn<$out>,
+            ) -> Result<$out, ParseError> {
+                match token.unspanned().clone() {
+                    $pat => Ok($do),
+                    _ => err(),
+                }
+            }
+        }
+    };
+}
+
+pub type ParseErrorFn<'a, T> = &'a dyn Fn() -> Result<T, ParseError>;
+
+token_type!(struct IntType (desc: "integer") -> RawNumber {
+    Token::Number(number @ RawNumber::Int(_)) => number
+});
+
+token_type!(struct DecimalType (desc: "decimal") -> RawNumber {
+    Token::Number(number @ RawNumber::Decimal(_)) => number
+});
+
+token_type!(struct StringType (desc: "string") -> (Span, Span) {
+    |outer, Token::String(inner)| => (inner, outer)
+});
+
+token_type!(struct BareType (desc: "word") -> Span {
+    |span, Token::Bare| => span
+});
+
+token_type!(struct DotType (desc: "dot") -> Span {
+    |span, Token::EvaluationOperator(EvaluationOperator::Dot)| => span
+});
+
+token_type!(struct DotDotType (desc: "dotdot") -> Span {
+    |span, Token::EvaluationOperator(EvaluationOperator::DotDot)| => span
+});
+
+token_type!(struct CompareOperatorType (desc: "compare operator") -> (Span, CompareOperator) {
+    |span, Token::CompareOperator(operator)| => (span, operator)
+});
+
+token_type!(struct ExternalWordType (desc: "external word") -> Span {
+    |span, Token::ExternalWord| => span
+});
+
+token_type!(struct ExternalCommandType (desc: "external command") -> (Span, Span) {
+    |outer, Token::ExternalCommand(inner)| => (inner, outer)
+});
+
+token_type!(struct CommentType (desc: "comment") -> (Comment, Span) {
+    |outer, Token::Comment(comment)| => (comment, outer)
+});
+
+token_type!(struct SeparatorType (desc: "separator") -> Span {
+    |span, Token::Separator| => span
+});
+
+token_type!(struct WhitespaceType (desc: "whitespace") -> Span {
+    |span, Token::Whitespace| => span
+});
+
+token_type!(struct WordType (desc: "word") -> Span {
+    |span, Token::Bare| => span
+});
+
+token_type!(struct ItVarType (desc: "$it") -> (Span, Span) {
+    |outer, Token::ItVariable(inner)| => (inner, outer)
+});
+
+token_type!(struct VarType (desc: "variable") -> (Span, Span) {
+    |outer, Token::Variable(inner)| => (inner, outer)
+});
+
+token_type!(struct PipelineType (desc: "pipeline") -> Pipeline {
+    Token::Pipeline(pipeline) => pipeline
+});
+
+token_type!(struct BlockType (desc: "block") -> DelimitedNode {
+    Token::Delimited(block @ DelimitedNode { delimiter: Delimiter::Brace, .. }) => block
+});
+
+token_type!(struct SquareType (desc: "square") -> DelimitedNode {
+    Token::Delimited(square @ DelimitedNode { delimiter: Delimiter::Square, .. }) => square
+});
+
+pub trait TokenType {
+    type Output;
+
+    fn desc(&self) -> Cow<'static, str>;
+
+    fn extract_token_value(
+        &self,
+        token: &SpannedToken,
+        err: ParseErrorFn<Self::Output>,
+    ) -> Result<Self::Output, ParseError>;
+}
+
+impl Token {
+    pub fn into_spanned(self, span: impl Into<Span>) -> SpannedToken {
+        SpannedToken {
+            unspanned: self,
+            span: span.into(),
+        }
+    }
+}
+
+#[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Getters)]
+pub struct SpannedToken {
+    #[get = "pub"]
+    unspanned: Token,
+    span: Span,
+}
+
+impl Deref for SpannedToken {
+    type Target = Token;
+    fn deref(&self) -> &Self::Target {
+        &self.unspanned
+    }
+}
+
+impl HasSpan for SpannedToken {
+    fn span(&self) -> Span {
+        self.span
+    }
+}
+
+impl ShellTypeName for SpannedToken {
+    fn type_name(&self) -> &'static str {
+        self.unspanned.type_name()
+    }
+}
+
+impl PrettyDebugWithSource for SpannedToken {
    fn pretty_debug(&self, source: &str) -> DebugDocBuilder {
-        match self {
+        match self.unspanned() {
-            TokenNode::Token(token) => token.pretty_debug(source),
+            Token::Number(number) => number.pretty_debug(source),
-            TokenNode::Call(call) => call.pretty_debug(source),
+            Token::CompareOperator(operator) => operator.pretty_debug(source),
-            TokenNode::Nodes(nodes) => b::intersperse(
+            Token::EvaluationOperator(operator) => operator.pretty_debug(source),
-                nodes.iter().map(|node| node.pretty_debug(source)),
+            Token::String(_) | Token::GlobPattern | Token::Bare => {
-                b::space(),
+                b::primitive(self.span.slice(source))
-            ),
+            }
-            TokenNode::Delimited(delimited) => delimited.pretty_debug(source),
+            Token::Variable(_) => b::var(self.span.slice(source)),
-            TokenNode::Pipeline(pipeline) => pipeline.pretty_debug(source),
+            Token::ItVariable(_) => b::keyword(self.span.slice(source)),
-            TokenNode::Flag(flag) => flag.pretty_debug(source),
+            Token::ExternalCommand(_) => b::description(self.span.slice(source)),
-            TokenNode::Whitespace(space) => b::typed(
+            Token::ExternalWord => b::description(self.span.slice(source)),
+            Token::Call(call) => call.pretty_debug(source),
+            Token::Delimited(delimited) => delimited.pretty_debug(source),
+            Token::Pipeline(pipeline) => pipeline.pretty_debug(source),
+            Token::Flag(flag) => flag.pretty_debug(source),
+            Token::Garbage => b::error(self.span.slice(source)),
+            Token::Whitespace => b::typed(
                "whitespace",
-                b::description(format!("{:?}", space.slice(source))),
+                b::description(format!("{:?}", self.span.slice(source))),
            ),
-            TokenNode::Separator(span) => b::typed(
+            Token::Separator => b::typed(
                "separator",
-                b::description(format!("{:?}", span.slice(source))),
+                b::description(format!("{:?}", self.span.slice(source))),
            ),
-            TokenNode::Comment(comment) => {
+            Token::Comment(comment) => {
                b::typed("comment", b::description(comment.text.slice(source)))
            }
-            TokenNode::Error(_) => b::error("error"),
        }
    }
}

-impl ShellTypeName for TokenNode {
+impl ShellTypeName for Token {
    fn type_name(&self) -> &'static str {
        match self {
-            TokenNode::Token(t) => t.type_name(),
+            Token::Number(_) => "number",
-            TokenNode::Nodes(_) => "nodes",
+            Token::CompareOperator(_) => "comparison operator",
-            TokenNode::Call(_) => "command",
+            Token::EvaluationOperator(EvaluationOperator::Dot) => "dot",
-            TokenNode::Delimited(d) => d.type_name(),
+            Token::EvaluationOperator(EvaluationOperator::DotDot) => "dot dot",
-            TokenNode::Pipeline(_) => "pipeline",
+            Token::String(_) => "string",
-            TokenNode::Flag(_) => "flag",
+            Token::Variable(_) => "variable",
-            TokenNode::Whitespace(_) => "whitespace",
+            Token::ItVariable(_) => "it variable",
-            TokenNode::Separator(_) => "separator",
+            Token::ExternalCommand(_) => "external command",
-            TokenNode::Comment(_) => "comment",
+            Token::ExternalWord => "external word",
-            TokenNode::Error(_) => "error",
+            Token::GlobPattern => "glob pattern",
+            Token::Bare => "word",
+            Token::Call(_) => "command",
+            Token::Delimited(d) => d.type_name(),
+            Token::Pipeline(_) => "pipeline",
+            Token::Flag(_) => "flag",
+            Token::Garbage => "garbage",
+            Token::Whitespace => "whitespace",
+            Token::Separator => "separator",
+            Token::Comment(_) => "comment",
        }
    }
}
-pub struct DebugTokenNode<'a> {
+impl From<&SpannedToken> for Span {
-    node: &'a TokenNode,
+    fn from(token: &SpannedToken) -> Span {
-    source: &'a Text,
+        token.span
-}

-impl fmt::Debug for DebugTokenNode<'_> {
-    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
-        match self.node {
-            TokenNode::Token(t) => write!(f, "{:?}", t.debug(self.source)),
-            TokenNode::Call(s) => {
-                write!(f, "(")?;
-
-                write!(f, "{}", s.head().debug(self.source))?;
-
-                if let Some(children) = s.children() {
-                    for child in children {
-                        write!(f, "{}", child.debug(self.source))?;
-                    }
-                }
-
-                write!(f, ")")
-            }
-
-            TokenNode::Delimited(d) => {
-                write!(
-                    f,
-                    "{}",
-                    match d.delimiter {
-                        Delimiter::Brace => "{",
-                        Delimiter::Paren => "(",
-                        Delimiter::Square => "[",
-                    }
-                )?;
-
-                for child in d.children() {
-                    write!(f, "{:?}", child.old_debug(self.source))?;
-                }
-
-                write!(
-                    f,
-                    "{}",
-                    match d.delimiter {
-                        Delimiter::Brace => "}",
-                        Delimiter::Paren => ")",
-                        Delimiter::Square => "]",
-                    }
-                )
-            }
-            TokenNode::Pipeline(pipeline) => write!(f, "{}", pipeline.debug(self.source)),
-            TokenNode::Error(_) => write!(f, "<error>"),
-            rest => write!(f, "{}", rest.span().slice(self.source)),
-        }
    }
}

-impl From<&TokenNode> for Span {
+impl SpannedToken {
-    fn from(token: &TokenNode) -> Span {
-        token.span()
-    }
-}
-
-impl HasSpan for TokenNode {
-    fn span(&self) -> Span {
-        match self {
-            TokenNode::Token(t) => t.span,
-            TokenNode::Nodes(t) => t.span,
-            TokenNode::Call(s) => s.span,
-            TokenNode::Delimited(s) => s.span,
-            TokenNode::Pipeline(s) => s.span,
-            TokenNode::Flag(s) => s.span,
-            TokenNode::Whitespace(s) => *s,
-            TokenNode::Separator(s) => *s,
-            TokenNode::Comment(c) => c.span(),
-            TokenNode::Error(s) => s.span,
-        }
-    }
-}
-
-impl TokenNode {
-    pub fn tagged_type_name(&self) -> Tagged<&'static str> {
-        self.type_name().tagged(self.span())
-    }
-
-    pub fn old_debug<'a>(&'a self, source: &'a Text) -> DebugTokenNode<'a> {
-        DebugTokenNode { node: self, source }
-    }
-
    pub fn as_external_arg(&self, source: &Text) -> String {
        self.span().slice(source).to_string()
    }
@ -166,145 +279,105 @@ impl TokenNode {
|
|||||||
}
|
}
|
||||||
|
|
||||||
pub fn get_variable(&self) -> Result<(Span, Span), ShellError> {
|
pub fn get_variable(&self) -> Result<(Span, Span), ShellError> {
|
||||||
match self {
|
match self.unspanned() {
|
||||||
TokenNode::Token(Token {
|
Token::Variable(inner_span) => Ok((self.span(), *inner_span)),
|
||||||
unspanned: UnspannedToken::Variable(inner_span),
|
_ => Err(ShellError::type_error("variable", self.spanned_type_name())),
|
||||||
span: outer_span,
|
|
||||||
}) => Ok((*outer_span, *inner_span)),
|
|
||||||
_ => Err(ShellError::type_error(
|
|
||||||
"variable",
|
|
||||||
self.type_name().spanned(self.span()),
|
|
||||||
)),
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn is_bare(&self) -> bool {
|
pub fn is_bare(&self) -> bool {
|
||||||
match self {
|
match self.unspanned() {
|
||||||
TokenNode::Token(Token {
|
Token::Bare => true,
|
||||||
unspanned: UnspannedToken::Bare,
|
|
||||||
..
|
|
||||||
}) => true,
|
|
||||||
_ => false,
|
_ => false,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn is_string(&self) -> bool {
|
pub fn is_string(&self) -> bool {
|
||||||
match self {
|
match self.unspanned() {
|
||||||
TokenNode::Token(Token {
|
Token::String(_) => true,
|
||||||
unspanned: UnspannedToken::String(_),
|
|
||||||
..
|
|
||||||
}) => true,
|
|
||||||
_ => false,
|
_ => false,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn is_number(&self) -> bool {
|
pub fn is_number(&self) -> bool {
|
||||||
match self {
|
match self.unspanned() {
|
||||||
TokenNode::Token(Token {
|
Token::Number(_) => true,
|
||||||
unspanned: UnspannedToken::Number(_),
|
|
||||||
..
|
|
||||||
}) => true,
|
|
||||||
_ => false,
|
_ => false,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn as_string(&self) -> Option<(Span, Span)> {
|
pub fn as_string(&self) -> Option<(Span, Span)> {
|
||||||
match self {
|
match self.unspanned() {
|
||||||
TokenNode::Token(Token {
|
Token::String(inner_span) => Some((self.span(), *inner_span)),
|
||||||
unspanned: UnspannedToken::String(inner_span),
|
|
||||||
span: outer_span,
|
|
||||||
}) => Some((*outer_span, *inner_span)),
|
|
||||||
_ => None,
|
_ => None,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn is_pattern(&self) -> bool {
|
pub fn is_pattern(&self) -> bool {
|
||||||
match self {
|
match self.unspanned() {
|
||||||
TokenNode::Token(Token {
|
Token::GlobPattern => true,
|
||||||
unspanned: UnspannedToken::GlobPattern,
|
|
||||||
..
|
|
||||||
}) => true,
|
|
||||||
_ => false,
|
_ => false,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn is_word(&self) -> bool {
|
pub fn is_word(&self) -> bool {
|
||||||
match self {
|
match self.unspanned() {
|
||||||
TokenNode::Token(Token {
|
Token::Bare => true,
|
||||||
unspanned: UnspannedToken::Bare,
|
|
||||||
..
|
|
||||||
}) => true,
|
|
||||||
_ => false,
|
_ => false,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn is_int(&self) -> bool {
|
pub fn is_int(&self) -> bool {
|
||||||
match self {
|
match self.unspanned() {
|
||||||
TokenNode::Token(Token {
|
Token::Number(RawNumber::Int(_)) => true,
|
||||||
unspanned: UnspannedToken::Number(RawNumber::Int(_)),
|
|
||||||
..
|
|
||||||
}) => true,
|
|
||||||
_ => false,
|
_ => false,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn is_dot(&self) -> bool {
|
pub fn is_dot(&self) -> bool {
|
||||||
match self {
|
match self.unspanned() {
|
||||||
TokenNode::Token(Token {
|
Token::EvaluationOperator(EvaluationOperator::Dot) => true,
|
||||||
unspanned: UnspannedToken::EvaluationOperator(EvaluationOperator::Dot),
|
|
||||||
..
|
|
||||||
}) => true,
|
|
||||||
_ => false,
|
_ => false,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn as_block(&self) -> Option<(Spanned<&[TokenNode]>, (Span, Span))> {
|
pub fn as_block(&self) -> Option<(Spanned<&[SpannedToken]>, (Span, Span))> {
|
||||||
match self {
|
match self.unspanned() {
|
||||||
TokenNode::Delimited(Spanned {
|
Token::Delimited(DelimitedNode {
|
||||||
item:
|
delimiter,
|
||||||
DelimitedNode {
|
children,
|
||||||
delimiter,
|
spans,
|
||||||
children,
|
}) if *delimiter == Delimiter::Brace => {
|
||||||
spans,
|
Some(((&children[..]).spanned(self.span()), *spans))
|
||||||
},
|
|
||||||
span,
|
|
||||||
}) if *delimiter == Delimiter::Brace => Some(((&children[..]).spanned(*span), *spans)),
|
|
||||||
_ => None,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn is_external(&self) -> bool {
|
|
||||||
match self {
|
|
||||||
TokenNode::Token(Token {
|
|
||||||
unspanned: UnspannedToken::ExternalCommand(..),
|
|
||||||
..
|
|
||||||
}) => true,
|
|
||||||
_ => false,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub(crate) fn as_flag(&self, value: &str, source: &Text) -> Option<Flag> {
|
|
||||||
match self {
|
|
||||||
TokenNode::Flag(flag @ Flag { .. }) if value == flag.name().slice(source) => {
|
|
||||||
Some(*flag)
|
|
||||||
}
|
}
|
||||||
_ => None,
|
_ => None,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
pub fn is_external(&self) -> bool {
|
||||||
|
match self.unspanned() {
|
||||||
|
Token::ExternalCommand(..) => true,
|
||||||
|
_ => false,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub(crate) fn as_flag(&self, value: &str, source: &Text) -> Option<Flag> {
|
||||||
|
match self.unspanned() {
|
||||||
|
Token::Flag(flag @ Flag { .. }) if value == flag.name().slice(source) => Some(*flag),
|
||||||
|
_ => None,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
pub fn as_pipeline(&self) -> Result<Pipeline, ParseError> {
|
pub fn as_pipeline(&self) -> Result<Pipeline, ParseError> {
|
||||||
match self {
|
match self.unspanned() {
|
||||||
TokenNode::Pipeline(pipeline) => Ok(pipeline.clone()),
|
Token::Pipeline(pipeline) => Ok(pipeline.clone()),
|
||||||
other => Err(ParseError::mismatch(
|
_ => Err(ParseError::mismatch("pipeline", self.spanned_type_name())),
|
||||||
"pipeline",
|
|
||||||
other.type_name().spanned(other.span()),
|
|
||||||
)),
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn is_whitespace(&self) -> bool {
|
pub fn is_whitespace(&self) -> bool {
|
||||||
match self {
|
match self.unspanned() {
|
||||||
TokenNode::Whitespace(_) => true,
|
Token::Whitespace => true,
|
||||||
_ => false,
|
_ => false,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@ -315,7 +388,13 @@ impl TokenNode {
|
|||||||
pub struct DelimitedNode {
|
pub struct DelimitedNode {
|
||||||
pub(crate) delimiter: Delimiter,
|
pub(crate) delimiter: Delimiter,
|
||||||
pub(crate) spans: (Span, Span),
|
pub(crate) spans: (Span, Span),
|
||||||
pub(crate) children: Vec<TokenNode>,
|
pub(crate) children: Vec<SpannedToken>,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl HasSpan for DelimitedNode {
|
||||||
|
fn span(&self) -> Span {
|
||||||
|
self.spans.0.until(self.spans.1)
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl PrettyDebugWithSource for DelimitedNode {
|
impl PrettyDebugWithSource for DelimitedNode {
|
||||||
@ -369,79 +448,68 @@ impl Delimiter {
|
|||||||
#[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Getters, new)]
|
#[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Getters, new)]
|
||||||
#[get = "pub(crate)"]
|
#[get = "pub(crate)"]
|
||||||
pub struct PathNode {
|
pub struct PathNode {
|
||||||
head: Box<TokenNode>,
|
head: Box<SpannedToken>,
|
||||||
tail: Vec<TokenNode>,
|
tail: Vec<SpannedToken>,
|
||||||
}
|
}
|
||||||
|
|
||||||
#[cfg(test)]
|
#[cfg(test)]
|
||||||
impl TokenNode {
|
impl SpannedToken {
|
||||||
pub fn expect_external(&self) -> Span {
|
pub fn expect_external(&self) -> Span {
|
||||||
match self {
|
match self.unspanned() {
|
||||||
TokenNode::Token(Token {
|
Token::ExternalCommand(span) => *span,
|
||||||
unspanned: UnspannedToken::ExternalCommand(span),
|
_ => panic!(
|
||||||
..
|
|
||||||
}) => *span,
|
|
||||||
other => panic!(
|
|
||||||
"Only call expect_external if you checked is_external first, found {:?}",
|
"Only call expect_external if you checked is_external first, found {:?}",
|
||||||
other
|
self
|
||||||
),
|
),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn expect_string(&self) -> (Span, Span) {
|
pub fn expect_string(&self) -> (Span, Span) {
|
||||||
match self {
|
match self.unspanned() {
|
||||||
TokenNode::Token(Token {
|
Token::String(inner_span) => (self.span(), *inner_span),
|
||||||
unspanned: UnspannedToken::String(inner_span),
|
|
||||||
span: outer_span,
|
|
||||||
}) => (*outer_span, *inner_span),
|
|
||||||
other => panic!("Expected string, found {:?}", other),
|
other => panic!("Expected string, found {:?}", other),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn expect_list(&self) -> Spanned<&[TokenNode]> {
|
pub fn expect_list(&self) -> Spanned<Vec<SpannedToken>> {
|
||||||
match self {
|
match self.unspanned() {
|
||||||
TokenNode::Nodes(token_nodes) => token_nodes[..].spanned(token_nodes.span),
|
Token::Pipeline(pipeline) => pipeline
|
||||||
other => panic!("Expected list, found {:?}", other),
|
.parts()
|
||||||
|
.iter()
|
||||||
|
.flat_map(|part| part.tokens())
|
||||||
|
.cloned()
|
||||||
|
.collect::<Vec<SpannedToken>>()
|
||||||
|
.spanned(self.span()),
|
||||||
|
_ => panic!("Expected list, found {:?}", self),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn expect_pattern(&self) -> Span {
|
pub fn expect_pattern(&self) -> Span {
|
||||||
match self {
|
match self.unspanned() {
|
||||||
TokenNode::Token(Token {
|
Token::GlobPattern => self.span(),
|
||||||
unspanned: UnspannedToken::GlobPattern,
|
_ => panic!("Expected pattern, found {:?}", self),
|
||||||
span: outer_span,
|
|
||||||
}) => *outer_span,
|
|
||||||
other => panic!("Expected pattern, found {:?}", other),
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn expect_var(&self) -> (Span, Span) {
|
pub fn expect_var(&self) -> (Span, Span) {
|
||||||
match self {
|
match self.unspanned() {
|
||||||
TokenNode::Token(Token {
|
Token::Variable(inner_span) => (self.span(), *inner_span),
|
||||||
unspanned: UnspannedToken::Variable(inner_span),
|
Token::ItVariable(inner_span) => (self.span(), *inner_span),
|
||||||
span: outer_span,
|
|
||||||
}) => (*outer_span, *inner_span),
|
|
||||||
other => panic!("Expected var, found {:?}", other),
|
other => panic!("Expected var, found {:?}", other),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn expect_dot(&self) -> Span {
|
pub fn expect_dot(&self) -> Span {
|
||||||
match self {
|
match self.unspanned() {
|
||||||
TokenNode::Token(Token {
|
Token::EvaluationOperator(EvaluationOperator::Dot) => self.span(),
|
||||||
unspanned: UnspannedToken::EvaluationOperator(EvaluationOperator::Dot),
|
|
||||||
span,
|
|
||||||
}) => *span,
|
|
||||||
other => panic!("Expected dot, found {:?}", other),
|
other => panic!("Expected dot, found {:?}", other),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn expect_bare(&self) -> Span {
|
pub fn expect_bare(&self) -> Span {
|
||||||
match self {
|
match self.unspanned() {
|
||||||
TokenNode::Token(Token {
|
Token::Bare => self.span(),
|
||||||
unspanned: UnspannedToken::Bare,
|
_ => panic!("Expected bare, found {:?}", self),
|
||||||
span,
|
|
||||||
}) => *span,
|
|
||||||
other => panic!("Expected bare, found {:?}", other),
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -1,10 +1,10 @@
use crate::parse::call_node::CallNode;
use crate::parse::comment::Comment;
use crate::parse::flag::{Flag, FlagKind};
+use crate::parse::number::RawNumber;
use crate::parse::operator::{CompareOperator, EvaluationOperator};
use crate::parse::pipeline::{Pipeline, PipelineElement};
-use crate::parse::token_tree::{DelimitedNode, Delimiter, TokenNode};
+use crate::parse::token_tree::{DelimitedNode, Delimiter, SpannedToken, Token};
-use crate::parse::tokens::{RawNumber, UnspannedToken};
use bigdecimal::BigDecimal;
use nu_source::{Span, Spanned, SpannedItem};
use num_bigint::BigInt;
@ -21,11 +21,11 @@ impl TokenTreeBuilder {
    }
}

-pub type CurriedToken = Box<dyn FnOnce(&mut TokenTreeBuilder) -> TokenNode + 'static>;
+pub type CurriedToken = Box<dyn FnOnce(&mut TokenTreeBuilder) -> SpannedToken + 'static>;
pub type CurriedCall = Box<dyn FnOnce(&mut TokenTreeBuilder) -> Spanned<CallNode> + 'static>;

impl TokenTreeBuilder {
-    pub fn build(block: impl FnOnce(&mut Self) -> TokenNode) -> (TokenNode, String) {
+    pub fn build(block: impl FnOnce(&mut Self) -> SpannedToken) -> (SpannedToken, String) {
        let mut builder = TokenTreeBuilder::new();
        let node = block(&mut builder);
        (node, builder.output)
@ -77,8 +77,8 @@ impl TokenTreeBuilder {
|
|||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn spanned_pipeline(input: Vec<PipelineElement>, span: impl Into<Span>) -> TokenNode {
|
pub fn spanned_pipeline(input: Vec<PipelineElement>, span: impl Into<Span>) -> SpannedToken {
|
||||||
TokenNode::Pipeline(Pipeline::new(input, span.into()))
|
Token::Pipeline(Pipeline::new(input)).into_spanned(span)
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn token_list(input: Vec<CurriedToken>) -> CurriedToken {
|
pub fn token_list(input: Vec<CurriedToken>) -> CurriedToken {
|
||||||
@ -91,8 +91,28 @@ impl TokenTreeBuilder {
|
|||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn spanned_token_list(input: Vec<TokenNode>, span: impl Into<Span>) -> TokenNode {
|
pub fn spanned_token_list(input: Vec<SpannedToken>, span: impl Into<Span>) -> SpannedToken {
|
||||||
TokenNode::Nodes(input.spanned(span.into()))
|
let span = span.into();
|
||||||
|
Token::Pipeline(Pipeline::new(vec![PipelineElement::new(
|
||||||
|
None,
|
||||||
|
input.spanned(span),
|
||||||
|
)]))
|
||||||
|
.into_spanned(span)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn garbage(input: impl Into<String>) -> CurriedToken {
|
||||||
|
let input = input.into();
|
||||||
|
|
||||||
|
Box::new(move |b| {
|
||||||
|
let (start, end) = b.consume(&input);
|
||||||
|
b.pos = end;
|
||||||
|
|
||||||
|
TokenTreeBuilder::spanned_garbage(Span::new(start, end))
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn spanned_garbage(span: impl Into<Span>) -> SpannedToken {
|
||||||
|
Token::Garbage.into_spanned(span)
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn op(input: impl Into<CompareOperator>) -> CurriedToken {
|
pub fn op(input: impl Into<CompareOperator>) -> CurriedToken {
|
||||||
@ -107,8 +127,11 @@ impl TokenTreeBuilder {
|
|||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn spanned_cmp_op(input: impl Into<CompareOperator>, span: impl Into<Span>) -> TokenNode {
|
pub fn spanned_cmp_op(
|
||||||
TokenNode::Token(UnspannedToken::CompareOperator(input.into()).into_token(span))
|
input: impl Into<CompareOperator>,
|
||||||
|
span: impl Into<Span>,
|
||||||
|
) -> SpannedToken {
|
||||||
|
Token::CompareOperator(input.into()).into_spanned(span)
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn dot() -> CurriedToken {
|
pub fn dot() -> CurriedToken {
|
||||||
@ -134,8 +157,8 @@ impl TokenTreeBuilder {
|
|||||||
pub fn spanned_eval_op(
|
pub fn spanned_eval_op(
|
||||||
input: impl Into<EvaluationOperator>,
|
input: impl Into<EvaluationOperator>,
|
||||||
span: impl Into<Span>,
|
span: impl Into<Span>,
|
||||||
) -> TokenNode {
|
) -> SpannedToken {
|
||||||
TokenNode::Token(UnspannedToken::EvaluationOperator(input.into()).into_token(span))
|
Token::EvaluationOperator(input.into()).into_spanned(span)
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn string(input: impl Into<String>) -> CurriedToken {
|
pub fn string(input: impl Into<String>) -> CurriedToken {
|
||||||
@ -154,8 +177,8 @@ impl TokenTreeBuilder {
|
|||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn spanned_string(input: impl Into<Span>, span: impl Into<Span>) -> TokenNode {
|
pub fn spanned_string(input: impl Into<Span>, span: impl Into<Span>) -> SpannedToken {
|
||||||
TokenNode::Token(UnspannedToken::String(input.into()).into_token(span))
|
Token::String(input.into()).into_spanned(span)
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn bare(input: impl Into<String>) -> CurriedToken {
|
pub fn bare(input: impl Into<String>) -> CurriedToken {
|
||||||
@ -169,8 +192,8 @@ impl TokenTreeBuilder {
|
|||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn spanned_bare(span: impl Into<Span>) -> TokenNode {
|
pub fn spanned_bare(span: impl Into<Span>) -> SpannedToken {
|
||||||
TokenNode::Token(UnspannedToken::Bare.into_token(span))
|
Token::Bare.into_spanned(span)
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn pattern(input: impl Into<String>) -> CurriedToken {
|
pub fn pattern(input: impl Into<String>) -> CurriedToken {
|
||||||
@ -184,8 +207,8 @@ impl TokenTreeBuilder {
|
|||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn spanned_pattern(input: impl Into<Span>) -> TokenNode {
|
pub fn spanned_pattern(input: impl Into<Span>) -> SpannedToken {
|
||||||
TokenNode::Token(UnspannedToken::GlobPattern.into_token(input))
|
Token::GlobPattern.into_spanned(input)
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn external_word(input: impl Into<String>) -> CurriedToken {
|
pub fn external_word(input: impl Into<String>) -> CurriedToken {
|
||||||
@ -199,8 +222,8 @@ impl TokenTreeBuilder {
|
|||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn spanned_external_word(input: impl Into<Span>) -> TokenNode {
|
pub fn spanned_external_word(input: impl Into<Span>) -> SpannedToken {
|
||||||
TokenNode::Token(UnspannedToken::ExternalWord.into_token(input))
|
Token::ExternalWord.into_spanned(input)
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn external_command(input: impl Into<String>) -> CurriedToken {
|
pub fn external_command(input: impl Into<String>) -> CurriedToken {
|
||||||
@ -218,8 +241,11 @@ impl TokenTreeBuilder {
|
|||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn spanned_external_command(inner: impl Into<Span>, outer: impl Into<Span>) -> TokenNode {
|
pub fn spanned_external_command(
|
||||||
TokenNode::Token(UnspannedToken::ExternalCommand(inner.into()).into_token(outer))
|
inner: impl Into<Span>,
|
||||||
|
outer: impl Into<Span>,
|
||||||
|
) -> SpannedToken {
|
||||||
|
Token::ExternalCommand(inner.into()).into_spanned(outer)
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn int(input: impl Into<BigInt>) -> CurriedToken {
|
pub fn int(input: impl Into<BigInt>) -> CurriedToken {
|
||||||
@ -250,8 +276,8 @@ impl TokenTreeBuilder {
|
|||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn spanned_number(input: impl Into<RawNumber>, span: impl Into<Span>) -> TokenNode {
|
pub fn spanned_number(input: impl Into<RawNumber>, span: impl Into<Span>) -> SpannedToken {
|
||||||
TokenNode::Token(UnspannedToken::Number(input.into()).into_token(span))
|
Token::Number(input.into()).into_spanned(span)
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn var(input: impl Into<String>) -> CurriedToken {
|
pub fn var(input: impl Into<String>) -> CurriedToken {
|
||||||
@ -265,8 +291,21 @@ impl TokenTreeBuilder {
|
|||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn spanned_var(input: impl Into<Span>, span: impl Into<Span>) -> TokenNode {
|
pub fn spanned_var(input: impl Into<Span>, span: impl Into<Span>) -> SpannedToken {
|
||||||
TokenNode::Token(UnspannedToken::Variable(input.into()).into_token(span))
|
Token::Variable(input.into()).into_spanned(span)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn it_var() -> CurriedToken {
|
||||||
|
Box::new(move |b| {
|
||||||
|
let (start, _) = b.consume("$");
|
||||||
|
let (inner_start, end) = b.consume("it");
|
||||||
|
|
||||||
|
TokenTreeBuilder::spanned_it_var(Span::new(inner_start, end), Span::new(start, end))
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn spanned_it_var(input: impl Into<Span>, span: impl Into<Span>) -> SpannedToken {
|
||||||
|
Token::ItVariable(input.into()).into_spanned(span)
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn flag(input: impl Into<String>) -> CurriedToken {
|
pub fn flag(input: impl Into<String>) -> CurriedToken {
|
||||||
@ -280,8 +319,9 @@ impl TokenTreeBuilder {
|
|||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn spanned_flag(input: impl Into<Span>, span: impl Into<Span>) -> TokenNode {
|
pub fn spanned_flag(input: impl Into<Span>, span: impl Into<Span>) -> SpannedToken {
|
||||||
TokenNode::Flag(Flag::new(FlagKind::Longhand, input.into(), span.into()))
|
let span = span.into();
|
||||||
|
Token::Flag(Flag::new(FlagKind::Longhand, input.into())).into_spanned(span)
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn shorthand(input: impl Into<String>) -> CurriedToken {
|
pub fn shorthand(input: impl Into<String>) -> CurriedToken {
|
||||||
@ -295,8 +335,10 @@ impl TokenTreeBuilder {
|
|||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn spanned_shorthand(input: impl Into<Span>, span: impl Into<Span>) -> TokenNode {
|
pub fn spanned_shorthand(input: impl Into<Span>, span: impl Into<Span>) -> SpannedToken {
|
||||||
TokenNode::Flag(Flag::new(FlagKind::Shorthand, input.into(), span.into()))
|
let span = span.into();
|
||||||
|
|
||||||
|
Token::Flag(Flag::new(FlagKind::Shorthand, input.into())).into_spanned(span)
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn call(head: CurriedToken, input: Vec<CurriedToken>) -> CurriedCall {
|
pub fn call(head: CurriedToken, input: Vec<CurriedToken>) -> CurriedCall {
|
||||||
@ -316,7 +358,7 @@ impl TokenTreeBuilder {
|
|||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn spanned_call(input: Vec<TokenNode>, span: impl Into<Span>) -> Spanned<CallNode> {
|
pub fn spanned_call(input: Vec<SpannedToken>, span: impl Into<Span>) -> Spanned<CallNode> {
|
||||||
if input.is_empty() {
|
if input.is_empty() {
|
||||||
panic!("BUG: spanned call (TODO)")
|
panic!("BUG: spanned call (TODO)")
|
||||||
}
|
}
|
||||||
@ -337,7 +379,7 @@ impl TokenTreeBuilder {
|
|||||||
input: Vec<CurriedToken>,
|
input: Vec<CurriedToken>,
|
||||||
_open: &str,
|
_open: &str,
|
||||||
_close: &str,
|
_close: &str,
|
||||||
) -> (Span, Span, Span, Vec<TokenNode>) {
|
) -> (Span, Span, Span, Vec<SpannedToken>) {
|
||||||
let (start_open_paren, end_open_paren) = self.consume("(");
|
let (start_open_paren, end_open_paren) = self.consume("(");
|
||||||
let mut output = vec![];
|
let mut output = vec![];
|
||||||
for item in input {
|
for item in input {
|
||||||
@ -362,13 +404,12 @@ impl TokenTreeBuilder {
|
|||||||
}
|
}
|
||||||
|
|
||||||
pub fn spanned_parens(
|
pub fn spanned_parens(
|
||||||
input: impl Into<Vec<TokenNode>>,
|
input: impl Into<Vec<SpannedToken>>,
|
||||||
spans: (Span, Span),
|
spans: (Span, Span),
|
||||||
span: impl Into<Span>,
|
span: impl Into<Span>,
|
||||||
) -> TokenNode {
|
) -> SpannedToken {
|
||||||
TokenNode::Delimited(
|
Token::Delimited(DelimitedNode::new(Delimiter::Paren, spans, input.into()))
|
||||||
DelimitedNode::new(Delimiter::Paren, spans, input.into()).spanned(span.into()),
|
.into_spanned(span.into())
|
||||||
)
|
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn square(input: Vec<CurriedToken>) -> CurriedToken {
|
pub fn square(input: Vec<CurriedToken>) -> CurriedToken {
|
||||||
@ -380,13 +421,12 @@ impl TokenTreeBuilder {
|
|||||||
}
|
}
|
||||||
|
|
||||||
pub fn spanned_square(
|
pub fn spanned_square(
|
||||||
input: impl Into<Vec<TokenNode>>,
|
input: impl Into<Vec<SpannedToken>>,
|
||||||
spans: (Span, Span),
|
spans: (Span, Span),
|
||||||
span: impl Into<Span>,
|
span: impl Into<Span>,
|
||||||
) -> TokenNode {
|
) -> SpannedToken {
|
||||||
TokenNode::Delimited(
|
Token::Delimited(DelimitedNode::new(Delimiter::Square, spans, input.into()))
|
||||||
DelimitedNode::new(Delimiter::Square, spans, input.into()).spanned(span.into()),
|
.into_spanned(span)
|
||||||
)
|
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn braced(input: Vec<CurriedToken>) -> CurriedToken {
|
pub fn braced(input: Vec<CurriedToken>) -> CurriedToken {
|
||||||
@ -398,19 +438,18 @@ impl TokenTreeBuilder {
|
|||||||
}
|
}
|
||||||
|
|
||||||
pub fn spanned_brace(
|
pub fn spanned_brace(
|
||||||
input: impl Into<Vec<TokenNode>>,
|
input: impl Into<Vec<SpannedToken>>,
|
||||||
spans: (Span, Span),
|
spans: (Span, Span),
|
||||||
span: impl Into<Span>,
|
span: impl Into<Span>,
|
||||||
) -> TokenNode {
|
) -> SpannedToken {
|
||||||
TokenNode::Delimited(
|
Token::Delimited(DelimitedNode::new(Delimiter::Brace, spans, input.into()))
|
||||||
DelimitedNode::new(Delimiter::Brace, spans, input.into()).spanned(span.into()),
|
.into_spanned(span)
|
||||||
)
|
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn sp() -> CurriedToken {
|
pub fn sp() -> CurriedToken {
|
||||||
Box::new(|b| {
|
Box::new(|b| {
|
||||||
let (start, end) = b.consume(" ");
|
let (start, end) = b.consume(" ");
|
||||||
TokenNode::Whitespace(Span::new(start, end))
|
Token::Whitespace.into_spanned((start, end))
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -423,8 +462,8 @@ impl TokenTreeBuilder {
|
|||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn spanned_ws(span: impl Into<Span>) -> TokenNode {
|
pub fn spanned_ws(span: impl Into<Span>) -> SpannedToken {
|
||||||
TokenNode::Whitespace(span.into())
|
Token::Whitespace.into_spanned(span)
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn sep(input: impl Into<String>) -> CurriedToken {
|
pub fn sep(input: impl Into<String>) -> CurriedToken {
|
||||||
@ -436,8 +475,8 @@ impl TokenTreeBuilder {
|
|||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn spanned_sep(span: impl Into<Span>) -> TokenNode {
|
pub fn spanned_sep(span: impl Into<Span>) -> SpannedToken {
|
||||||
TokenNode::Separator(span.into())
|
Token::Separator.into_spanned(span)
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn comment(input: impl Into<String>) -> CurriedToken {
|
pub fn comment(input: impl Into<String>) -> CurriedToken {
|
||||||
@ -453,8 +492,10 @@ impl TokenTreeBuilder {
|
|||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn spanned_comment(input: impl Into<Span>, span: impl Into<Span>) -> TokenNode {
|
pub fn spanned_comment(input: impl Into<Span>, span: impl Into<Span>) -> SpannedToken {
|
||||||
TokenNode::Comment(Comment::line(input, span))
|
let span = span.into();
|
||||||
|
|
||||||
|
Token::Comment(Comment::line(input)).into_spanned(span)
|
||||||
}
|
}
|
||||||
|
|
||||||
fn consume(&mut self, input: &str) -> (usize, usize) {
|
fn consume(&mut self, input: &str) -> (usize, usize) {
|
||||||
|
@ -1,81 +1,102 @@
|
|||||||
use crate::hir::syntax_shape::{
|
use crate::hir::syntax_shape::{
|
||||||
color_fallible_syntax, color_syntax, expand_expr, flat_shape::FlatShape, spaced,
|
BackoffColoringMode, ExpandSyntax, MaybeSpaceShape, MaybeWhitespaceEof,
|
||||||
BackoffColoringMode, ColorSyntax, MaybeSpaceShape,
|
|
||||||
};
|
};
|
||||||
|
use crate::hir::SpannedExpression;
|
||||||
use crate::TokensIterator;
|
use crate::TokensIterator;
|
||||||
use crate::{
|
use crate::{
|
||||||
hir::{self, ExpandContext, NamedArguments},
|
hir::{self, NamedArguments},
|
||||||
Flag,
|
Flag,
|
||||||
};
|
};
|
||||||
use log::trace;
|
use log::trace;
|
||||||
use nu_source::{PrettyDebugWithSource, Span, Spanned, SpannedItem, Text};
|
|
||||||
|
|
||||||
use nu_errors::{ArgumentError, ParseError};
|
use nu_errors::{ArgumentError, ParseError};
|
||||||
use nu_protocol::{NamedType, PositionalType, Signature};
|
use nu_protocol::{NamedType, PositionalType, Signature, SyntaxShape};
|
||||||
|
use nu_source::{HasFallibleSpan, HasSpan, PrettyDebugWithSource, Span, Spanned, SpannedItem};
|
||||||
|
|
||||||
|
type OptionalHeadTail = (Option<Vec<hir::SpannedExpression>>, Option<NamedArguments>);
|
||||||
|
|
||||||
pub fn parse_command_tail(
|
pub fn parse_command_tail(
|
||||||
config: &Signature,
|
config: &Signature,
|
||||||
context: &ExpandContext,
|
|
||||||
tail: &mut TokensIterator,
|
tail: &mut TokensIterator,
|
||||||
command_span: Span,
|
command_span: Span,
|
||||||
) -> Result<Option<(Option<Vec<hir::Expression>>, Option<NamedArguments>)>, ParseError> {
|
) -> Result<Option<OptionalHeadTail>, ParseError> {
|
||||||
let mut named = NamedArguments::new();
|
let mut named = NamedArguments::new();
|
||||||
trace_remaining("nodes", &tail, context.source());
|
let mut found_error: Option<ParseError> = None;
|
||||||
|
let mut rest_signature = config.clone();
|
||||||
|
|
||||||
|
trace!(target: "nu::parse::trace_remaining", "");
|
||||||
|
|
||||||
|
trace_remaining("nodes", &tail);
|
||||||
|
|
||||||
for (name, kind) in &config.named {
|
for (name, kind) in &config.named {
|
||||||
trace!(target: "nu::parse", "looking for {} : {:?}", name, kind);
|
trace!(target: "nu::parse::trace_remaining", "looking for {} : {:?}", name, kind);
|
||||||
|
|
||||||
|
tail.move_to(0);
|
||||||
|
|
||||||
match &kind.0 {
|
match &kind.0 {
|
||||||
NamedType::Switch => {
|
NamedType::Switch => {
|
||||||
let flag = extract_switch(name, tail, context.source());
|
let switch = extract_switch(name, tail);
|
||||||
|
|
||||||
named.insert_switch(name, flag);
|
match switch {
|
||||||
|
None => named.insert_switch(name, None),
|
||||||
|
Some((_, flag)) => {
|
||||||
|
named.insert_switch(name, Some(*flag));
|
||||||
|
rest_signature.remove_named(name);
|
||||||
|
tail.color_shape(flag.color(flag.span));
|
||||||
|
}
|
||||||
|
}
|
||||||
}
|
}
|
||||||
NamedType::Mandatory(syntax_type) => {
|
NamedType::Mandatory(syntax_type) => {
|
||||||
match extract_mandatory(config, name, tail, context.source(), command_span) {
|
match extract_mandatory(config, name, tail, command_span) {
|
||||||
Err(err) => return Err(err), // produce a correct diagnostic
|
Err(err) => {
|
||||||
|
// remember this error, but continue coloring
|
||||||
|
found_error = Some(err);
|
||||||
|
}
|
||||||
Ok((pos, flag)) => {
|
Ok((pos, flag)) => {
|
||||||
tail.move_to(pos);
|
let result = expand_flag(tail, *syntax_type, flag, pos);
|
||||||
|
|
||||||
if tail.at_end() {
|
tail.move_to(0);
|
||||||
return Err(ParseError::argument_error(
|
|
||||||
config.name.clone().spanned(flag.span),
|
match result {
|
||||||
ArgumentError::MissingValueForName(name.to_string()),
|
Ok(expr) => {
|
||||||
));
|
named.insert_mandatory(name, expr);
|
||||||
|
rest_signature.remove_named(name);
|
||||||
|
}
|
||||||
|
Err(_) => {
|
||||||
|
found_error = Some(ParseError::argument_error(
|
||||||
|
config.name.clone().spanned(flag.span),
|
||||||
|
ArgumentError::MissingValueForName(name.to_string()),
|
||||||
|
))
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
let expr = expand_expr(&spaced(*syntax_type), tail, context)?;
|
|
||||||
|
|
||||||
tail.restart();
|
|
||||||
named.insert_mandatory(name, expr);
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
NamedType::Optional(syntax_type) => {
|
NamedType::Optional(syntax_type) => {
|
||||||
match extract_optional(name, tail, context.source()) {
|
match extract_optional(name, tail) {
|
||||||
Err(err) => return Err(err), // produce a correct diagnostic
|
Err(err) => {
|
||||||
|
// remember this error, but continue coloring
|
||||||
|
found_error = Some(err);
|
||||||
|
}
|
||||||
Ok(Some((pos, flag))) => {
|
Ok(Some((pos, flag))) => {
|
||||||
tail.move_to(pos);
|
let result = expand_flag(tail, *syntax_type, flag, pos);
|
||||||
|
|
||||||
if tail.at_end() {
|
tail.move_to(0);
|
||||||
return Err(ParseError::argument_error(
|
|
||||||
config.name.clone().spanned(flag.span),
|
match result {
|
||||||
ArgumentError::MissingValueForName(name.to_string()),
|
Ok(expr) => {
|
||||||
));
|
named.insert_optional(name, Some(expr));
|
||||||
|
rest_signature.remove_named(name);
|
||||||
|
}
|
||||||
|
Err(_) => {
|
||||||
|
found_error = Some(ParseError::argument_error(
|
||||||
|
config.name.clone().spanned(flag.span),
|
||||||
|
ArgumentError::MissingValueForName(name.to_string()),
|
||||||
|
))
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
let expr = expand_expr(&spaced(*syntax_type), tail, context);
|
|
||||||
|
|
||||||
match expr {
|
|
||||||
Err(_) => named.insert_optional(name, None),
|
|
||||||
Ok(expr) => named.insert_optional(name, Some(expr)),
|
|
||||||
}
|
|
||||||
|
|
||||||
tail.restart();
|
|
||||||
}
|
}
|
||||||
|
|
||||||
Ok(None) => {
|
Ok(None) => {
|
||||||
tail.restart();
|
|
||||||
named.insert_optional(name, None);
|
named.insert_optional(name, None);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@ -83,56 +104,66 @@ pub fn parse_command_tail(
|
|||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
trace_remaining("after named", &tail, context.source());
|
trace_remaining("after named", &tail);
|
||||||
|
|
||||||
let mut positional = vec![];
|
let mut positional = vec![];
|
||||||
|
|
||||||
for arg in &config.positional {
|
match continue_parsing_positionals(&config, tail, &mut rest_signature, command_span) {
|
||||||
trace!(target: "nu::parse", "Processing positional {:?}", arg);
|
Ok(positionals) => {
|
||||||
|
positional = positionals;
|
||||||
match &arg.0 {
|
}
|
||||||
PositionalType::Mandatory(..) => {
|
Err(reason) => {
|
||||||
if tail.at_end_possible_ws() {
|
if found_error.is_none() && !tail.source().contains("help") {
|
||||||
return Err(ParseError::argument_error(
|
found_error = Some(reason);
|
||||||
config.name.clone().spanned(command_span),
|
|
||||||
ArgumentError::MissingMandatoryPositional(arg.0.name().to_string()),
|
|
||||||
));
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
PositionalType::Optional(..) => {
|
|
||||||
if tail.at_end_possible_ws() {
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
let result = expand_expr(&spaced(arg.0.syntax_type()), tail, context)?;
|
|
||||||
|
|
||||||
positional.push(result);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
trace_remaining("after positional", &tail, context.source());
|
trace_remaining("after positional", &tail);
|
||||||
|
|
||||||
if let Some((syntax_type, _)) = config.rest_positional {
|
if let Some((syntax_type, _)) = config.rest_positional {
|
||||||
let mut out = vec![];
|
let mut out = vec![];
|
||||||
|
|
||||||
loop {
|
loop {
|
||||||
if tail.at_end_possible_ws() {
|
if found_error.is_some() {
|
||||||
break;
|
break;
|
||||||
}
|
}
|
||||||
|
|
||||||
let next = expand_expr(&spaced(syntax_type), tail, context)?;
|
tail.move_to(0);
|
||||||
|
|
||||||
out.push(next);
|
trace_remaining("start rest", &tail);
|
||||||
|
eat_any_whitespace(tail);
|
||||||
|
trace_remaining("after whitespace", &tail);
|
||||||
|
|
||||||
|
if tail.at_end() {
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
|
||||||
|
match tail.expand_syntax(syntax_type) {
|
||||||
|
Err(err) => found_error = Some(err),
|
||||||
|
Ok(next) => out.push(next),
|
||||||
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
positional.extend(out);
|
positional.extend(out);
|
||||||
}
|
}
|
||||||
|
|
||||||
trace_remaining("after rest", &tail, context.source());
|
eat_any_whitespace(tail);
|
||||||
|
|
||||||
trace!(target: "nu::parse", "Constructed positional={:?} named={:?}", positional, named);
|
// Consume any remaining tokens with backoff coloring mode
|
||||||
|
tail.expand_infallible(BackoffColoringMode::new(rest_signature.allowed()));
|
||||||
|
|
||||||
|
// This is pretty dubious, but it works. We should look into a better algorithm that doesn't end up requiring
|
||||||
|
// this solution.
|
||||||
|
tail.sort_shapes();
|
||||||
|
|
||||||
|
if let Some(err) = found_error {
|
||||||
|
return Err(err);
|
||||||
|
}
|
||||||
|
|
||||||
|
trace_remaining("after rest", &tail);
|
||||||
|
|
||||||
|
trace!(target: "nu::parse::trace_remaining", "Constructed positional={:?} named={:?}", positional, named);
|
||||||
|
|
||||||
let positional = if positional.is_empty() {
|
let positional = if positional.is_empty() {
|
||||||
None
|
None
|
||||||
@ -148,237 +179,111 @@ pub fn parse_command_tail(
|
|||||||
Some(named)
|
Some(named)
|
||||||
};
|
};
|
||||||
|
|
||||||
trace!(target: "nu::parse", "Normalized positional={:?} named={:?}", positional, named);
|
trace!(target: "nu::parse::trace_remaining", "Normalized positional={:?} named={:?}", positional, named);
|
||||||
|
|
||||||
Ok(Some((positional, named)))
|
Ok(Some((positional, named)))
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Debug)]
|
pub fn continue_parsing_positionals(
|
||||||
struct ColoringArgs {
|
config: &Signature,
|
||||||
vec: Vec<Option<Vec<Spanned<FlatShape>>>>,
|
tail: &mut TokensIterator,
|
||||||
}
|
rest_signature: &mut Signature,
|
||||||
|
command_span: Span,
|
||||||
|
) -> Result<Vec<SpannedExpression>, ParseError> {
|
||||||
|
let mut positional = vec![];
|
||||||
|
|
||||||
impl ColoringArgs {
|
for arg in &config.positional {
|
||||||
fn new(len: usize) -> ColoringArgs {
|
trace!(target: "nu::parse::trace_remaining", "Processing positional {:?}", arg);
|
||||||
let vec = vec![None; len];
|
|
||||||
ColoringArgs { vec }
|
|
||||||
}
|
|
||||||
|
|
||||||
fn insert(&mut self, pos: usize, shapes: Vec<Spanned<FlatShape>>) {
|
tail.move_to(0);
|
||||||
self.vec[pos] = Some(shapes);
|
|
||||||
}
|
|
||||||
|
|
||||||
fn spread_shapes(self, shapes: &mut Vec<Spanned<FlatShape>>) {
|
let result = expand_spaced_expr(arg.0.syntax_type(), tail);
|
||||||
for item in self.vec {
|
|
||||||
match item {
|
|
||||||
None => {}
|
|
||||||
Some(vec) => {
|
|
||||||
shapes.extend(vec);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug, Copy, Clone)]
|
match result {
|
||||||
pub struct CommandTailShape;
|
Err(_) => match &arg.0 {
|
||||||
|
|
||||||
impl ColorSyntax for CommandTailShape {
|
|
||||||
type Info = ();
|
|
||||||
type Input = Signature;
|
|
||||||
|
|
||||||
fn name(&self) -> &'static str {
|
|
||||||
"CommandTailShape"
|
|
||||||
}
|
|
||||||
|
|
||||||
fn color_syntax<'a, 'b>(
|
|
||||||
&self,
|
|
||||||
signature: &Signature,
|
|
||||||
token_nodes: &'b mut TokensIterator<'a>,
|
|
||||||
context: &ExpandContext,
|
|
||||||
) -> Self::Info {
|
|
||||||
use nu_protocol::SyntaxShape;
|
|
||||||
|
|
||||||
let mut args = ColoringArgs::new(token_nodes.len());
|
|
||||||
trace_remaining("nodes", &token_nodes, context.source());
|
|
||||||
|
|
||||||
fn insert_flag(
|
|
||||||
token_nodes: &mut TokensIterator,
|
|
||||||
syntax_type: SyntaxShape,
|
|
||||||
args: &mut ColoringArgs,
|
|
||||||
flag: Flag,
|
|
||||||
pos: usize,
|
|
||||||
context: &ExpandContext,
|
|
||||||
) {
|
|
||||||
let (_, shapes) = token_nodes.atomic_returning_shapes(|token_nodes| {
|
|
||||||
token_nodes.color_shape(flag.color());
|
|
||||||
token_nodes.move_to(pos);
|
|
||||||
|
|
||||||
if token_nodes.at_end() {
|
|
||||||
return Ok(());
|
|
||||||
}
|
|
||||||
|
|
||||||
// We still want to color the flag even if the following tokens don't match, so don't
|
|
||||||
// propagate the error to the parent atomic block if it fails
|
|
||||||
let _ = token_nodes.atomic(|token_nodes| {
|
|
||||||
// We can live with unmatched syntax after a mandatory flag
|
|
||||||
color_syntax(&MaybeSpaceShape, token_nodes, context);
|
|
||||||
|
|
||||||
// If the part after a mandatory flag isn't present, that's ok, but we
|
|
||||||
// should roll back any whitespace we chomped
|
|
||||||
color_fallible_syntax(&syntax_type, token_nodes, context)?;
|
|
||||||
|
|
||||||
Ok(())
|
|
||||||
});
|
|
||||||
|
|
||||||
Ok(())
|
|
||||||
});
|
|
||||||
|
|
||||||
args.insert(pos, shapes);
|
|
||||||
token_nodes.restart();
|
|
||||||
}
|
|
||||||
|
|
||||||
for (name, kind) in &signature.named {
|
|
||||||
trace!(target: "nu::color_syntax", "looking for {} : {:?}", name, kind);
|
|
||||||
|
|
||||||
match &kind.0 {
|
|
||||||
NamedType::Switch => {
|
|
||||||
if let Some((pos, flag)) =
|
|
||||||
token_nodes.extract(|t| t.as_flag(name, context.source()))
|
|
||||||
{
|
|
||||||
args.insert(pos, vec![flag.color()])
|
|
||||||
}
|
|
||||||
}
|
|
||||||
NamedType::Mandatory(syntax_type) => {
|
|
||||||
match extract_mandatory(
|
|
||||||
signature,
|
|
||||||
name,
|
|
||||||
token_nodes,
|
|
||||||
context.source(),
|
|
||||||
Span::unknown(),
|
|
||||||
) {
|
|
||||||
Err(_) => {
|
|
||||||
// The mandatory flag didn't exist at all, so there's nothing to color
|
|
||||||
}
|
|
||||||
Ok((pos, flag)) => {
|
|
||||||
insert_flag(token_nodes, *syntax_type, &mut args, flag, pos, context)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
NamedType::Optional(syntax_type) => {
|
|
||||||
match extract_optional(name, token_nodes, context.source()) {
|
|
||||||
Err(_) => {
|
|
||||||
// The optional flag didn't exist at all, so there's nothing to color
|
|
||||||
}
|
|
||||||
Ok(Some((pos, flag))) => {
|
|
||||||
insert_flag(token_nodes, *syntax_type, &mut args, flag, pos, context)
|
|
||||||
}
|
|
||||||
|
|
||||||
Ok(None) => {
|
|
||||||
token_nodes.restart();
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
trace_remaining("after named", &token_nodes, context.source());
|
|
||||||
|
|
||||||
for arg in &signature.positional {
|
|
||||||
trace!("Processing positional {:?}", arg);
|
|
||||||
|
|
||||||
match &arg.0 {
|
|
||||||
PositionalType::Mandatory(..) => {
|
PositionalType::Mandatory(..) => {
|
||||||
if token_nodes.at_end() {
|
return Err(ParseError::argument_error(
|
||||||
break;
|
config.name.clone().spanned(command_span),
|
||||||
}
|
ArgumentError::MissingMandatoryPositional(arg.0.name().to_string()),
|
||||||
|
))
|
||||||
}
|
}
|
||||||
|
|
||||||
PositionalType::Optional(..) => {
|
PositionalType::Optional(..) => {
|
||||||
if token_nodes.at_end() {
|
if tail.expand_syntax(MaybeWhitespaceEof).is_ok() {
|
||||||
break;
|
break;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
},
|
||||||
|
Ok(result) => {
|
||||||
let pos = token_nodes.pos(false);
|
rest_signature.shift_positional();
|
||||||
|
positional.push(result);
|
||||||
match pos {
|
|
||||||
None => break,
|
|
||||||
Some(pos) => {
|
|
||||||
// We can live with an unmatched positional argument. Hopefully it will be
|
|
||||||
// matched by a future token
|
|
||||||
let (_, shapes) = token_nodes.atomic_returning_shapes(|token_nodes| {
|
|
||||||
color_syntax(&MaybeSpaceShape, token_nodes, context);
|
|
||||||
|
|
||||||
// If no match, we should roll back any whitespace we chomped
|
|
||||||
color_fallible_syntax(&arg.0.syntax_type(), token_nodes, context)?;
|
|
||||||
|
|
||||||
Ok(())
|
|
||||||
});
|
|
||||||
|
|
||||||
args.insert(pos, shapes);
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
}
|
||||||
|
|
||||||
trace_remaining("after positional", &token_nodes, context.source());
|
Ok(positional)
|
||||||
|
}
|
||||||
|
|
||||||
if let Some((syntax_type, _)) = signature.rest_positional {
|
fn eat_any_whitespace(tail: &mut TokensIterator) {
|
||||||
loop {
|
loop {
|
||||||
if token_nodes.at_end_possible_ws() {
|
match tail.expand_infallible(MaybeSpaceShape) {
|
||||||
break;
|
None => break,
|
||||||
}
|
Some(_) => continue,
|
||||||
|
|
||||||
let pos = token_nodes.pos(false);
|
|
||||||
|
|
||||||
match pos {
|
|
||||||
None => break,
|
|
||||||
Some(pos) => {
|
|
||||||
// If any arguments don't match, we'll fall back to backoff coloring mode
|
|
||||||
let (result, shapes) = token_nodes.atomic_returning_shapes(|token_nodes| {
|
|
||||||
color_syntax(&MaybeSpaceShape, token_nodes, context);
|
|
||||||
|
|
||||||
// If no match, we should roll back any whitespace we chomped
|
|
||||||
color_fallible_syntax(&syntax_type, token_nodes, context)?;
|
|
||||||
|
|
||||||
Ok(())
|
|
||||||
});
|
|
||||||
|
|
||||||
args.insert(pos, shapes);
|
|
||||||
|
|
||||||
match result {
|
|
||||||
Err(_) => break,
|
|
||||||
Ok(_) => continue,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
token_nodes.silently_mutate_shapes(|shapes| args.spread_shapes(shapes));
|
|
||||||
|
|
||||||
// Consume any remaining tokens with backoff coloring mode
|
|
||||||
color_syntax(&BackoffColoringMode, token_nodes, context);
|
|
||||||
|
|
||||||
// This is pretty dubious, but it works. We should look into a better algorithm that doesn't end up requiring
|
|
||||||
// this solution.
|
|
||||||
token_nodes.sort_shapes()
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fn extract_switch(name: &str, tokens: &mut hir::TokensIterator<'_>, source: &Text) -> Option<Flag> {
|
fn expand_flag(
|
||||||
tokens.extract(|t| t.as_flag(name, source)).map(|f| f.1)
|
token_nodes: &mut TokensIterator,
|
||||||
|
syntax_type: SyntaxShape,
|
||||||
|
flag: Spanned<Flag>,
|
||||||
|
pos: usize,
|
||||||
|
) -> Result<SpannedExpression, ()> {
|
||||||
|
token_nodes.color_shape(flag.color(flag.span));
|
||||||
|
|
||||||
|
let result = token_nodes.atomic_parse(|token_nodes| {
|
||||||
|
token_nodes.move_to(pos);
|
||||||
|
|
||||||
|
if token_nodes.at_end() {
|
||||||
|
return Err(ParseError::unexpected_eof("flag", Span::unknown()));
|
||||||
|
}
|
||||||
|
|
||||||
|
let expr = expand_spaced_expr(syntax_type, token_nodes)?;
|
||||||
|
|
||||||
|
Ok(expr)
|
||||||
|
});
|
||||||
|
|
||||||
|
let expr = result.map_err(|_| ())?;
|
||||||
|
Ok(expr)
|
||||||
|
}
|
||||||
|
|
||||||
|
fn expand_spaced_expr<
|
||||||
|
T: HasFallibleSpan + PrettyDebugWithSource + Clone + std::fmt::Debug + 'static,
|
||||||
|
>(
|
||||||
|
syntax: impl ExpandSyntax<Output = Result<T, ParseError>>,
|
||||||
|
token_nodes: &mut TokensIterator,
|
||||||
|
) -> Result<T, ParseError> {
|
||||||
|
token_nodes.atomic_parse(|token_nodes| {
|
||||||
|
token_nodes.expand_infallible(MaybeSpaceShape);
|
||||||
|
token_nodes.expand_syntax(syntax)
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
fn extract_switch(
|
||||||
|
name: &str,
|
||||||
|
tokens: &mut hir::TokensIterator<'_>,
|
||||||
|
) -> Option<(usize, Spanned<Flag>)> {
|
||||||
|
let source = tokens.source();
|
||||||
|
tokens.extract(|t| t.as_flag(name, &source).map(|flag| flag.spanned(t.span())))
|
||||||
}
|
}
|
||||||
|
|
||||||
fn extract_mandatory(
    config: &Signature,
    name: &str,
    tokens: &mut hir::TokensIterator<'_>,
-    source: &Text,
    span: Span,
-) -> Result<(usize, Flag), ParseError> {
+) -> Result<(usize, Spanned<Flag>), ParseError> {
-    let flag = tokens.extract(|t| t.as_flag(name, source));
+    let source = tokens.source();
+    let flag = tokens.extract(|t| t.as_flag(name, &source).map(|flag| flag.spanned(t.span())));

    match flag {
        None => Err(ParseError::argument_error(
@ -396,9 +301,9 @@ fn extract_mandatory(
fn extract_optional(
    name: &str,
    tokens: &mut hir::TokensIterator<'_>,
-    source: &Text,
+) -> Result<Option<(usize, Spanned<Flag>)>, ParseError> {
-) -> Result<Option<(usize, Flag)>, ParseError> {
+    let source = tokens.source();
-    let flag = tokens.extract(|t| t.as_flag(name, source));
+    let flag = tokens.extract(|t| t.as_flag(name, &source).map(|flag| flag.spanned(t.span())));

    match flag {
        None => Ok(None),
@ -409,15 +314,24 @@ fn extract_optional(
}

-pub fn trace_remaining(desc: &'static str, tail: &hir::TokensIterator<'_>, source: &Text) {
+pub fn trace_remaining(desc: &'static str, tail: &hir::TokensIterator<'_>) {
+    let offset = tail.clone().span_at_cursor();
+    let source = tail.source();

    trace!(
-        target: "nu::parse",
+        target: "nu::parse::trace_remaining",
-        "{} = {:?}",
+        "{} = {}",
        desc,
        itertools::join(
            tail.debug_remaining()
                .iter()
-                .map(|i| format!("%{}%", i.debug(source))),
+                .map(|val| {
+                    if val.span().start() == offset.start() {
+                        format!("<|> %{}%", val.debug(&source))
+                    } else {
+                        format!("%{}%", val.debug(&source))
+                    }
+                }),
            " "
        )
    );
||||||
|
@@ -1,6 +1,6 @@
[package]
name = "nu-plugin"
-version = "0.8.0"
+version = "0.9.0"
authors = ["Yehuda Katz <wycats@gmail.com>", "Jonathan Turner <jonathan.d.turner@gmail.com>", "Andrés N. Robalino <andres@androbtech.com>"]
edition = "2018"
description = "Nushell Plugin"
@@ -10,10 +10,10 @@ license = "MIT"
doctest = false

[dependencies]
-nu-protocol = { path = "../nu-protocol", version = "0.8.0" }
+nu-protocol = { path = "../nu-protocol", version = "0.9.0" }
-nu-source = { path = "../nu-source", version = "0.8.0" }
+nu-source = { path = "../nu-source", version = "0.9.0" }
-nu-errors = { path = "../nu-errors", version = "0.8.0" }
+nu-errors = { path = "../nu-errors", version = "0.9.0" }
-nu-value-ext = { path = "../nu-value-ext", version = "0.8.0" }
+nu-value-ext = { path = "../nu-value-ext", version = "0.9.0" }

indexmap = { version = "1.3.0", features = ["serde-1"] }
serde = { version = "1.0.103", features = ["derive"] }
@@ -21,4 +21,4 @@ num-bigint = { version = "0.2.3", features = ["serde"] }
serde_json = "1.0.44"

[build-dependencies]
-nu-build = { version = "0.8.0", path = "../nu-build" }
+nu-build = { version = "0.9.0", path = "../nu-build" }
@@ -1,6 +1,6 @@
[package]
name = "nu-protocol"
-version = "0.8.0"
+version = "0.9.0"
authors = ["Yehuda Katz <wycats@gmail.com>", "Jonathan Turner <jonathan.d.turner@gmail.com>", "Andrés N. Robalino <andres@androbtech.com>"]
edition = "2018"
description = "Core values and protocols for Nushell"
@@ -10,8 +10,8 @@ license = "MIT"
doctest = false

[dependencies]
-nu-source = { path = "../nu-source", version = "0.8.0" }
+nu-source = { path = "../nu-source", version = "0.9.0" }
-nu-errors = { path = "../nu-errors", version = "0.8.0" }
+nu-errors = { path = "../nu-errors", version = "0.9.0" }

serde = { version = "1.0.103", features = ["derive"] }
indexmap = { version = "1.3.0", features = ["serde-1"] }
@@ -30,14 +30,12 @@ nom-tracable = "0.4.1"
typetag = "0.1.4"
query_interface = "0.3.5"
byte-unit = "3.0.3"
-chrono-humanize = "0.0.11"
natural = "0.3.0"

# implement conversions
-subprocess = "0.1.18"
serde_yaml = "0.8"
toml = "0.5.5"
serde_json = "1.0.44"

[build-dependencies]
-nu-build = { version = "0.8.0", path = "../nu-build" }
+nu-build = { version = "0.9.0", path = "../nu-build" }
@@ -5,12 +5,19 @@ use nu_errors::ShellError;
use nu_source::Tag;
use serde::{Deserialize, Serialize};

+/// Associated information for the call of a command, including the args passed to the command and a tag that spans the name of the command being called
#[derive(Deserialize, Serialize, Debug, Clone)]
pub struct CallInfo {
+    /// The arguments associated with this call
    pub args: EvaluatedArgs,
+    /// The tag (underline-able position) of the name of the call itself
    pub name_tag: Tag,
}

+/// The set of positional and named arguments, after their values have been evaluated.
+///
+/// * Positional arguments are those who are given as values, without any associated flag. For example, in `foo arg1 arg2`, both `arg1` and `arg2` are positional arguments.
+/// * Named arguments are those associated with a flag. For example, `foo --given bar` the named argument would be name `given` and the value `bar`.
#[derive(Debug, Default, new, Serialize, Deserialize, Clone)]
pub struct EvaluatedArgs {
    pub positional: Option<Vec<Value>>,
@@ -18,6 +25,7 @@ pub struct EvaluatedArgs {
}

impl EvaluatedArgs {
+    /// Retrieve a subset of positional arguments starting at a given position
    pub fn slice_from(&self, from: usize) -> Vec<Value> {
        let positional = &self.positional;

@@ -27,6 +35,7 @@ impl EvaluatedArgs {
        }
    }

+    /// Get the nth positional argument, if possible
    pub fn nth(&self, pos: usize) -> Option<&Value> {
        match &self.positional {
            None => None,
@@ -34,6 +43,7 @@ impl EvaluatedArgs {
        }
    }

+    /// Get the nth positional argument, error if not possible
    pub fn expect_nth(&self, pos: usize) -> Result<&Value, ShellError> {
        match &self.positional {
            None => Err(ShellError::unimplemented("Better error: expect_nth")),
@@ -44,6 +54,7 @@ impl EvaluatedArgs {
        }
    }

+    /// Get the number of positional arguments available
    pub fn len(&self) -> usize {
        match &self.positional {
            None => 0,
@@ -51,10 +62,12 @@ impl EvaluatedArgs {
        }
    }

+    /// Return if there are no positional arguments
    pub fn is_empty(&self) -> bool {
        self.len() == 0
    }

+    /// Return true if the set of named arguments contains the name provided
    pub fn has(&self, name: &str) -> bool {
        match &self.named {
            None => false,
@@ -62,6 +75,7 @@ impl EvaluatedArgs {
        }
    }

+    /// Gets the corresponding Value for the named argument given, if possible
    pub fn get(&self, name: &str) -> Option<&Value> {
        match &self.named {
            None => None,
@@ -69,6 +83,7 @@ impl EvaluatedArgs {
        }
    }

+    /// Iterates over the positional arguments
    pub fn positional_iter(&self) -> PositionalIter<'_> {
        match &self.positional {
            None => PositionalIter::Empty,
@@ -80,6 +95,7 @@ impl EvaluatedArgs {
    }
}

+/// An iterator to help iterate over positional arguments
pub enum PositionalIter<'a> {
    Empty,
    Array(std::slice::Iter<'a, Value>),
@@ -88,6 +104,7 @@ pub enum PositionalIter<'a> {
impl<'a> Iterator for PositionalIter<'a> {
    type Item = &'a Value;

+    /// The required `next` function to implement the Iterator trait
    fn next(&mut self) -> Option<Self::Item> {
        match self {
            PositionalIter::Empty => None,
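Illustrative sketch (not part of the diff): one way a command might consume the EvaluatedArgs accessors documented above. It assumes EvaluatedArgs and Value are re-exported from the nu-protocol crate root.

// Sketch only: walk the positional arguments and check a named flag,
// using the accessors shown in the diff above.
use nu_protocol::{EvaluatedArgs, Value};

fn describe_args(args: &EvaluatedArgs) {
    // Positional arguments, in order
    for (i, value) in args.positional_iter().enumerate() {
        println!("positional #{}: {:?}", i, value.tag());
    }

    // Named arguments are looked up by flag name
    if args.has("path") {
        let path: Option<&Value> = args.get("path");
        println!("--path was passed: {}", path.is_some());
    }
}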
@@ -20,7 +20,7 @@ pub use crate::type_shape::{Row as RowType, Type};
pub use crate::value::column_path::{did_you_mean, ColumnPath, PathMember, UnspannedPathMember};
pub use crate::value::dict::{Dictionary, TaggedDictBuilder};
pub use crate::value::evaluate::{Evaluate, EvaluateTrait, Scope};
-pub use crate::value::primitive::format_primitive;
pub use crate::value::primitive::Primitive;
+pub use crate::value::primitive::{format_date, format_duration, format_primitive};
pub use crate::value::range::{Range, RangeInclusion};
pub use crate::value::{UntaggedValue, Value};
@@ -1,11 +1,16 @@
-// These macros exist to differentiate between intentional writing to stdout
-// and stray printlns left by accident
+/// Outputs to standard out
+///
+/// Note: this exists to differentiate between intentional writing to stdout
+/// and stray printlns left by accident
#[macro_export]
macro_rules! outln {
    ($($tokens:tt)*) => { println!($($tokens)*) }
}

+/// Outputs to standard error
+///
+/// Note: this exists to differentiate between intentional writing to stdout
+/// and stray printlns left by accident
#[macro_export]
macro_rules! errln {
    ($($tokens:tt)*) => { eprintln!($($tokens)*) }
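Illustrative sketch (not part of the diff): using the two macros documented above, which simply forward to println!/eprintln!. The import path assumes the #[macro_export] macros are reachable from the nu-protocol crate root.

use nu_protocol::{errln, outln};

fn main() {
    outln!("processed {} rows", 3); // deliberate stdout output
    errln!("warning: {}", "config file not found"); // deliberate stderr output
}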
@@ -1,5 +1,6 @@
#![allow(clippy::should_implement_trait)]

+/// Helper type to allow passing something that may potentially be owned, but could also be borrowed
#[derive(Debug)]
pub enum MaybeOwned<'a, T> {
    Owned(T),
@@ -7,6 +8,7 @@ pub enum MaybeOwned<'a, T> {
}

impl<T> MaybeOwned<'_, T> {
+    /// Allows the borrowing of an owned value or passes out the borrowed value
    pub fn borrow(&self) -> &T {
        match self {
            MaybeOwned::Owned(v) => v,
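Illustrative sketch (not part of the diff): the hunk cuts off the second variant of MaybeOwned; this sketch assumes the usual Borrowed(&'a T) variant and that the type is re-exported from the nu-protocol crate root.

use nu_protocol::MaybeOwned;

fn main() {
    let owned = MaybeOwned::Owned("owned".to_string());
    let backing = "borrowed".to_string();
    let borrowed = MaybeOwned::Borrowed(&backing); // assumed variant name

    // borrow() yields &T in both cases
    assert_eq!(owned.borrow(), "owned");
    assert_eq!(borrowed.borrow(), "borrowed");
}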
@@ -3,21 +3,33 @@ use nu_errors::ShellError;
use nu_source::{b, DebugDocBuilder, PrettyDebug};
use serde::{Deserialize, Serialize};

+/// The inner set of actions for the command processor. Each denotes a way to change state in the processor without changing it directly from the command itself.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub enum CommandAction {
+    /// Change to a new directory or path (in non-filesystem situations)
    ChangePath(String),
+    /// Exit out of Nu
    Exit,
+    /// Display an error
    Error(ShellError),
+    /// Enter a new shell at the given path
    EnterShell(String),
+    /// Convert the value given from one type to another
    AutoConvert(Value, String),
+    /// Enter a value shell, one that allows exploring inside of a Value
    EnterValueShell(Value),
+    /// Enter the help shell, which allows exploring the help system
    EnterHelpShell(Value),
+    /// Go to the previous shell in the shell ring buffer
    PreviousShell,
+    /// Go to the next shell in the shell ring buffer
    NextShell,
+    /// Leave the current shell. If it's the last shell, exit out of Nu
    LeaveShell,
}

impl PrettyDebug for CommandAction {
+    /// Get a command action ready to be pretty-printed
    fn pretty(&self) -> DebugDocBuilder {
        match self {
            CommandAction::ChangePath(path) => b::typed("change path", b::description(path)),
@@ -36,14 +48,19 @@ impl PrettyDebug for CommandAction {
    }
}

+/// The fundamental success type in the pipeline. Commands return these values as their main responsibility
#[derive(Debug, Clone, Serialize, Deserialize)]
pub enum ReturnSuccess {
+    /// A value to be used or shown to the user
    Value(Value),
+    /// A debug-enabled value to be used or shown to the user
    DebugValue(Value),
+    /// An action to be performed as values pass out of the command. These are performed rather than passed to the next command in the pipeline
    Action(CommandAction),
}

impl PrettyDebug for ReturnSuccess {
+    /// Get a return success ready to be pretty-printed
    fn pretty(&self) -> DebugDocBuilder {
        match self {
            ReturnSuccess::Value(value) => b::typed("value", value.pretty()),
@@ -53,15 +70,17 @@ impl PrettyDebug for ReturnSuccess {
    }
}

+/// The core Result type for pipelines
pub type ReturnValue = Result<ReturnSuccess, ShellError>;

-impl Into<ReturnValue> for Value {
+impl From<Value> for ReturnValue {
-    fn into(self) -> ReturnValue {
+    fn from(v: Value) -> Self {
-        Ok(ReturnSuccess::Value(self))
+        Ok(ReturnSuccess::Value(v))
    }
}

impl ReturnSuccess {
+    /// Get to the contained Value, if possible
    pub fn raw_value(&self) -> Option<Value> {
        match self {
            ReturnSuccess::Value(raw) => Some(raw.clone()),
@@ -70,18 +89,22 @@ impl ReturnSuccess {
        }
    }

+    /// Helper function for an action to change the the path
    pub fn change_cwd(path: String) -> ReturnValue {
        Ok(ReturnSuccess::Action(CommandAction::ChangePath(path)))
    }

+    /// Helper function to create simple values for returning
    pub fn value(input: impl Into<Value>) -> ReturnValue {
        Ok(ReturnSuccess::Value(input.into()))
    }

+    /// Helper function to create simple debug-enabled values for returning
    pub fn debug_value(input: impl Into<Value>) -> ReturnValue {
        Ok(ReturnSuccess::DebugValue(input.into()))
    }

+    /// Helper function for creating actions
    pub fn action(input: CommandAction) -> ReturnValue {
        Ok(ReturnSuccess::Action(input))
    }
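Illustrative sketch (not part of the diff): building command output with the ReturnSuccess helpers documented above, mixing a plain value with a CommandAction. It assumes these types are re-exported from the nu-protocol crate root.

use nu_protocol::{CommandAction, ReturnSuccess, ReturnValue, UntaggedValue};

fn produce_output(new_path: String) -> Vec<ReturnValue> {
    vec![
        // Send a plain value down the pipeline
        ReturnSuccess::value(UntaggedValue::string("done").into_untagged_value()),
        // Ask the processor to change the current path instead of passing data on
        ReturnSuccess::action(CommandAction::ChangePath(new_path)),
    ]
}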
@@ -4,20 +4,28 @@ use indexmap::IndexMap;
use nu_source::{b, DebugDocBuilder, PrettyDebug, PrettyDebugWithSource};
use serde::{Deserialize, Serialize};

+/// The types of named parameter that a command can have
#[derive(Debug, Serialize, Deserialize, Clone)]
pub enum NamedType {
+    /// A flag without any associated argument. eg) `foo --bar`
    Switch,
+    /// A mandatory flag, with associated argument. eg) `foo --required xyz`
    Mandatory(SyntaxShape),
+    /// An optional flag, with associated argument. eg) `foo --optional abc`
    Optional(SyntaxShape),
}

+/// The type of positional arguments
#[derive(Debug, Clone, Serialize, Deserialize)]
pub enum PositionalType {
+    /// A mandatory postional argument with the expected shape of the value
    Mandatory(String, SyntaxShape),
+    /// An optional positional argument with the expected shape of the value
    Optional(String, SyntaxShape),
}

impl PrettyDebug for PositionalType {
+    /// Prepare the PositionalType for pretty-printing
    fn pretty(&self) -> DebugDocBuilder {
        match self {
            PositionalType::Mandatory(string, shape) => {
@@ -33,26 +41,32 @@ impl PrettyDebug for PositionalType {
}

impl PositionalType {
+    /// Helper to create a mandatory positional argument type
    pub fn mandatory(name: &str, ty: SyntaxShape) -> PositionalType {
        PositionalType::Mandatory(name.to_string(), ty)
    }

+    /// Helper to create a mandatory positional argument with an "any" type
    pub fn mandatory_any(name: &str) -> PositionalType {
        PositionalType::Mandatory(name.to_string(), SyntaxShape::Any)
    }

+    /// Helper to create a mandatory positional argument with a block type
    pub fn mandatory_block(name: &str) -> PositionalType {
        PositionalType::Mandatory(name.to_string(), SyntaxShape::Block)
    }

+    /// Helper to create a optional positional argument type
    pub fn optional(name: &str, ty: SyntaxShape) -> PositionalType {
        PositionalType::Optional(name.to_string(), ty)
    }

+    /// Helper to create a optional positional argument with an "any" type
    pub fn optional_any(name: &str) -> PositionalType {
        PositionalType::Optional(name.to_string(), SyntaxShape::Any)
    }

+    /// Gets the name of the positional argument
    pub fn name(&self) -> &str {
        match self {
            PositionalType::Mandatory(s, _) => s,
@@ -60,6 +74,7 @@ impl PositionalType {
        }
    }

+    /// Gets the expected type of a positional argument
    pub fn syntax_type(&self) -> SyntaxShape {
        match *self {
            PositionalType::Mandatory(_, t) => t,
@@ -70,19 +85,60 @@ impl PositionalType {

type Description = String;

+/// The full signature of a command. All commands have a signature similar to a function signature.
+/// Commands will use this information to register themselves with Nu's core engine so that the command
+/// can be invoked, help can be displayed, and calls to the command can be error-checked.
#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct Signature {
+    /// The name of the command. Used when calling the command
    pub name: String,
+    /// Usage instructions about the command
    pub usage: String,
+    /// The list of positional arguments, both required and optional, and their corresponding types and help text
    pub positional: Vec<(PositionalType, Description)>,
+    /// After the positional arguments, a catch-all for the rest of the arguments that might follow, their type, and help text
    pub rest_positional: Option<(SyntaxShape, Description)>,
+    /// The named flags with corresponding type and help text
    pub named: IndexMap<String, (NamedType, Description)>,
+    /// The type of values being sent out from the command into the pipeline, if any
    pub yields: Option<Type>,
+    /// The type of values being read in from the pipeline into the command, if any
    pub input: Option<Type>,
+    /// If the command is expected to filter data, or to consume it (as a sink)
    pub is_filter: bool,
}

+impl Signature {
+    pub fn shift_positional(&mut self) {
+        self.positional = Vec::from(&self.positional[1..]);
+    }
+
+    pub fn remove_named(&mut self, name: &str) {
+        self.named.remove(name);
+    }
+
+    pub fn allowed(&self) -> Vec<String> {
+        let mut allowed = indexmap::IndexSet::new();
+
+        for (name, _) in &self.named {
+            allowed.insert(format!("--{}", name));
+        }
+
+        for (ty, _) in &self.positional {
+            let shape = ty.syntax_type();
+            allowed.insert(shape.display());
+        }
+
+        if let Some((shape, _)) = &self.rest_positional {
+            allowed.insert(shape.display());
+        }
+
+        allowed.into_iter().collect()
+    }
+}

impl PrettyDebugWithSource for Signature {
+    /// Prepare a Signature for pretty-printing
    fn pretty_debug(&self, source: &str) -> DebugDocBuilder {
        b::typed(
            "signature",
@@ -101,28 +157,32 @@ impl PrettyDebugWithSource for Signature {
}

impl Signature {
+    /// Create a new command signagure with the given name
    pub fn new(name: impl Into<String>) -> Signature {
        Signature {
            name: name.into(),
            usage: String::new(),
            positional: vec![],
            rest_positional: None,
-            named: IndexMap::new(),
+            named: indexmap::indexmap! {"help".into() => (NamedType::Switch, "Display this help message".into())},
            is_filter: false,
            yields: None,
            input: None,
        }
    }

+    /// Create a new signature
    pub fn build(name: impl Into<String>) -> Signature {
        Signature::new(name.into())
    }

+    /// Add a description to the signature
    pub fn desc(mut self, usage: impl Into<String>) -> Signature {
        self.usage = usage.into();
        self
    }

+    /// Add a required positional argument to the signature
    pub fn required(
        mut self,
        name: impl Into<String>,
@@ -137,6 +197,7 @@ impl Signature {
        self
    }

+    /// Add an optional positional argument to the signature
    pub fn optional(
        mut self,
        name: impl Into<String>,
@@ -151,6 +212,7 @@ impl Signature {
        self
    }

+    /// Add an optional named flag argument to the signature
    pub fn named(
        mut self,
        name: impl Into<String>,
@@ -163,6 +225,7 @@ impl Signature {
        self
    }

+    /// Add a required named flag argument to the signature
    pub fn required_named(
        mut self,
        name: impl Into<String>,
@@ -175,28 +238,32 @@ impl Signature {
        self
    }

+    /// Add a switch to the signature
    pub fn switch(mut self, name: impl Into<String>, desc: impl Into<String>) -> Signature {
        self.named
            .insert(name.into(), (NamedType::Switch, desc.into()));

        self
    }

+    /// Set the filter flag for the signature
    pub fn filter(mut self) -> Signature {
        self.is_filter = true;
        self
    }

+    /// Set the type for the "rest" of the positional arguments
    pub fn rest(mut self, ty: SyntaxShape, desc: impl Into<String>) -> Signature {
        self.rest_positional = Some((ty, desc.into()));
        self
    }

+    /// Add a type for the output of the command to the signature
    pub fn yields(mut self, ty: Type) -> Signature {
        self.yields = Some(ty);
        self
    }

+    /// Add a type for the input of the command to the signature
    pub fn input(mut self, ty: Type) -> Signature {
        self.input = Some(ty);
        self
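Illustrative sketch (not part of the diff): a command signature built with the builder methods documented above. The full parameter list of required() is elided by the hunk and is assumed here to be (name, shape, description); note that Signature::new now seeds a --help switch by default.

use nu_protocol::{Signature, SyntaxShape};

fn open_signature() -> Signature {
    Signature::build("open")
        .desc("Load a file into a table, if possible")
        .required("path", SyntaxShape::Path, "the file path to load") // assumed arg order
        .switch("raw", "load the file as raw text instead of a table")
        .filter()
}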
@@ -1,33 +1,45 @@
use nu_source::{b, DebugDocBuilder, PrettyDebug};
use serde::{Deserialize, Serialize};

+/// The syntactic shapes that values must match to be passed into a command. You can think of this as the type-checking that occurs when you call a function.
#[derive(Debug, Copy, Clone, Serialize, Deserialize)]
pub enum SyntaxShape {
+    /// Any syntactic form is allowed
    Any,
+    /// Strings and string-like bare words are allowed
    String,
+    /// Values that can be the right hand side of a '.'
    Member,
+    /// A dotted path to navigate the table
    ColumnPath,
+    /// Only a numeric (integer or decimal) value is allowed
    Number,
+    /// A range is allowed (eg, `1..3`)
    Range,
+    /// Only an integer value is allowed
    Int,
+    /// A filepath is allowed
    Path,
+    /// A glob pattern is allowed, eg `foo*`
    Pattern,
+    /// A block is allowed, eg `{start this thing}`
    Block,
}

impl PrettyDebug for SyntaxShape {
+    /// Prepare SyntaxShape for pretty-printing
    fn pretty(&self) -> DebugDocBuilder {
        b::kind(match self {
-            SyntaxShape::Any => "any shape",
+            SyntaxShape::Any => "any",
-            SyntaxShape::String => "string shape",
+            SyntaxShape::String => "string",
-            SyntaxShape::Member => "member shape",
+            SyntaxShape::Member => "member",
-            SyntaxShape::ColumnPath => "column path shape",
+            SyntaxShape::ColumnPath => "column path",
-            SyntaxShape::Number => "number shape",
+            SyntaxShape::Number => "number",
-            SyntaxShape::Range => "range shape",
+            SyntaxShape::Range => "range",
-            SyntaxShape::Int => "integer shape",
+            SyntaxShape::Int => "integer",
-            SyntaxShape::Path => "file path shape",
+            SyntaxShape::Path => "file path",
-            SyntaxShape::Pattern => "pattern shape",
+            SyntaxShape::Pattern => "pattern",
-            SyntaxShape::Block => "block shape",
+            SyntaxShape::Block => "block",
        })
    }
}
@@ -1,37 +1,44 @@
use nu_source::{DebugDocBuilder, HasSpan, Spanned, SpannedItem, Tagged};

+/// A trait that allows structures to define a known .type_name() which pretty-prints the type
pub trait ShellTypeName {
    fn type_name(&self) -> &'static str;
}

impl<T: ShellTypeName> ShellTypeName for Spanned<T> {
+    /// Return the type_name of the spanned item
    fn type_name(&self) -> &'static str {
        self.item.type_name()
    }
}

impl<T: ShellTypeName> ShellTypeName for &T {
+    /// Return the type_name for the borrowed reference
    fn type_name(&self) -> &'static str {
        (*self).type_name()
    }
}

+/// A trait that allows structures to define a known way to return a spanned type name
pub trait SpannedTypeName {
    fn spanned_type_name(&self) -> Spanned<&'static str>;
}

impl<T: ShellTypeName + HasSpan> SpannedTypeName for T {
+    /// Return the type name as a spanned string
    fn spanned_type_name(&self) -> Spanned<&'static str> {
        self.type_name().spanned(self.span())
    }
}

impl<T: ShellTypeName> SpannedTypeName for Tagged<T> {
+    /// Return the spanned type name for a Tagged value
    fn spanned_type_name(&self) -> Spanned<&'static str> {
        self.item.type_name().spanned(self.tag.span)
    }
}

+/// A trait to enable pretty-printing of type information
pub trait PrettyType {
    fn pretty_type(&self) -> DebugDocBuilder;
}
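Illustrative sketch (not part of the diff): how the blanket SpannedTypeName impl documented above gives spanned_type_name() to any Spanned<T> whose T implements ShellTypeName. It assumes both traits are re-exported from the nu-protocol crate root and that Spanned<T> implements HasSpan in nu-source.

use nu_protocol::{ShellTypeName, SpannedTypeName};
use nu_source::{Span, Spanned, SpannedItem};

struct Token;

impl ShellTypeName for Token {
    fn type_name(&self) -> &'static str {
        "token"
    }
}

fn main() {
    // Spanned<Token> gets ShellTypeName from the impl in the diff above and
    // HasSpan from nu-source, so spanned_type_name() comes for free.
    let token: Spanned<Token> = Token.spanned(Span::new(0, 5));
    let name: Spanned<&'static str> = token.spanned_type_name();
    assert_eq!(name.item, "token");
}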
@@ -1,3 +1,9 @@
+///
+/// This file describes the structural types of the nushell system.
+///
+/// Its primary purpose today is to identify "equivalent" values for the purpose
+/// of merging rows into a single table or identify rows in a table that have the
+/// same shape for reflection.
use crate::value::dict::Dictionary;
use crate::value::primitive::Primitive;
use crate::value::range::RangeInclusion;
@@ -9,50 +15,62 @@ use std::collections::BTreeMap;
use std::fmt::Debug;
use std::hash::Hash;

-/**
-This file describes the structural types of the nushell system.
-
-Its primary purpose today is to identify "equivalent" values for the purpose
-of merging rows into a single table or identify rows in a table that have the
-same shape for reflection.
-*/
+/// Representation of the type of ranges

#[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize, new)]
pub struct RangeType {
    from: (Type, RangeInclusion),
    to: (Type, RangeInclusion),
}

+/// Representation of for the type of a value in Nu
#[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)]
pub enum Type {
+    /// A value which has no value
    Nothing,
+    /// An integer-based value
    Int,
+    /// A range between two values
    Range(Box<RangeType>),
+    /// A decimal (floating point) value
    Decimal,
+    /// A filesize in bytes
    Bytesize,
+    /// A string of text
    String,
+    /// A line of text (a string with trailing line ending)
    Line,
+    /// A path through a table
    ColumnPath,
+    /// A glob pattern (like foo*)
    Pattern,
+    /// A boolean value
    Boolean,
+    /// A date value (in UTC)
    Date,
+    /// A data duration value
    Duration,
+    /// A filepath value
    Path,
+    /// A binary (non-text) buffer value
    Binary,

+    /// A row of data
    Row(Row),
+    /// A full table of data
    Table(Vec<Type>),

-    // TODO: Block arguments
+    /// A block of script (TODO)
    Block,
-    // TODO: Error type
+    /// An error value (TODO)
    Error,

-    // Stream markers (used as bookend markers rather than actual values)
+    /// Beginning of stream marker (used as bookend markers rather than actual values)
    BeginningOfStream,
+    /// End of stream marker (used as bookend markers rather than actual values)
    EndOfStream,
}

+/// A shape representation of the type of a row
#[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Hash, new)]
pub struct Row {
    #[new(default)]
@@ -104,6 +122,7 @@ impl<'de> Deserialize<'de> for Row {
}

impl Type {
+    /// Convert a Primitive into its corresponding Type
    pub fn from_primitive(primitive: &Primitive) -> Type {
        match primitive {
            Primitive::Nothing => Type::Nothing,
@@ -134,6 +153,7 @@ impl Type {
        }
    }

+    /// Convert a dictionary into its corresponding row Type
    pub fn from_dictionary(dictionary: &Dictionary) -> Type {
        let mut map = BTreeMap::new();

@@ -145,6 +165,7 @@ impl Type {
        Type::Row(Row { map })
    }

+    /// Convert a table into its corresponding Type
    pub fn from_table<'a>(table: impl IntoIterator<Item = &'a Value>) -> Type {
        let mut vec = vec![];

@@ -155,6 +176,7 @@ impl Type {
        Type::Table(vec)
    }

+    /// Convert a value into its corresponding Type
    pub fn from_value<'a>(value: impl Into<&'a UntaggedValue>) -> Type {
        match value.into() {
            UntaggedValue::Primitive(p) => Type::from_primitive(p),
@@ -167,6 +189,7 @@ impl Type {
}

impl PrettyDebug for Type {
+    /// Prepare Type for pretty-printing
    fn pretty(&self) -> DebugDocBuilder {
        match self {
            Type::Nothing => ty("nothing"),
@@ -268,6 +291,7 @@ impl PrettyDebug for Type {
    }
}

+/// A view into dictionaries for debug purposes
#[derive(Debug, new)]
struct DebugEntry<'a> {
    key: &'a Column,
@@ -275,6 +299,7 @@ struct DebugEntry<'a> {
}

impl<'a> PrettyDebug for DebugEntry<'a> {
+    /// Prepare debug entries for pretty-printing
    fn pretty(&self) -> DebugDocBuilder {
        (b::key(match self.key {
            Column::String(string) => string.clone(),
@@ -283,6 +308,7 @@ impl<'a> PrettyDebug for DebugEntry<'a> {
    }
}

+/// Helper to create a pretty-print for the type
fn ty(name: impl std::fmt::Display) -> DebugDocBuilder {
    b::kind(format!("{}", name))
}
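Illustrative sketch (not part of the diff): mapping a value to its structural Type with the conversions documented above; Type, UntaggedValue, and the &Value conversion are all shown in this diff.

use nu_protocol::{Type, UntaggedValue};

fn main() {
    let value = UntaggedValue::string("hello").into_untagged_value();
    // &Value converts into &UntaggedValue, so it can be passed directly
    let ty = Type::from_value(&value);
    assert_eq!(ty, Type::String);
}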
|
@ -23,19 +23,25 @@ use serde::{Deserialize, Serialize};
|
|||||||
use std::path::PathBuf;
|
use std::path::PathBuf;
|
||||||
use std::time::SystemTime;
|
use std::time::SystemTime;
|
||||||
|
|
||||||
|
/// The core structured values that flow through a pipeline
|
||||||
#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash, Serialize, Deserialize)]
|
#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash, Serialize, Deserialize)]
|
||||||
pub enum UntaggedValue {
|
pub enum UntaggedValue {
|
||||||
|
/// A primitive (or fundamental) type of values
|
||||||
Primitive(Primitive),
|
Primitive(Primitive),
|
||||||
|
/// A table row
|
||||||
Row(Dictionary),
|
Row(Dictionary),
|
||||||
|
/// A full inner (or embedded) table
|
||||||
Table(Vec<Value>),
|
Table(Vec<Value>),
|
||||||
|
|
||||||
// Errors are a type of value too
|
/// An error value that represents an error that occurred as the values in the pipeline were built
|
||||||
Error(ShellError),
|
Error(ShellError),
|
||||||
|
|
||||||
|
/// A block of Nu code, eg `{ ls | get name }`
|
||||||
Block(Evaluate),
|
Block(Evaluate),
|
||||||
}
|
}
|
||||||
|
|
||||||
impl UntaggedValue {
|
impl UntaggedValue {
|
||||||
|
/// Tags an UntaggedValue so that it can become a Value
|
||||||
pub fn retag(self, tag: impl Into<Tag>) -> Value {
|
pub fn retag(self, tag: impl Into<Tag>) -> Value {
|
||||||
Value {
|
Value {
|
||||||
value: self,
|
value: self,
|
||||||
@ -43,6 +49,7 @@ impl UntaggedValue {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Get the corresponding descriptors (column names) associated with this value
|
||||||
pub fn data_descriptors(&self) -> Vec<String> {
|
pub fn data_descriptors(&self) -> Vec<String> {
|
||||||
match self {
|
match self {
|
||||||
UntaggedValue::Primitive(_) => vec![],
|
UntaggedValue::Primitive(_) => vec![],
|
||||||
@ -53,6 +60,7 @@ impl UntaggedValue {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Convert this UntaggedValue to a Value with the given Tag
|
||||||
pub fn into_value(self, tag: impl Into<Tag>) -> Value {
|
pub fn into_value(self, tag: impl Into<Tag>) -> Value {
|
||||||
Value {
|
Value {
|
||||||
value: self,
|
value: self,
|
||||||
@ -60,6 +68,7 @@ impl UntaggedValue {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Convert this UntaggedValue into a Value with an empty Tag
|
||||||
pub fn into_untagged_value(self) -> Value {
|
pub fn into_untagged_value(self) -> Value {
|
||||||
Value {
|
Value {
|
||||||
value: self,
|
value: self,
|
||||||
@ -67,6 +76,7 @@ impl UntaggedValue {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Returns true if this value represents boolean true
|
||||||
pub fn is_true(&self) -> bool {
|
pub fn is_true(&self) -> bool {
|
||||||
match self {
|
match self {
|
||||||
UntaggedValue::Primitive(Primitive::Boolean(true)) => true,
|
UntaggedValue::Primitive(Primitive::Boolean(true)) => true,
|
||||||
@ -74,10 +84,12 @@ impl UntaggedValue {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Returns true if the value represents something other than Nothing
|
||||||
pub fn is_some(&self) -> bool {
|
pub fn is_some(&self) -> bool {
|
||||||
!self.is_none()
|
!self.is_none()
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Returns true if the value represents Nothing
|
||||||
pub fn is_none(&self) -> bool {
|
pub fn is_none(&self) -> bool {
|
||||||
match self {
|
match self {
|
||||||
UntaggedValue::Primitive(Primitive::Nothing) => true,
|
UntaggedValue::Primitive(Primitive::Nothing) => true,
|
||||||
@ -85,6 +97,7 @@ impl UntaggedValue {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Returns true if the value represents an error
|
||||||
pub fn is_error(&self) -> bool {
|
pub fn is_error(&self) -> bool {
|
||||||
match self {
|
match self {
|
||||||
UntaggedValue::Error(_err) => true,
|
UntaggedValue::Error(_err) => true,
|
||||||
@ -92,6 +105,7 @@ impl UntaggedValue {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Expect this value to be an error and return it
|
||||||
pub fn expect_error(&self) -> ShellError {
|
pub fn expect_error(&self) -> ShellError {
|
||||||
match self {
|
match self {
|
||||||
UntaggedValue::Error(err) => err.clone(),
|
UntaggedValue::Error(err) => err.clone(),
|
||||||
@ -99,6 +113,7 @@ impl UntaggedValue {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Expect this value to be a string and return it
|
||||||
pub fn expect_string(&self) -> &str {
|
pub fn expect_string(&self) -> &str {
|
||||||
match self {
|
match self {
|
||||||
UntaggedValue::Primitive(Primitive::String(string)) => &string[..],
|
UntaggedValue::Primitive(Primitive::String(string)) => &string[..],
|
||||||
@ -106,53 +121,64 @@ impl UntaggedValue {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
#[allow(unused)]
|
/// Helper for creating row values
|
||||||
pub fn row(entries: IndexMap<String, Value>) -> UntaggedValue {
|
pub fn row(entries: IndexMap<String, Value>) -> UntaggedValue {
|
||||||
UntaggedValue::Row(entries.into())
|
UntaggedValue::Row(entries.into())
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Helper for creating table values
|
||||||
pub fn table(list: &[Value]) -> UntaggedValue {
|
pub fn table(list: &[Value]) -> UntaggedValue {
|
||||||
UntaggedValue::Table(list.to_vec())
|
UntaggedValue::Table(list.to_vec())
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Helper for creating string values
|
||||||
pub fn string(s: impl Into<String>) -> UntaggedValue {
|
pub fn string(s: impl Into<String>) -> UntaggedValue {
|
||||||
UntaggedValue::Primitive(Primitive::String(s.into()))
|
UntaggedValue::Primitive(Primitive::String(s.into()))
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Helper for creating line values
|
||||||
pub fn line(s: impl Into<String>) -> UntaggedValue {
|
pub fn line(s: impl Into<String>) -> UntaggedValue {
|
||||||
UntaggedValue::Primitive(Primitive::Line(s.into()))
|
UntaggedValue::Primitive(Primitive::Line(s.into()))
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Helper for creating column-path values
|
||||||
pub fn column_path(s: Vec<impl Into<PathMember>>) -> UntaggedValue {
|
pub fn column_path(s: Vec<impl Into<PathMember>>) -> UntaggedValue {
|
||||||
UntaggedValue::Primitive(Primitive::ColumnPath(ColumnPath::new(
|
UntaggedValue::Primitive(Primitive::ColumnPath(ColumnPath::new(
|
||||||
s.into_iter().map(|p| p.into()).collect(),
|
s.into_iter().map(|p| p.into()).collect(),
|
||||||
)))
|
)))
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Helper for creating integer values
|
||||||
pub fn int(i: impl Into<BigInt>) -> UntaggedValue {
|
pub fn int(i: impl Into<BigInt>) -> UntaggedValue {
|
||||||
UntaggedValue::Primitive(Primitive::Int(i.into()))
|
UntaggedValue::Primitive(Primitive::Int(i.into()))
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Helper for creating glob pattern values
|
||||||
pub fn pattern(s: impl Into<String>) -> UntaggedValue {
|
pub fn pattern(s: impl Into<String>) -> UntaggedValue {
|
||||||
UntaggedValue::Primitive(Primitive::String(s.into()))
|
UntaggedValue::Primitive(Primitive::String(s.into()))
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Helper for creating filepath values
|
||||||
pub fn path(s: impl Into<PathBuf>) -> UntaggedValue {
|
pub fn path(s: impl Into<PathBuf>) -> UntaggedValue {
|
||||||
UntaggedValue::Primitive(Primitive::Path(s.into()))
|
UntaggedValue::Primitive(Primitive::Path(s.into()))
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Helper for creating bytesize values
|
||||||
pub fn bytes(s: impl Into<u64>) -> UntaggedValue {
|
pub fn bytes(s: impl Into<u64>) -> UntaggedValue {
|
||||||
UntaggedValue::Primitive(Primitive::Bytes(s.into()))
|
UntaggedValue::Primitive(Primitive::Bytes(s.into()))
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Helper for creating decimal values
|
||||||
pub fn decimal(s: impl Into<BigDecimal>) -> UntaggedValue {
|
pub fn decimal(s: impl Into<BigDecimal>) -> UntaggedValue {
|
||||||
UntaggedValue::Primitive(Primitive::Decimal(s.into()))
|
UntaggedValue::Primitive(Primitive::Decimal(s.into()))
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Helper for creating binary (non-text) buffer values
|
||||||
pub fn binary(binary: Vec<u8>) -> UntaggedValue {
|
pub fn binary(binary: Vec<u8>) -> UntaggedValue {
|
||||||
UntaggedValue::Primitive(Primitive::Binary(binary))
|
UntaggedValue::Primitive(Primitive::Binary(binary))
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Helper for creating range values
|
||||||
pub fn range(
|
pub fn range(
|
||||||
left: (Spanned<Primitive>, RangeInclusion),
|
left: (Spanned<Primitive>, RangeInclusion),
|
||||||
right: (Spanned<Primitive>, RangeInclusion),
|
right: (Spanned<Primitive>, RangeInclusion),
|
||||||
@ -160,29 +186,35 @@ impl UntaggedValue {
|
|||||||
UntaggedValue::Primitive(Primitive::Range(Box::new(Range::new(left, right))))
|
UntaggedValue::Primitive(Primitive::Range(Box::new(Range::new(left, right))))
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Helper for creating boolean values
|
||||||
pub fn boolean(s: impl Into<bool>) -> UntaggedValue {
|
pub fn boolean(s: impl Into<bool>) -> UntaggedValue {
|
||||||
UntaggedValue::Primitive(Primitive::Boolean(s.into()))
|
UntaggedValue::Primitive(Primitive::Boolean(s.into()))
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Helper for creating date duration values
|
||||||
pub fn duration(secs: u64) -> UntaggedValue {
|
pub fn duration(secs: u64) -> UntaggedValue {
|
||||||
UntaggedValue::Primitive(Primitive::Duration(secs))
|
UntaggedValue::Primitive(Primitive::Duration(secs))
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Helper for creating datatime values
|
||||||
pub fn system_date(s: SystemTime) -> UntaggedValue {
|
pub fn system_date(s: SystemTime) -> UntaggedValue {
|
||||||
UntaggedValue::Primitive(Primitive::Date(s.into()))
|
UntaggedValue::Primitive(Primitive::Date(s.into()))
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Helper for creating the Nothing value
|
||||||
pub fn nothing() -> UntaggedValue {
|
pub fn nothing() -> UntaggedValue {
|
||||||
UntaggedValue::Primitive(Primitive::Nothing)
|
UntaggedValue::Primitive(Primitive::Nothing)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// The fundamental structured value that flows through the pipeline, with associated metadata
|
||||||
#[derive(Debug, Clone, PartialOrd, PartialEq, Ord, Eq, Hash, Serialize, Deserialize)]
|
#[derive(Debug, Clone, PartialOrd, PartialEq, Ord, Eq, Hash, Serialize, Deserialize)]
|
||||||
pub struct Value {
|
pub struct Value {
|
||||||
pub value: UntaggedValue,
|
pub value: UntaggedValue,
|
||||||
pub tag: Tag,
|
pub tag: Tag,
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Overload deferencing to give back the UntaggedValue inside of a Value
|
||||||
impl std::ops::Deref for Value {
|
impl std::ops::Deref for Value {
|
||||||
type Target = UntaggedValue;
|
type Target = UntaggedValue;
|
||||||
|
|
||||||
@ -192,18 +224,22 @@ impl std::ops::Deref for Value {
|
|||||||
}
|
}
|
||||||
|
|
||||||
impl Value {
|
impl Value {
|
||||||
|
/// Get the corresponding anchor (originating location) for the Value
|
||||||
pub fn anchor(&self) -> Option<AnchorLocation> {
|
pub fn anchor(&self) -> Option<AnchorLocation> {
|
||||||
self.tag.anchor()
|
self.tag.anchor()
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Get the name (url, filepath, etc) behind an anchor for the Value
|
||||||
pub fn anchor_name(&self) -> Option<String> {
|
pub fn anchor_name(&self) -> Option<String> {
|
||||||
self.tag.anchor_name()
|
self.tag.anchor_name()
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Get the metadata for the Value
|
||||||
pub fn tag(&self) -> Tag {
|
pub fn tag(&self) -> Tag {
|
||||||
self.tag.clone()
|
self.tag.clone()
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// View the Value as a string, if possible
|
||||||
pub fn as_string(&self) -> Result<String, ShellError> {
|
pub fn as_string(&self) -> Result<String, ShellError> {
|
||||||
match &self.value {
|
match &self.value {
|
||||||
UntaggedValue::Primitive(Primitive::String(string)) => Ok(string.clone()),
|
UntaggedValue::Primitive(Primitive::String(string)) => Ok(string.clone()),
|
||||||
@ -212,6 +248,7 @@ impl Value {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// View into the borrowed string contents of a Value, if possible
|
||||||
pub fn as_forgiving_string(&self) -> Result<&str, ShellError> {
|
pub fn as_forgiving_string(&self) -> Result<&str, ShellError> {
|
||||||
match &self.value {
|
match &self.value {
|
||||||
UntaggedValue::Primitive(Primitive::String(string)) => Ok(&string[..]),
|
UntaggedValue::Primitive(Primitive::String(string)) => Ok(&string[..]),
|
||||||
@ -219,6 +256,7 @@ impl Value {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// View the Value as a path, if possible
|
||||||
pub fn as_path(&self) -> Result<PathBuf, ShellError> {
|
pub fn as_path(&self) -> Result<PathBuf, ShellError> {
|
||||||
match &self.value {
|
match &self.value {
|
||||||
UntaggedValue::Primitive(Primitive::Path(path)) => Ok(path.clone()),
|
UntaggedValue::Primitive(Primitive::Path(path)) => Ok(path.clone()),
|
||||||
@ -227,6 +265,7 @@ impl Value {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// View the Value as a Primitive value, if possible
|
||||||
pub fn as_primitive(&self) -> Result<Primitive, ShellError> {
|
pub fn as_primitive(&self) -> Result<Primitive, ShellError> {
|
||||||
match &self.value {
|
match &self.value {
|
||||||
UntaggedValue::Primitive(primitive) => Ok(primitive.clone()),
|
UntaggedValue::Primitive(primitive) => Ok(primitive.clone()),
|
||||||
@ -237,26 +276,51 @@ impl Value {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// View the Value as unsigned 64-bit, if possible
|
||||||
pub fn as_u64(&self) -> Result<u64, ShellError> {
|
pub fn as_u64(&self) -> Result<u64, ShellError> {
|
||||||
match &self.value {
|
match &self.value {
|
||||||
UntaggedValue::Primitive(primitive) => primitive.as_u64(self.tag.span),
|
UntaggedValue::Primitive(primitive) => primitive.as_u64(self.tag.span),
|
||||||
_ => Err(ShellError::type_error("integer", self.spanned_type_name())),
|
_ => Err(ShellError::type_error("integer", self.spanned_type_name())),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// View the Value as boolean, if possible
|
||||||
|
pub fn as_bool(&self) -> Result<bool, ShellError> {
|
||||||
|
match &self.value {
|
||||||
|
UntaggedValue::Primitive(Primitive::Boolean(p)) => Ok(*p),
|
||||||
|
_ => Err(ShellError::type_error("boolean", self.spanned_type_name())),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Into<Value> for String {
|
||||||
|
fn into(self) -> Value {
|
||||||
|
let end = self.len();
|
||||||
|
Value {
|
||||||
|
value: self.into(),
|
||||||
|
tag: Tag {
|
||||||
|
anchor: None,
|
||||||
|
span: Span::new(0, end),
|
||||||
|
},
|
||||||
|
}
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl Into<UntaggedValue> for &str {
|
impl Into<UntaggedValue> for &str {
|
||||||
|
/// Convert a string slice into an UntaggedValue
|
||||||
fn into(self) -> UntaggedValue {
|
fn into(self) -> UntaggedValue {
|
||||||
UntaggedValue::Primitive(Primitive::String(self.to_string()))
|
UntaggedValue::Primitive(Primitive::String(self.to_string()))
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl Into<UntaggedValue> for Value {
|
impl Into<UntaggedValue> for Value {
|
||||||
|
/// Convert a Value into an UntaggedValue
|
||||||
fn into(self) -> UntaggedValue {
|
fn into(self) -> UntaggedValue {
|
||||||
self.value
|
self.value
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Convert a borrowed Value into a borrowed UntaggedValue
|
||||||
impl<'a> Into<&'a UntaggedValue> for &'a Value {
|
impl<'a> Into<&'a UntaggedValue> for &'a Value {
|
||||||
fn into(self) -> &'a UntaggedValue {
|
fn into(self) -> &'a UntaggedValue {
|
||||||
&self.value
|
&self.value
|
||||||
@ -264,18 +328,21 @@ impl<'a> Into<&'a UntaggedValue> for &'a Value {
|
|||||||
}
|
}
|
||||||
|
|
||||||
impl HasSpan for Value {
|
impl HasSpan for Value {
|
||||||
|
/// Return the corresponding Span for the Value
|
||||||
fn span(&self) -> Span {
|
fn span(&self) -> Span {
|
||||||
self.tag.span
|
self.tag.span
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl ShellTypeName for Value {
|
impl ShellTypeName for Value {
|
||||||
|
/// Get the type name for the Value
|
||||||
fn type_name(&self) -> &'static str {
|
fn type_name(&self) -> &'static str {
|
||||||
ShellTypeName::type_name(&self.value)
|
ShellTypeName::type_name(&self.value)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl ShellTypeName for UntaggedValue {
|
impl ShellTypeName for UntaggedValue {
|
||||||
|
/// Get the type name for the UntaggedValue
|
||||||
fn type_name(&self) -> &'static str {
|
fn type_name(&self) -> &'static str {
|
||||||
match &self {
|
match &self {
|
||||||
UntaggedValue::Primitive(p) => p.type_name(),
|
UntaggedValue::Primitive(p) => p.type_name(),
|
||||||
@ -288,13 +355,21 @@ impl ShellTypeName for UntaggedValue {
|
|||||||
}
|
}
|
||||||
|
|
||||||
impl From<Primitive> for UntaggedValue {
|
impl From<Primitive> for UntaggedValue {
|
||||||
|
/// Convert a Primitive to an UntaggedValue
|
||||||
fn from(input: Primitive) -> UntaggedValue {
|
fn from(input: Primitive) -> UntaggedValue {
|
||||||
UntaggedValue::Primitive(input)
|
UntaggedValue::Primitive(input)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl From<String> for UntaggedValue {
|
impl From<String> for UntaggedValue {
|
||||||
|
/// Convert a String to an UntaggedValue
|
||||||
fn from(input: String) -> UntaggedValue {
|
fn from(input: String) -> UntaggedValue {
|
||||||
UntaggedValue::Primitive(Primitive::String(input))
|
UntaggedValue::Primitive(Primitive::String(input))
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
impl From<ShellError> for UntaggedValue {
|
||||||
|
fn from(e: ShellError) -> Self {
|
||||||
|
UntaggedValue::Error(e)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
@@ -5,6 +5,7 @@ use nu_source::{b, span_for_spanned_list, DebugDocBuilder, HasFallibleSpan, Pret
use num_bigint::BigInt;
use serde::{Deserialize, Serialize};

+/// A PathMember that has yet to be spanned so that it can be used in later processing
#[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)]
pub enum UnspannedPathMember {
    String(String),
@@ -12,6 +13,7 @@ pub enum UnspannedPathMember {
}

impl UnspannedPathMember {
+    /// Add the span information and get a full PathMember
    pub fn into_path_member(self, span: impl Into<Span>) -> PathMember {
        PathMember {
            unspanned: self,
@@ -20,6 +22,7 @@ impl UnspannedPathMember {
    }
}

+/// A basic piece of a ColumnPath, which describes the steps to take through a table to arrive at a cell, row, or inner table
#[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)]
pub struct PathMember {
    pub unspanned: UnspannedPathMember,
@@ -27,6 +30,7 @@ pub struct PathMember {
}

impl PrettyDebug for &PathMember {
+    /// Gets the PathMember ready to be pretty-printed
    fn pretty(&self) -> DebugDocBuilder {
        match &self.unspanned {
            UnspannedPathMember::String(string) => b::primitive(format!("{:?}", string)),
@@ -35,6 +39,10 @@ impl PrettyDebug for &PathMember {
    }
}

+/// The fundamental path primitive to describe how to navigate through a table to get to a sub-item. A path member can be either a word or a number. Words/strings are taken to mean
+/// a column name, and numbers are the row number. Taken together they describe which column or row to narrow to in order to get data.
+///
+/// Rows must follow column names, they can't come first. eg) `foo.1` is valid where `1.foo` is not.
#[derive(
    Debug, Hash, Serialize, Deserialize, Ord, PartialOrd, Eq, PartialEq, Getters, Clone, new,
)]
@@ -44,16 +52,19 @@ pub struct ColumnPath {
}

impl ColumnPath {
+    /// Iterate over the members of the column path
    pub fn iter(&self) -> impl Iterator<Item = &PathMember> {
        self.members.iter()
    }

+    /// Returns the last member and a slice of the remaining members
    pub fn split_last(&self) -> Option<(&PathMember, &[PathMember])> {
        self.members.split_last()
    }
}

impl PrettyDebug for ColumnPath {
+    /// Gets the ColumnPath ready to be pretty-printed
    fn pretty(&self) -> DebugDocBuilder {
        let members: Vec<DebugDocBuilder> =
            self.members.iter().map(|member| member.pretty()).collect();
@@ -68,6 +79,7 @@ impl PrettyDebug for ColumnPath {
}

impl HasFallibleSpan for ColumnPath {
+    /// Creates a span that will cover the column path, if possible
    fn maybe_span(&self) -> Option<Span> {
        if self.members.is_empty() {
            None
@@ -78,15 +90,18 @@ impl HasFallibleSpan for ColumnPath {
}

impl PathMember {
+    /// Create a string path member
    pub fn string(string: impl Into<String>, span: impl Into<Span>) -> PathMember {
        UnspannedPathMember::String(string.into()).into_path_member(span)
    }

+    /// Create a numeric path member
    pub fn int(int: impl Into<BigInt>, span: impl Into<Span>) -> PathMember {
        UnspannedPathMember::Int(int.into()).into_path_member(span)
    }
}

+/// Prepares a list of "sounds like" matches for the string you're trying to find
pub fn did_you_mean(obj_source: &Value, field_tried: &PathMember) -> Option<Vec<(usize, String)>> {
    let field_tried = match &field_tried.unspanned {
        UnspannedPathMember::String(string) => string.clone(),
@@ -8,6 +8,7 @@ use nu_source::TaggedItem;
impl std::convert::TryFrom<&Value> for i64 {
    type Error = ShellError;

+    /// Convert to an i64 integer, if possible
    fn try_from(value: &Value) -> Result<i64, ShellError> {
        match &value.value {
            UntaggedValue::Primitive(Primitive::Int(int)) => {
@@ -21,6 +22,7 @@ impl std::convert::TryFrom<&Value> for i64 {
impl std::convert::TryFrom<&Value> for String {
    type Error = ShellError;

+    /// Convert to a string, if possible
    fn try_from(value: &Value) -> Result<String, ShellError> {
        match &value.value {
            UntaggedValue::Primitive(Primitive::String(s)) => Ok(s.clone()),
@@ -32,6 +34,7 @@ impl std::convert::TryFrom<&Value> for String {
impl std::convert::TryFrom<&Value> for Vec<u8> {
    type Error = ShellError;

+    /// Convert to a u8 vec, if possible
    fn try_from(value: &Value) -> Result<Vec<u8>, ShellError> {
        match &value.value {
            UntaggedValue::Primitive(Primitive::Binary(b)) => Ok(b.clone()),
@@ -43,6 +46,7 @@ impl std::convert::TryFrom<&Value> for Vec<u8> {
impl<'a> std::convert::TryFrom<&'a Value> for &'a Dictionary {
    type Error = ShellError;

+    /// Convert to a dictionary, if possible
    fn try_from(value: &'a Value) -> Result<&'a Dictionary, ShellError> {
        match &value.value {
            UntaggedValue::Row(d) => Ok(d),
@@ -4,12 +4,14 @@ use crate::value::{UntaggedValue, Value};
use nu_source::{b, DebugDocBuilder, PrettyDebug};

impl PrettyDebug for &Value {
+    /// Get a borrowed Value ready to be pretty-printed
    fn pretty(&self) -> DebugDocBuilder {
        PrettyDebug::pretty(*self)
    }
}

impl PrettyDebug for Value {
+    /// Get a Value ready to be pretty-printed
    fn pretty(&self) -> DebugDocBuilder {
        match &self.value {
            UntaggedValue::Primitive(p) => p.pretty(),
@@ -24,6 +26,7 @@ impl PrettyDebug for Value {
}

impl PrettyType for Primitive {
+    /// Find the type of the Value and prepare it for pretty-printing
    fn pretty_type(&self) -> DebugDocBuilder {
        match self {
            Primitive::Nothing => ty("nothing"),
@@ -47,6 +50,7 @@ impl PrettyType for Primitive {
}

impl PrettyDebug for Primitive {
+    /// Get a Primitive value ready to be pretty-printed
    fn pretty(&self) -> DebugDocBuilder {
        match self {
            Primitive::Nothing => b::primitive("nothing"),
@@ -9,6 +9,7 @@ use serde::{Deserialize, Serialize};
use std::cmp::{Ord, Ordering, PartialOrd};
use std::hash::{Hash, Hasher};

+/// A dictionary that can hold a mapping from names to Values
#[derive(Debug, Default, Serialize, Deserialize, PartialEq, Eq, Clone, Getters, new)]
pub struct Dictionary {
    #[get = "pub"]
@@ -17,6 +18,7 @@ pub struct Dictionary {

#[allow(clippy::derive_hash_xor_eq)]
impl Hash for Dictionary {
+    /// Create the hash function to allow the Hash trait for dictionaries
    fn hash<H: Hasher>(&self, state: &mut H) {
        let mut entries = self.entries.clone();
        entries.sort_keys();
@@ -26,6 +28,7 @@ impl Hash for Dictionary {
}

impl PartialOrd for Dictionary {
+    /// Compare two dictionaries for sort ordering
    fn partial_cmp(&self, other: &Dictionary) -> Option<Ordering> {
        let this: Vec<&String> = self.entries.keys().collect();
        let that: Vec<&String> = other.entries.keys().collect();
@@ -42,6 +45,7 @@ impl PartialOrd for Dictionary {
}

impl Ord for Dictionary {
+    /// Compare two dictionaries for ordering
    fn cmp(&self, other: &Dictionary) -> Ordering {
        let this: Vec<&String> = self.entries.keys().collect();
        let that: Vec<&String> = other.entries.keys().collect();
@@ -58,6 +62,7 @@ impl Ord for Dictionary {
}

impl PartialEq<Value> for Dictionary {
+    /// Test a dictionary against a Value for equality
    fn eq(&self, other: &Value) -> bool {
        match &other.value {
            UntaggedValue::Row(d) => self == d,
@@ -66,6 +71,7 @@ impl PartialEq<Value> for Dictionary {
    }
}

+/// A key-value pair specifically meant to be used in debug and pretty-printing
#[derive(Debug, new)]
struct DebugEntry<'a> {
    key: &'a str,
@@ -73,12 +79,14 @@ struct DebugEntry<'a> {
}

impl<'a> PrettyDebug for DebugEntry<'a> {
+    /// Build the information to pretty-print the DebugEntry
    fn pretty(&self) -> DebugDocBuilder {
        (b::key(self.key.to_string()) + b::equals() + self.value.pretty().into_value()).group()
    }
}

impl PrettyDebug for Dictionary {
+    /// Get a Dictionary ready to be pretty-printed
    fn pretty(&self) -> DebugDocBuilder {
        b::delimit(
            "(",
@@ -94,6 +102,7 @@ impl PrettyDebug for Dictionary {
}

impl From<IndexMap<String, Value>> for Dictionary {
+    /// Create a dictionary from a map of strings to Values
    fn from(input: IndexMap<String, Value>) -> Dictionary {
        let mut out = IndexMap::default();

@@ -106,6 +115,7 @@ impl From<IndexMap<String, Value>> for Dictionary {
}

impl Dictionary {
+    /// Find the matching Value for a given key, if possible. If not, return a Primitive::Nothing
    pub fn get_data(&self, desc: &str) -> MaybeOwned<'_, Value> {
        match self.entries.get(desc) {
            Some(v) => MaybeOwned::Borrowed(v),
@@ -115,10 +125,17 @@ impl Dictionary {
        }
    }

+    /// Iterate the keys in the Dictionary
    pub fn keys(&self) -> impl Iterator<Item = &String> {
        self.entries.keys()
    }

+    /// Checks if given key exists
+    pub fn contains_key(&self, key: &str) -> bool {
+        self.entries.contains_key(key)
+    }
+
+    /// Find the matching Value for a key, if possible
    pub fn get_data_by_key(&self, name: Spanned<&str>) -> Option<Value> {
        let result = self
            .entries
@@ -134,6 +151,7 @@ impl Dictionary {
        )
    }

+    /// Get a mutable entry that matches a key, if possible
    pub fn get_mut_data_by_key(&mut self, name: &str) -> Option<&mut Value> {
        match self
            .entries
@@ -145,11 +163,13 @@ impl Dictionary {
        }
    }

+    /// Insert a new key/value pair into the dictionary
    pub fn insert_data_at_key(&mut self, name: &str, value: Value) {
        self.entries.insert(name.to_string(), value);
    }
}

+/// A helper to help create dictionaries for you. It has the ability to insert values into the dictionary while maintaining the tags that need to be applied to the individual members
#[derive(Debug)]
pub struct TaggedDictBuilder {
    tag: Tag,
@@ -157,6 +177,7 @@ pub struct TaggedDictBuilder {
}

impl TaggedDictBuilder {
+    /// Create a new builder
    pub fn new(tag: impl Into<Tag>) -> TaggedDictBuilder {
        TaggedDictBuilder {
            tag: tag.into(),
@@ -164,12 +185,14 @@ impl TaggedDictBuilder {
        }
    }

+    /// Build the contents of the builder into a Value
    pub fn build(tag: impl Into<Tag>, block: impl FnOnce(&mut TaggedDictBuilder)) -> Value {
        let mut builder = TaggedDictBuilder::new(tag);
        block(&mut builder);
        builder.into_value()
    }

+    /// Create a new builder with a pre-defined capacity
    pub fn with_capacity(tag: impl Into<Tag>, n: usize) -> TaggedDictBuilder {
        TaggedDictBuilder {
            tag: tag.into(),
@@ -177,30 +200,36 @@ impl TaggedDictBuilder {
        }
    }

+    /// Insert an untagged key/value pair into the dictionary, to later be tagged when built
    pub fn insert_untagged(&mut self, key: impl Into<String>, value: impl Into<UntaggedValue>) {
        self.dict
            .insert(key.into(), value.into().into_value(&self.tag));
    }

+    /// Insert a key/value pair into the dictionary
    pub fn insert_value(&mut self, key: impl Into<String>, value: impl Into<Value>) {
        self.dict.insert(key.into(), value.into());
    }

+    /// Convert the dictionary into a tagged Value using the original tag
    pub fn into_value(self) -> Value {
        let tag = self.tag.clone();
        self.into_untagged_value().into_value(tag)
    }

+    /// Convert the dictionary into an UntaggedValue
    pub fn into_untagged_value(self) -> UntaggedValue {
        UntaggedValue::Row(Dictionary { entries: self.dict })
    }

+    /// Returns true if the dictionary is empty, false otherwise
    pub fn is_empty(&self) -> bool {
        self.dict.is_empty()
    }
}

impl From<TaggedDictBuilder> for Value {
+    /// Convert a builder into a tagged Value
    fn from(input: TaggedDictBuilder) -> Value {
        input.into_value()
    }
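A brief aside on the `TaggedDictBuilder` API documented in the hunks above: the sketch below shows how the builder methods that appear in this diff (`new`, `insert_untagged`, `into_value`) fit together to produce a tagged row `Value`. This is an illustrative example, not part of the diff; the `use` paths and the zero-length placeholder `Span` are assumptions.

```rust
// Assumed re-export paths; only the methods and fields shown in the diff are relied on.
use nu_protocol::{Primitive, TaggedDictBuilder, UntaggedValue, Value};
use nu_source::{Span, Tag};

/// Build a one-row table value with "name" and "size" columns, carrying a placeholder tag.
fn example_row() -> Value {
    // A Tag is just an optional anchor plus a span (fields shown in the meta.rs hunks below).
    let tag = Tag {
        anchor: None,
        span: Span::new(0, 0),
    };

    let mut row = TaggedDictBuilder::new(tag);
    // &str converts into UntaggedValue via the Into impl shown earlier in this diff.
    row.insert_untagged("name", "Cargo.toml");
    row.insert_untagged("size", UntaggedValue::Primitive(Primitive::Bytes(1024)));
    // Every inserted value is tagged with the builder's tag when the row is built.
    row.into_value()
}
```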
@@ -6,6 +6,9 @@ use serde::{Deserialize, Serialize};
use std::cmp::{Ord, Ordering, PartialOrd};
use std::fmt::Debug;

+/// An evaluation scope. Scopes map variable names to Values and aid in evaluating blocks and expressions.
+/// Additionally, holds the value for the special $it variable, a variable used to refer to the value passing
+/// through the pipeline at that moment
#[derive(Debug)]
pub struct Scope {
    pub it: Value,
@@ -13,6 +16,7 @@ pub struct Scope {
}

impl Scope {
+    /// Create a new scope
    pub fn new(it: Value) -> Scope {
        Scope {
            it,
@@ -22,6 +26,7 @@ impl Scope {
}

impl Scope {
+    /// Create an empty scope
    pub fn empty() -> Scope {
        Scope {
            it: UntaggedValue::Primitive(Primitive::Nothing).into_untagged_value(),
@@ -29,6 +34,7 @@ impl Scope {
        }
    }

+    /// Create an empty scope, setting $it to a known Value
    pub fn it_value(value: Value) -> Scope {
        Scope {
            it: value,
@@ -4,7 +4,6 @@ use crate::value::range::Range;
use crate::value::{serde_bigdecimal, serde_bigint};
use bigdecimal::BigDecimal;
use chrono::{DateTime, Utc};
-use chrono_humanize::Humanize;
use nu_errors::{ExpectedRange, ShellError};
use nu_source::{PrettyDebug, Span, SpannedItem};
use num_bigint::BigInt;
@@ -12,32 +11,52 @@ use num_traits::cast::{FromPrimitive, ToPrimitive};
use serde::{Deserialize, Serialize};
use std::path::PathBuf;

+/// The most fundamental of structured values in Nu are the Primitive values. These values represent types like integers, strings, booleans, dates, etc that are then used
+/// as the building blocks to build up more complex structures.
+///
+/// Primitives also include marker values BeginningOfStream and EndOfStream which denote a change of condition in the stream
#[derive(Debug, Clone, Ord, PartialOrd, Eq, PartialEq, Hash, Deserialize, Serialize)]
pub enum Primitive {
+    /// An empty value
    Nothing,
+    /// A "big int", an integer with arbitrarily large size (aka not limited to 64-bit)
    #[serde(with = "serde_bigint")]
    Int(BigInt),
+    /// A "big decimal", a decimal number with arbitrarily large size (aka not limited to 64-bit)
    #[serde(with = "serde_bigdecimal")]
    Decimal(BigDecimal),
+    /// A count in the number of bytes, used as a filesize
    Bytes(u64),
+    /// A string value
    String(String),
+    /// A string value with an implied carriage return (or cr/lf) ending
    Line(String),
+    /// A path to travel to reach a value in a table
    ColumnPath(ColumnPath),
+    /// A glob pattern, eg foo*
    Pattern(String),
+    /// A boolean value
    Boolean(bool),
+    /// A date value, in UTC
    Date(DateTime<Utc>),
-    Duration(u64), // Duration in seconds
+    /// A count in the number of seconds
+    Duration(u64),
+    /// A range of values
    Range(Box<Range>),
+    /// A file path
    Path(PathBuf),
+    /// A vector of raw binary data
    #[serde(with = "serde_bytes")]
    Binary(Vec<u8>),

-    // Stream markers (used as bookend markers rather than actual values)
+    /// Beginning of stream marker, a pseudo-value not intended for tables
    BeginningOfStream,
+    /// End of stream marker, a pseudo-value not intended for tables
    EndOfStream,
}

impl Primitive {
+    /// Converts a primitive value to a u64, if possible. Uses a span to build an error if the conversion isn't possible.
    pub fn as_u64(&self, span: Span) -> Result<u64, ShellError> {
        match self {
            Primitive::Int(int) => match int.to_u64() {
@@ -57,12 +76,14 @@ impl Primitive {
}

impl From<BigDecimal> for Primitive {
+    /// Helper to convert from decimals to a Primitive value
    fn from(decimal: BigDecimal) -> Primitive {
        Primitive::Decimal(decimal)
    }
}

impl From<f64> for Primitive {
+    /// Helper to convert from 64-bit float to a Primitive value
    fn from(float: f64) -> Primitive {
        if let Some(f) = BigDecimal::from_f64(float) {
            Primitive::Decimal(f)
@@ -73,6 +94,7 @@ impl From<f64> for Primitive {
}

impl ShellTypeName for Primitive {
+    /// Get the name of the type of a Primitive value
    fn type_name(&self) -> &'static str {
        match self {
            Primitive::Nothing => "nothing",
@@ -95,6 +117,7 @@ impl ShellTypeName for Primitive {
    }
}

+/// Format a Primitive value into a string
pub fn format_primitive(primitive: &Primitive, field_name: Option<&String>) -> String {
    match primitive {
        Primitive::Nothing => String::new(),
@@ -154,10 +177,11 @@ pub fn format_primitive(primitive: &Primitive, field_name: Option<&String>) -> S
            }
            .to_owned(),
        Primitive::Binary(_) => "<binary>".to_owned(),
-        Primitive::Date(d) => d.humanize(),
+        Primitive::Date(d) => format_date(d),
    }
}

+/// Format a duration in seconds into a string
pub fn format_duration(sec: u64) -> String {
    let (minutes, seconds) = (sec / 60, sec % 60);
    let (hours, minutes) = (minutes / 60, minutes % 60);
@@ -171,3 +195,68 @@ pub fn format_duration(sec: u64) -> String {
        (d, h, m, s) => format!("{}:{:02}:{:02}:{:02}", d, h, m, s),
    }
}

+/// Format a UTC date value into a humanized string (eg "1 week ago" instead of a formal date string)
+pub fn format_date(d: &DateTime<Utc>) -> String {
+    let utc: DateTime<Utc> = Utc::now();
+
+    let duration = utc.signed_duration_since(*d);
+
+    if duration.num_weeks() >= 52 {
+        let num_years = duration.num_weeks() / 52;
+
+        format!(
+            "{} year{} ago",
+            num_years,
+            if num_years == 1 { "" } else { "s" }
+        )
+    } else if duration.num_weeks() >= 4 {
+        let num_months = duration.num_weeks() / 4;
+
+        format!(
+            "{} month{} ago",
+            num_months,
+            if num_months == 1 { "" } else { "s" }
+        )
+    } else if duration.num_weeks() >= 1 {
+        let num_weeks = duration.num_weeks();
+
+        format!(
+            "{} week{} ago",
+            num_weeks,
+            if num_weeks == 1 { "" } else { "s" }
+        )
+    } else if duration.num_days() >= 1 {
+        let num_days = duration.num_days();
+
+        format!(
+            "{} day{} ago",
+            num_days,
+            if num_days == 1 { "" } else { "s" }
+        )
+    } else if duration.num_hours() >= 1 {
+        let num_hours = duration.num_hours();
+
+        format!(
+            "{} hour{} ago",
+            num_hours,
+            if num_hours == 1 { "" } else { "s" }
+        )
+    } else if duration.num_minutes() >= 1 {
+        let num_minutes = duration.num_minutes();
+
+        format!(
+            "{} min{} ago",
+            num_minutes,
+            if num_minutes == 1 { "" } else { "s" }
+        )
+    } else {
+        let num_seconds = duration.num_seconds();
+
+        format!(
+            "{} sec{} ago",
+            num_seconds,
+            if num_seconds == 1 { "" } else { "s" }
+        )
+    }
+}
@@ -3,6 +3,8 @@ use derive_new::new;
use nu_source::{b, DebugDocBuilder, Spanned};
use serde::{Deserialize, Serialize};

+/// The two types of ways to include a range end. Inclusive means to include the value (eg 1..3 inclusive would include the 3 value).
+/// Exclusive excludes the value (eg 1..3 exclusive does not include 3 value)
#[derive(Debug, Copy, Clone, PartialEq, PartialOrd, Eq, Ord, Hash, Serialize, Deserialize)]
pub enum RangeInclusion {
    Inclusive,
@@ -10,6 +12,7 @@ pub enum RangeInclusion {
}

impl RangeInclusion {
+    /// Get a RangeInclusion left bracket ready for pretty printing
    pub fn debug_left_bracket(self) -> DebugDocBuilder {
        b::delimiter(match self {
            RangeInclusion::Exclusive => "(",
@@ -17,6 +20,7 @@ impl RangeInclusion {
        })
    }

+    /// Get a RangeInclusion right bracket ready for pretty printing
    pub fn debug_right_bracket(self) -> DebugDocBuilder {
        b::delimiter(match self {
            RangeInclusion::Exclusive => ")",
@@ -25,6 +29,7 @@ impl RangeInclusion {
    }
}

+/// The range definition, holding the starting and end point of the range
#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash, Serialize, Deserialize, new)]
pub struct Range {
    pub from: (Spanned<Primitive>, RangeInclusion),
|
|||||||
use num_traits::cast::FromPrimitive;
|
use num_traits::cast::FromPrimitive;
|
||||||
use num_traits::cast::ToPrimitive;
|
use num_traits::cast::ToPrimitive;
|
||||||
|
|
||||||
|
/// Enable big decimal serialization by providing a `serialize` function
|
||||||
pub fn serialize<S>(big_decimal: &BigDecimal, serializer: S) -> Result<S::Ok, S::Error>
|
pub fn serialize<S>(big_decimal: &BigDecimal, serializer: S) -> Result<S::Ok, S::Error>
|
||||||
where
|
where
|
||||||
S: serde::Serializer,
|
S: serde::Serializer,
|
||||||
@ -14,6 +15,7 @@ where
|
|||||||
)
|
)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Enable big decimal deserialization by providing a `deserialize` function
|
||||||
pub fn deserialize<'de, D>(deserializer: D) -> Result<BigDecimal, D::Error>
|
pub fn deserialize<'de, D>(deserializer: D) -> Result<BigDecimal, D::Error>
|
||||||
where
|
where
|
||||||
D: serde::Deserializer<'de>,
|
D: serde::Deserializer<'de>,
|
||||||
|
@@ -2,6 +2,7 @@ use num_bigint::BigInt;
use num_traits::cast::FromPrimitive;
use num_traits::cast::ToPrimitive;

+/// Enable big int serialization by providing a `serialize` function
pub fn serialize<S>(big_int: &BigInt, serializer: S) -> Result<S::Ok, S::Error>
where
    S: serde::Serializer,
@@ -14,6 +15,7 @@ where
    )
}

+/// Enable big int deserialization by providing a `deserialize` function
pub fn deserialize<'de, D>(deserializer: D) -> Result<BigInt, D::Error>
where
    D: serde::Deserializer<'de>,
@@ -1,6 +1,6 @@
[package]
name = "nu-source"
-version = "0.8.0"
+version = "0.9.0"
authors = ["Yehuda Katz <wycats@gmail.com>", "Jonathan Turner <jonathan.d.turner@gmail.com>", "Andrés N. Robalino <andres@androbtech.com>"]
edition = "2018"
description = "A source string characterizer for Nushell"
@@ -20,4 +20,4 @@ termcolor = "1.0.5"
pretty = "0.5.2"

[build-dependencies]
-nu-build = { version = "0.8.0", path = "../nu-build" }
+nu-build = { version = "0.9.0", path = "../nu-build" }
crates/nu-source/README.md (new file, 30 lines)
@@ -0,0 +1,30 @@
+# nu-source
+
+## Overview
+
+The `nu-source` crate contains types and traits used for keeping track of _metadata_ about values being processed.
+Nu uses `Tag`s to keep track of where a value came from, an `AnchorLocation`,
+as well as positional information about the value, a `Span`.
+An `AnchorLocation` can be a `Url`, `File`, or `Source` text that a value was parsed from.
+The source `Text` is special in that it is a type similar to a `String` that comes with the ability to be cheaply cloned.
+A `Span` keeps track of a value's `start` and `end` positions.
+These types make up the metadata for a value and are wrapped up together in a `Tagged` struct,
+which holds everything needed to track and locate a value.
+
+
+Nu's metadata system can be seen when reporting errors.
+In the following example Nu is able to report to the user where the typo of a column originated from.
+
+```
+1 | ls | get typ
+  |          ^^^ did you mean 'type'?
+```
+
+In addition to metadata tracking, `nu-source` also contains types and traits related to debugging, tracing, and formatting the metadata and values it processes.
+
+## Other Resources
+- [Nushell Github Project](https://github.com/nushell): Contains all projects in the Nushell ecosystem such as the source code to Nushell as well as website and books.
+- [Nushell Git Repository](https://github.com/nushell/nushell): A direct link to the source git repository for Nushell
+- [Nushell Contributor Book](https://github.com/nushell/contributor-book): An overview of topics about Nushell to help you get started contributing to the project.
+- [Discord Channel](https://discordapp.com/invite/NtAbbGn)
+- [Twitter](https://twitter.com/nu_shell)
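To make the README's description of the metadata types concrete, here is a small, hypothetical sketch of how a value picks up a `Span`, a `Tag`, and an `AnchorLocation` using the items this crate re-exports. It is illustrative only and not part of the diff; the example file name and the exact byte offsets are assumptions.

```rust
use nu_source::{AnchorLocation, Span, SpannedItem, Tag, TaggedItem};

fn main() {
    // Suppose the column name "type" starts at byte 9 and ends at byte 13 of the source line.
    let span = Span::new(9, 13);

    // A Span alone carries only positional information.
    let spanned_name = "type".to_string().spanned(span);
    println!("spanned: {:?}", spanned_name.span);

    // A Tag bundles the Span with an optional AnchorLocation describing where the text came from.
    let tag = Tag {
        anchor: Some(AnchorLocation::File("script.nu".to_string())),
        span,
    };
    let tagged_name = "type".to_string().tagged(tag);
    println!("tagged: {:?}", tagged_name.tag);
}
```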
@@ -6,10 +6,11 @@ mod tracable;

pub use self::meta::{
    span_for_spanned_list, tag_for_tagged_list, AnchorLocation, HasFallibleSpan, HasSpan, HasTag,
-    Span, Spanned, SpannedItem, Tag, Tagged, TaggedItem,
+    IntoSpanned, Span, Spanned, SpannedItem, Tag, Tagged, TaggedItem,
};
pub use self::pretty::{
-    b, DebugDoc, DebugDocBuilder, PrettyDebug, PrettyDebugWithSource, ShellAnnotation,
+    b, DebugDoc, DebugDocBuilder, PrettyDebug, PrettyDebugRefineKind, PrettyDebugWithSource,
+    ShellAnnotation,
};
pub use self::term_colored::TermColored;
pub use self::text::Text;
@@ -8,17 +8,23 @@ use serde::Deserialize;
use serde::Serialize;
use std::path::{Path, PathBuf};

+/// Anchors represent a location that a value originated from. The value may have been loaded from a file, fetched from a website, or parsed from some text
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub enum AnchorLocation {
+    /// The originating site where the value was first found
    Url(String),
+    /// The original file where the value was loaded from
    File(String),
+    /// The text where the value was parsed from
    Source(Text),
}

pub trait HasTag {
+    /// Get the associated metadata
    fn tag(&self) -> Tag;
}

+/// A wrapper type that attaches a Span to a value
#[derive(new, Debug, Clone, Copy, Eq, PartialEq, Ord, PartialOrd, Serialize, Deserialize, Hash)]
pub struct Spanned<T> {
    pub span: Span,
@@ -26,6 +32,7 @@ pub struct Spanned<T> {
}

impl<T> Spanned<T> {
+    /// Allows mapping over a Spanned value
    pub fn map<U>(self, input: impl FnOnce(T) -> U) -> Spanned<U> {
        let span = self.span;

@@ -35,6 +42,7 @@ impl<T> Spanned<T> {
}

impl Spanned<String> {
+    /// Iterates over the contained String
    pub fn items<'a, U>(
        items: impl Iterator<Item = &'a Spanned<String>>,
    ) -> impl Iterator<Item = &'a str> {
@@ -43,6 +51,7 @@ impl Spanned<String> {
}

impl Spanned<String> {
+    /// Borrows the contained String
    pub fn borrow_spanned(&self) -> Spanned<&str> {
        let span = self.span;
        self.item[..].spanned(span)
@@ -50,6 +59,7 @@ impl Spanned<String> {
}

pub trait SpannedItem: Sized {
+    /// Converts a value into a Spanned value
    fn spanned(self, span: impl Into<Span>) -> Spanned<Self> {
        Spanned {
            item: self,
@@ -57,6 +67,7 @@ pub trait SpannedItem: Sized {
        }
    }

+    /// Converts a value into a Spanned value, using an unknown Span
    fn spanned_unknown(self) -> Spanned<Self> {
        Spanned {
            item: self,
@@ -69,11 +80,13 @@ impl<T> SpannedItem for T {}
impl<T> std::ops::Deref for Spanned<T> {
    type Target = T;

+    /// Shorthand to deref to the contained value
    fn deref(&self) -> &T {
        &self.item
    }
}

+/// A wrapper type that attaches a Tag to a value
#[derive(new, Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Serialize, Deserialize, Hash)]
pub struct Tagged<T> {
    pub tag: Tag,
@@ -81,29 +94,34 @@ pub struct Tagged<T> {
}

impl Tagged<String> {
+    /// Allows borrowing the contained string slice as a spanned value
    pub fn borrow_spanned(&self) -> Spanned<&str> {
        let span = self.tag.span;
        self.item[..].spanned(span)
    }

+    /// Allows borrowing the contained string slice as a tagged value
    pub fn borrow_tagged(&self) -> Tagged<&str> {
        self.item[..].tagged(self.tag.clone())
    }
}

impl<T> Tagged<Vec<T>> {
+    /// Iterates over the contained value(s)
    pub fn items(&self) -> impl Iterator<Item = &T> {
        self.item.iter()
    }
}

impl<T> HasTag for Tagged<T> {
+    /// Helper for getting the Tag from the Tagged value
    fn tag(&self) -> Tag {
        self.tag.clone()
    }
}

impl AsRef<Path> for Tagged<PathBuf> {
+    /// Gets the reference to the contained Path
    fn as_ref(&self) -> &Path {
        self.item.as_ref()
    }
@@ -236,11 +254,14 @@ impl From<&std::ops::Range<usize>> for Span {
    }
}

+/// The set of metadata that can be associated with a value
#[derive(
    Debug, Clone, PartialEq, Eq, Ord, PartialOrd, Serialize, Deserialize, Hash, Getters, new,
)]
pub struct Tag {
+    /// The original source for this value
    pub anchor: Option<AnchorLocation>,
+    /// The span in the source text for the command that created this value
    pub span: Span,
}

@@ -393,7 +414,6 @@ impl Tag {
    }
}

-#[allow(unused)]
pub fn tag_for_tagged_list(mut iter: impl Iterator<Item = Tag>) -> Tag {
    let first = iter.next();

@@ -410,7 +430,6 @@ pub fn tag_for_tagged_list(mut iter: impl Iterator<Item = Tag>) -> Tag {
    }
}

-#[allow(unused)]
pub fn span_for_spanned_list(mut iter: impl Iterator<Item = Span>) -> Span {
    let first = iter.next();

@@ -471,6 +490,16 @@ impl Span {
        }
    }

+    pub fn contains(&self, pos: usize) -> bool {
+        self.start <= pos && self.end >= pos
+    }
+
+    pub fn since(&self, other: impl Into<Span>) -> Span {
+        let other = other.into();
+
+        Span::new(other.start, self.end)
+    }
+
    pub fn until(&self, other: impl Into<Span>) -> Span {
        let other = other.into();

@@ -543,29 +572,66 @@ impl language_reporting::ReportingSpan for Span {
    }
}

-pub trait HasSpan: PrettyDebugWithSource {
-    fn span(&self) -> Span;
+pub trait IntoSpanned {
+    type Output: HasFallibleSpan;

+    fn into_spanned(self, span: impl Into<Span>) -> Self::Output;
}

-pub trait HasFallibleSpan: PrettyDebugWithSource {
-    fn maybe_span(&self) -> Option<Span>;
-}
-impl<T: HasSpan> HasFallibleSpan for T {
-    fn maybe_span(&self) -> Option<Span> {
-        Some(HasSpan::span(self))
+impl<T: HasFallibleSpan> IntoSpanned for T {
+    type Output = T;
+    fn into_spanned(self, _span: impl Into<Span>) -> Self::Output {
+        self
    }
}

-impl<T> HasSpan for Spanned<T>
+pub trait HasSpan {
+    fn span(&self) -> Span;
+}
+
+impl<T, E> HasSpan for Result<T, E>
where
-    Spanned<T>: PrettyDebugWithSource,
+    T: HasSpan,
{
+    fn span(&self) -> Span {
+        match self {
+            Result::Ok(val) => val.span(),
+            Result::Err(_) => Span::unknown(),
+        }
+    }
+}
+
+impl<T> HasSpan for Spanned<T> {
    fn span(&self) -> Span {
        self.span
    }
}

+pub trait HasFallibleSpan {
+    fn maybe_span(&self) -> Option<Span>;
+}
+
+impl HasFallibleSpan for bool {
+    fn maybe_span(&self) -> Option<Span> {
+        None
+    }
+}
+
+impl HasFallibleSpan for () {
+    fn maybe_span(&self) -> Option<Span> {
+        None
+    }
+}
+
+impl<T> HasFallibleSpan for T
+where
+    T: HasSpan,
+{
+    fn maybe_span(&self) -> Option<Span> {
+        Some(HasSpan::span(self))
+    }
+}
+
impl PrettyDebugWithSource for Option<Span> {
    fn pretty_debug(&self, source: &str) -> DebugDocBuilder {
        match self {
@@ -584,8 +650,8 @@ impl HasFallibleSpan for Option<Span> {
impl PrettyDebugWithSource for Span {
    fn pretty_debug(&self, source: &str) -> DebugDocBuilder {
        b::typed(
-            "spanned",
+            "span",
-            b::keyword("for") + b::space() + b::description(format!("{:?}", source)),
+            b::keyword("for") + b::space() + b::description(format!("{:?}", self.slice(source))),
        )
    }
}
@@ -603,15 +669,12 @@ where
    fn pretty_debug(&self, source: &str) -> DebugDocBuilder {
        match self {
            None => b::description("nothing"),
-            Some(v) => v.pretty_debug(source),
+            Some(v) => v.pretty_debug(v.span.slice(source)),
        }
    }
}

-impl<T> HasFallibleSpan for Option<Spanned<T>>
-where
-    Spanned<T>: PrettyDebugWithSource,
-{
+impl<T> HasFallibleSpan for Option<Spanned<T>> {
    fn maybe_span(&self) -> Option<Span> {
        match self {
            None => None,
@@ -632,10 +695,7 @@ where
        }
    }
}

-impl<T> HasFallibleSpan for Option<Tagged<T>>
-where
-    Tagged<T>: PrettyDebugWithSource,
-{
+impl<T> HasFallibleSpan for Option<Tagged<T>> {
    fn maybe_span(&self) -> Option<Span> {
        match self {
            None => None,
@@ -644,10 +704,7 @@ where
        }
    }
}

-impl<T> HasSpan for Tagged<T>
-where
-    Tagged<T>: PrettyDebugWithSource,
-{
+impl<T> HasSpan for Tagged<T> {
    fn span(&self) -> Span {
        self.tag.span
    }
@ -1,3 +1,4 @@
|
|||||||
|
use crate::meta::Spanned;
|
||||||
use crate::term_colored::TermColored;
|
use crate::term_colored::TermColored;
|
||||||
use crate::text::Text;
|
use crate::text::Text;
|
||||||
use derive_new::new;
|
use derive_new::new;
|
||||||
@ -29,7 +30,7 @@ impl From<ShellAnnotation> for ColorSpec {
|
|||||||
.set_intense(false)
|
.set_intense(false)
|
||||||
.clone(),
|
.clone(),
|
||||||
ShellStyle::Key => ColorSpec::new()
|
ShellStyle::Key => ColorSpec::new()
|
||||||
.set_fg(Some(Color::Black))
|
.set_fg(Some(Color::Green))
|
||||||
.set_intense(true)
|
.set_intense(true)
|
||||||
.clone(),
|
.clone(),
|
||||||
ShellStyle::Value => ColorSpec::new()
|
ShellStyle::Value => ColorSpec::new()
|
||||||
@ -37,7 +38,7 @@ impl From<ShellAnnotation> for ColorSpec {
|
|||||||
.set_intense(true)
|
.set_intense(true)
|
||||||
.clone(),
|
.clone(),
|
||||||
ShellStyle::Equals => ColorSpec::new()
|
ShellStyle::Equals => ColorSpec::new()
|
||||||
.set_fg(Some(Color::Black))
|
.set_fg(Some(Color::Green))
|
||||||
.set_intense(true)
|
.set_intense(true)
|
||||||
.clone(),
|
.clone(),
|
||||||
ShellStyle::Kind => ColorSpec::new().set_fg(Some(Color::Cyan)).clone(),
|
ShellStyle::Kind => ColorSpec::new().set_fg(Some(Color::Cyan)).clone(),
|
||||||
@ -56,7 +57,7 @@ impl From<ShellAnnotation> for ColorSpec {
|
|||||||
.set_intense(true)
|
.set_intense(true)
|
||||||
.clone(),
|
.clone(),
|
||||||
ShellStyle::Description => ColorSpec::new()
|
ShellStyle::Description => ColorSpec::new()
|
||||||
.set_fg(Some(Color::Black))
|
.set_fg(Some(Color::Green))
|
||||||
.set_intense(true)
|
.set_intense(true)
|
||||||
.clone(),
|
.clone(),
|
||||||
ShellStyle::Error => ColorSpec::new()
|
ShellStyle::Error => ColorSpec::new()
|
||||||
@ -98,6 +99,21 @@ pub struct DebugDocBuilder {
|
|||||||
pub inner: PrettyDebugDocBuilder,
|
pub inner: PrettyDebugDocBuilder,
|
||||||
}
|
}
|
||||||
|
|
||||||
|
impl PrettyDebug for bool {
|
||||||
|
fn pretty(&self) -> DebugDocBuilder {
|
||||||
|
match self {
|
||||||
|
true => b::primitive("true"),
|
||||||
|
false => b::primitive("false"),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl PrettyDebug for () {
|
||||||
|
fn pretty(&self) -> DebugDocBuilder {
|
||||||
|
b::primitive("nothing")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
impl PrettyDebug for DebugDocBuilder {
|
impl PrettyDebug for DebugDocBuilder {
|
||||||
fn pretty(&self) -> DebugDocBuilder {
|
fn pretty(&self) -> DebugDocBuilder {
|
||||||
self.clone()
|
self.clone()
|
||||||
@ -156,7 +172,7 @@ impl DebugDocBuilder {
|
|||||||
}
|
}
|
||||||
|
|
||||||
pub fn typed(kind: &str, value: DebugDocBuilder) -> DebugDocBuilder {
|
pub fn typed(kind: &str, value: DebugDocBuilder) -> DebugDocBuilder {
|
||||||
b::delimit("(", b::kind(kind) + b::space() + value.group(), ")").group()
|
b::kind(kind) + b::delimit("[", value.group(), "]")
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn subtyped(
|
pub fn subtyped(
|
||||||
@ -340,9 +356,23 @@ pub struct DebugDoc {
|
|||||||
pub inner: PrettyDebugDoc,
|
pub inner: PrettyDebugDoc,
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Copy, Clone)]
|
||||||
|
pub enum PrettyDebugRefineKind {
|
||||||
|
ContextFree,
|
||||||
|
WithContext,
|
||||||
|
}
|
||||||
|
|
||||||
pub trait PrettyDebugWithSource: Sized {
|
pub trait PrettyDebugWithSource: Sized {
|
||||||
fn pretty_debug(&self, source: &str) -> DebugDocBuilder;
|
fn pretty_debug(&self, source: &str) -> DebugDocBuilder;
|
||||||
|
|
||||||
|
fn refined_pretty_debug(
|
||||||
|
&self,
|
||||||
|
_refine: PrettyDebugRefineKind,
|
||||||
|
source: &str,
|
||||||
|
) -> DebugDocBuilder {
|
||||||
|
self.pretty_debug(source)
|
||||||
|
}
|
||||||
|
|
||||||
// This is a transitional convenience method
|
// This is a transitional convenience method
|
||||||
fn debug(&self, source: impl Into<Text>) -> String
|
fn debug(&self, source: impl Into<Text>) -> String
|
||||||
where
|
where
|
||||||
@ -359,12 +389,27 @@ pub trait PrettyDebugWithSource: Sized {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
impl<T: PrettyDebug> PrettyDebug for Spanned<T> {
|
||||||
|
fn pretty(&self) -> DebugDocBuilder {
|
||||||
|
self.item.pretty()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
impl<T: PrettyDebug> PrettyDebugWithSource for T {
|
impl<T: PrettyDebug> PrettyDebugWithSource for T {
|
||||||
fn pretty_debug(&self, _source: &str) -> DebugDocBuilder {
|
fn pretty_debug(&self, _source: &str) -> DebugDocBuilder {
|
||||||
self.pretty()
|
self.pretty()
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
impl<T: PrettyDebugWithSource, E> PrettyDebugWithSource for Result<T, E> {
|
||||||
|
fn pretty_debug(&self, source: &str) -> DebugDocBuilder {
|
||||||
|
match self {
|
||||||
|
Err(_) => b::error("error"),
|
||||||
|
Ok(val) => val.pretty_debug(source),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
pub struct DebuggableWithSource<T: PrettyDebugWithSource> {
|
pub struct DebuggableWithSource<T: PrettyDebugWithSource> {
|
||||||
inner: T,
|
inner: T,
|
||||||
source: Text,
|
source: Text,
|
||||||
|
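The hunk above introduces `PrettyDebugRefineKind` and a provided `refined_pretty_debug` method, so implementors that do not care about refinement automatically fall back to `pretty_debug`. Below is a minimal standalone sketch of that default-method pattern; it uses plain `String`s and made-up names (`RefineKind`, `Token`) rather than the real `DebugDocBuilder` types, purely to illustrate the fallback:

#[derive(Debug, Copy, Clone)]
enum RefineKind {
    ContextFree,
    WithContext,
}

trait DebugWithSource {
    fn pretty_debug(&self, source: &str) -> String;

    // Provided method: types that never override it inherit the plain rendering.
    fn refined_pretty_debug(&self, _refine: RefineKind, source: &str) -> String {
        self.pretty_debug(source)
    }
}

struct Token {
    start: usize,
    end: usize,
}

impl DebugWithSource for Token {
    fn pretty_debug(&self, source: &str) -> String {
        format!("token({})", &source[self.start..self.end])
    }
}

fn main() {
    let t = Token { start: 0, end: 4 };
    // Both calls print "token(echo)": Token relies on the default fallback.
    println!("{}", t.pretty_debug("echo 42"));
    println!("{}", t.refined_pretty_debug(RefineKind::WithContext, "echo 42"));
}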
@@ -1,6 +1,6 @@
 [package]
 name = "nu-test-support"
-version = "0.8.0"
+version = "0.9.0"
 authors = ["Yehuda Katz <wycats@gmail.com>", "Jonathan Turner <jonathan.d.turner@gmail.com>", "Andrés N. Robalino <andres@androbtech.com>"]
 edition = "2018"
 description = "A source string characterizer for Nushell"
@@ -10,8 +10,16 @@ license = "MIT"
 doctest = false
 
 [dependencies]
+nu-parser = { path = "../nu-parser", version = "0.9.0" }
+nu-source = { path = "../nu-source", version = "0.9.0" }
+nu-protocol = { path = "../nu-protocol", version = "0.9.0" }
+
 app_dirs = "1.2.1"
 dunce = "1.0.0"
 getset = "0.0.9"
 glob = "0.3.0"
 tempfile = "3.1.0"
+indexmap = { version = "1.3.0", features = ["serde-1"] }
+
+[build-dependencies]
+nu-build = { version = "0.9.0", path = "../nu-build" }
crates/nu-test-support/build.rs (new file, 3 lines)
@@ -0,0 +1,3 @@
+fn main() -> Result<(), Box<dyn std::error::Error>> {
+    nu_build::build()
+}
crates/nu-test-support/src/bins/chop.rs (new file, 44 lines)
@@ -0,0 +1,44 @@
+use std::io::{self, BufRead};
+
+fn main() {
+    if did_chop_arguments() {
+        // we are done and don't care about standard input.
+        std::process::exit(0);
+    }
+
+    // if no arguments given, chop from standard input and exit.
+    let stdin = io::stdin();
+    let mut input = stdin.lock().lines();
+
+    if let Some(Ok(given)) = input.next() {
+        if !given.is_empty() {
+            println!("{}", chop(&given));
+            std::process::exit(0);
+        }
+    }
+
+    std::process::exit(0);
+}
+
+fn chop(word: &str) -> &str {
+    let to = word.len() - 1;
+
+    &word[..to]
+}
+
+fn did_chop_arguments() -> bool {
+    let args: Vec<String> = std::env::args().collect();
+
+    if args.len() > 1 {
+        let mut arguments = args.iter();
+        arguments.next();
+
+        for arg in arguments {
+            println!("{}", chop(arg));
+        }
+
+        return true;
+    }
+
+    false
+}
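For reference, the dummy `chop` binary above simply drops the last byte of each argument (or of the first line it reads from stdin). A tiny standalone check of that helper's behaviour, with arbitrary example inputs:

fn chop(word: &str) -> &str {
    let to = word.len() - 1;
    &word[..to]
}

fn main() {
    assert_eq!(chop("nushell"), "nushel");
    assert_eq!(chop("andres"), "andre");
    println!("chop behaves as expected");
}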
crates/nu-test-support/src/bins/cococo.rs (new file, 17 lines)
@@ -0,0 +1,17 @@
+fn main() {
+    let args: Vec<String> = std::env::args().collect();
+
+    if args.len() > 1 {
+        // Write back out all the arguments passed
+        // if given at least 1 instead of chickens
+        // speaking co co co.
+        let mut arguments = args.iter();
+        arguments.next();
+
+        for arg in arguments {
+            println!("{}", &arg);
+        }
+    } else {
+        println!("cococo");
+    }
+}
crates/nu-test-support/src/bins/fail.rs (new file, 3 lines)
@@ -0,0 +1,3 @@
+fn main() {
+    std::process::exit(1);
+}
crates/nu-test-support/src/commands.rs (new file, 50 lines)
@@ -0,0 +1,50 @@
+use nu_parser::commands::classified::external::{ExternalArg, ExternalArgs, ExternalCommand};
+use nu_source::{Span, SpannedItem, Tag, TaggedItem};
+
+pub struct ExternalBuilder {
+    name: String,
+    args: Vec<String>,
+}
+
+impl ExternalBuilder {
+    pub fn for_name(name: &str) -> ExternalBuilder {
+        ExternalBuilder {
+            name: name.to_string(),
+            args: vec![],
+        }
+    }
+
+    pub fn arg(&mut self, value: &str) -> &mut Self {
+        self.args.push(value.to_string());
+        self
+    }
+
+    pub fn build(&mut self) -> ExternalCommand {
+        let mut path = crate::fs::binaries();
+        path.push(&self.name);
+
+        let name = path.to_string_lossy().to_string().spanned(Span::unknown());
+
+        let args = self
+            .args
+            .iter()
+            .map(|arg| {
+                let arg = arg.tagged(Tag::unknown());
+
+                ExternalArg {
+                    arg: arg.to_string(),
+                    tag: arg.tag,
+                }
+            })
+            .collect::<Vec<_>>();
+
+        ExternalCommand {
+            name: name.to_string(),
+            name_tag: Tag::unknown(),
+            args: ExternalArgs {
+                list: args,
+                span: name.span,
+            },
+        }
+    }
+}
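A hedged usage sketch for the new builder (assuming the 0.9.0 nu-test-support crate and its nu-parser types are available to the test; the binary name is just one of the dummy binaries added above): it resolves the dummy binary's path via `fs::binaries()` and wraps it in a classified `ExternalCommand`.

use nu_test_support::commands::ExternalBuilder;

fn main() {
    // Build an ExternalCommand pointing at the dummy `cococo` test binary.
    let _external = ExternalBuilder::for_name("cococo").arg("hello").build();
    println!("built an ExternalCommand for the cococo test binary");
}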
@@ -220,11 +220,18 @@ pub fn delete_directory_at(full_path: &str) {
 }
 
 pub fn executable_path() -> PathBuf {
-    let mut buf = PathBuf::new();
-    buf.push("target");
-    buf.push("debug");
-    buf.push("nu");
-    buf
+    let mut path = binaries();
+    path.push("nu");
+    path
+}
+
+pub fn binaries() -> PathBuf {
+    PathBuf::from(env!("CARGO_MANIFEST_DIR"))
+        .parent()
+        .expect("Couldn't find the debug binaries directory")
+        .parent()
+        .expect("Couldn't find the debug binaries directory")
+        .join("target/debug")
 }
 
 pub fn in_directory(str: impl AsRef<Path>) -> String {
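The new `binaries()` helper climbs from the crate's manifest directory up two levels to the workspace root and then into `target/debug`, and `executable_path()` now builds on it. A small standalone sketch of that path arithmetic, using a made-up workspace path:

use std::path::PathBuf;

fn binaries_for(manifest_dir: &str) -> PathBuf {
    // Two parent() hops: crates/nu-test-support -> crates -> workspace root.
    PathBuf::from(manifest_dir)
        .parent()
        .expect("crate directory has a parent")
        .parent()
        .expect("crates directory has a parent")
        .join("target/debug")
}

fn main() {
    let dir = binaries_for("/home/user/nushell/crates/nu-test-support");
    assert_eq!(dir, PathBuf::from("/home/user/nushell/target/debug"));
    println!("{}", dir.display());
}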
@@ -1,3 +1,4 @@
+pub mod commands;
 pub mod fs;
 pub mod macros;
 pub mod playground;
@@ -13,7 +14,17 @@ pub fn pipeline(commands: &str) -> String {
         .to_string()
 }
 
-#[cfg(tests)]
+pub fn shell_os_paths() -> Vec<std::path::PathBuf> {
+    let mut original_paths = vec![];
+
+    if let Some(paths) = std::env::var_os("PATH") {
+        original_paths = std::env::split_paths(&paths).collect::<Vec<_>>();
+    }
+
+    original_paths
+}
+
+#[cfg(test)]
 mod tests {
     use super::pipeline;
 
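`shell_os_paths` just splits the caller's PATH into a `Vec<PathBuf>`; the test macros in the next file append the dummy-binaries directory to it and join everything back for the child process. A standalone sketch of that round trip using only the standard library (the appended directory is illustrative):

use std::path::PathBuf;

fn main() {
    // Split the current PATH, mirroring shell_os_paths().
    let mut paths: Vec<PathBuf> = match std::env::var_os("PATH") {
        Some(paths) => std::env::split_paths(&paths).collect(),
        None => vec![],
    };

    // Append an extra search directory, as the nu!/nu_error! macros do with
    // the canonicalized test-binaries directory.
    paths.push(PathBuf::from("target/debug"));

    let joined = std::env::join_paths(paths.iter()).expect("couldn't join paths");
    println!("{}", joined.to_string_lossy());
}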
@@ -28,7 +28,25 @@ macro_rules! nu {
            $crate::fs::DisplayPath::display_path(&$path)
        );
 
+        let test_bins = $crate::fs::binaries();
+        let test_bins = dunce::canonicalize(&test_bins).unwrap_or_else(|e| {
+            panic!(
+                "Couldn't canonicalize dummy binaries path {}: {:?}",
+                test_bins.display(),
+                e
+            )
+        });
+
+        let mut paths = $crate::shell_os_paths();
+        paths.push(test_bins);
+
+        let paths_joined = match std::env::join_paths(paths.iter()) {
+            Ok(all) => all,
+            Err(_) => panic!("Couldn't join paths for PATH var."),
+        };
+
         let mut process = match Command::new($crate::fs::executable_path())
+            .env("PATH", paths_joined)
             .stdin(Stdio::piped())
             .stdout(Stdio::piped())
             .spawn()
@@ -42,18 +60,26 @@ macro_rules! nu {
            .write_all(commands.as_bytes())
            .expect("couldn't write to stdin");
 
 
        let output = process
            .wait_with_output()
            .expect("couldn't read from stdout");
 
-       let out = String::from_utf8_lossy(&output.stdout);
-       let out = out.replace("\r\n", "");
-       let out = out.replace("\n", "");
+       let out = $crate::macros::read_std(&output.stdout);
+       let err = $crate::macros::read_std(&output.stderr);
+
+       println!("=== stderr\n{}", err);
 
        out
    }};
 }
 
+pub fn read_std(std: &[u8]) -> String {
+    let out = String::from_utf8_lossy(std);
+    let out = out.lines().skip(1).collect::<Vec<_>>().join("\n");
+    let out = out.replace("\r\n", "");
+    out.replace("\n", "")
+}
+
 #[macro_export]
 macro_rules! nu_error {
    (cwd: $cwd:expr, $path:expr, $($part:expr),*) => {{
@@ -84,7 +110,26 @@ macro_rules! nu_error {
            $crate::fs::DisplayPath::display_path(&$path)
        );
 
+        let test_bins = $crate::fs::binaries();
+        let test_bins = dunce::canonicalize(&test_bins).unwrap_or_else(|e| {
+            panic!(
+                "Couldn't canonicalize dummy binaries path {}: {:?}",
+                test_bins.display(),
+                e
+            )
+        });
+
+        let mut paths = $crate::shell_os_paths();
+        paths.push(test_bins);
+
+        let paths_joined = match std::env::join_paths(paths.iter()) {
+            Ok(all) => all,
+            Err(_) => panic!("Couldn't join paths for PATH var."),
+        };
+
         let mut process = Command::new($crate::fs::executable_path())
+            .env("PATH", paths_joined)
+            .stdout(Stdio::piped())
             .stdin(Stdio::piped())
             .stderr(Stdio::piped())
             .spawn()
@@ -97,7 +142,7 @@ macro_rules! nu_error {
 
        let output = process
            .wait_with_output()
-           .expect("couldn't read from stderr");
+           .expect("couldn't read from stdout/stderr");
 
        let out = String::from_utf8_lossy(&output.stderr);
        out.into_owned()
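The new `read_std` helper added above normalizes captured output before tests compare it: it decodes the bytes, drops the first line, and strips the remaining line breaks into one flat string. A standalone copy with a small check (the sample bytes are made up):

fn read_std(std: &[u8]) -> String {
    let out = String::from_utf8_lossy(std);
    let out = out.lines().skip(1).collect::<Vec<_>>().join("\n");
    let out = out.replace("\r\n", "");
    out.replace("\n", "")
}

fn main() {
    let captured = b"first line is skipped\nrow one\nrow two\n";
    assert_eq!(read_std(captured), "row onerow two");
    println!("{}", read_std(captured));
}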
@@ -1,6 +1,6 @@
 [package]
 name = "nu-value-ext"
-version = "0.8.0"
+version = "0.9.0"
 authors = ["Yehuda Katz <wycats@gmail.com>", "Jonathan Turner <jonathan.d.turner@gmail.com>", "Andrés N. Robalino <andres@androbtech.com>"]
 edition = "2018"
 description = "Extension traits for values in Nushell"
@@ -10,13 +10,14 @@ license = "MIT"
 doctest = false
 
 [dependencies]
-nu-source = { path = "../nu-source", version = "0.8.0" }
-nu-errors = { path = "../nu-errors", version = "0.8.0" }
-nu-parser = { path = "../nu-parser", version = "0.8.0" }
-nu-protocol = { path = "../nu-protocol", version = "0.8.0" }
+nu-source = { path = "../nu-source", version = "0.9.0" }
+nu-errors = { path = "../nu-errors", version = "0.9.0" }
+nu-parser = { path = "../nu-parser", version = "0.9.0" }
+nu-protocol = { path = "../nu-protocol", version = "0.9.0" }
 
 num-traits = "0.2.10"
 itertools = "0.8.2"
+indexmap = { version = "1.3.0", features = ["serde-1"] }
 
 [build-dependencies]
-nu-build = { version = "0.8.0", path = "../nu-build" }
+nu-build = { version = "0.9.0", path = "../nu-build" }
@@ -8,6 +8,8 @@ use nu_source::{HasSpan, PrettyDebug, Spanned, SpannedItem, Tag, Tagged, TaggedI
 use num_traits::cast::ToPrimitive;
 
 pub trait ValueExt {
+    fn row_entries(&self) -> RowValueIter<'_>;
+    fn table_entries(&self) -> TableValueIter<'_>;
     fn into_parts(self) -> (UntaggedValue, Tag);
     fn get_data(&self, desc: &str) -> MaybeOwned<'_, Value>;
     fn get_data_by_key(&self, name: Spanned<&str>) -> Option<Value>;
@@ -39,6 +41,14 @@ pub trait ValueExt {
 }
 
 impl ValueExt for Value {
+    fn row_entries(&self) -> RowValueIter<'_> {
+        row_entries(self)
+    }
+
+    fn table_entries(&self) -> TableValueIter<'_> {
+        table_entries(self)
+    }
+
     fn into_parts(self) -> (UntaggedValue, Tag) {
         (self.value, self.tag)
     }
@@ -398,7 +408,19 @@ pub fn as_string(value: &Value) -> Result<String, ShellError> {
         UntaggedValue::Primitive(Primitive::Bytes(x)) => Ok(format!("{}", x)),
         UntaggedValue::Primitive(Primitive::Path(x)) => Ok(format!("{}", x.display())),
         UntaggedValue::Primitive(Primitive::ColumnPath(path)) => {
-            Ok(path.iter().map(|member| member.display()).join("."))
+            let joined = path
+                .iter()
+                .map(|member| match &member.unspanned {
+                    UnspannedPathMember::String(name) => name.to_string(),
+                    UnspannedPathMember::Int(n) => format!("{}", n),
+                })
+                .join(".");
+
+            if joined.contains(' ') {
+                Ok(format!("\"{}\"", joined))
+            } else {
+                Ok(joined)
+            }
         }
 
         // TODO: this should definitely be more general with better errors
@@ -512,3 +534,52 @@ pub(crate) fn get_mut_data_by_member<'value>(
         _ => None,
     }
 }
+
+pub enum RowValueIter<'a> {
+    Empty,
+    Entries(indexmap::map::Iter<'a, String, Value>),
+}
+
+pub enum TableValueIter<'a> {
+    Empty,
+    Entries(std::slice::Iter<'a, Value>),
+}
+
+impl<'a> Iterator for RowValueIter<'a> {
+    type Item = (&'a String, &'a Value);
+
+    fn next(&mut self) -> Option<Self::Item> {
+        match self {
+            RowValueIter::Empty => None,
+            RowValueIter::Entries(iter) => iter.next(),
+        }
+    }
+}
+
+impl<'a> Iterator for TableValueIter<'a> {
+    type Item = &'a Value;
+
+    fn next(&mut self) -> Option<Self::Item> {
+        match self {
+            TableValueIter::Empty => None,
+            TableValueIter::Entries(iter) => iter.next(),
+        }
+    }
+}
+
+pub fn table_entries(value: &Value) -> TableValueIter<'_> {
+    match &value.value {
+        UntaggedValue::Table(t) => TableValueIter::Entries(t.iter()),
+        _ => TableValueIter::Empty,
+    }
+}
+
+pub fn row_entries(value: &Value) -> RowValueIter<'_> {
+    match &value.value {
+        UntaggedValue::Row(o) => {
+            let iter = o.entries.iter();
+            RowValueIter::Entries(iter)
+        }
+        _ => RowValueIter::Empty,
+    }
+}
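The `as_string` change above renders a column path by joining its members with "." and, newly, wrapping the result in quotes when it contains a space. A minimal sketch of that formatting rule with plain string members instead of `UnspannedPathMember`:

fn column_path_to_string(members: &[&str]) -> String {
    let joined = members.join(".");

    if joined.contains(' ') {
        format!("\"{}\"", joined)
    } else {
        joined
    }
}

fn main() {
    assert_eq!(column_path_to_string(&["package", "version"]), "package.version");
    assert_eq!(
        column_path_to_string(&["package", "target os"]),
        "\"package.target os\""
    );
    println!("column path formatting matches the new rule");
}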
@@ -1,16 +1,16 @@
 [package]
 name = "nu_plugin_average"
-version = "0.8.0"
+version = "0.9.0"
 authors = ["Yehuda Katz <wycats@gmail.com>", "Jonathan Turner <jonathan.d.turner@gmail.com>", "Andrés N. Robalino <andres@androbtech.com>"]
 edition = "2018"
 description = "An average value plugin for Nushell"
 license = "MIT"
 
 [dependencies]
-nu-plugin = { path = "../nu-plugin", version="0.8.0" }
-nu-protocol = { path = "../nu-protocol", version = "0.8.0" }
-nu-source = { path = "../nu-source", version = "0.8.0" }
-nu-errors = { path = "../nu-errors", version = "0.8.0" }
+nu-plugin = { path = "../nu-plugin", version = "0.9.0" }
+nu-protocol = { path = "../nu-protocol", version = "0.9.0" }
+nu-source = { path = "../nu-source", version = "0.9.0" }
+nu-errors = { path = "../nu-errors", version = "0.9.0" }
 
 [build-dependencies]
-nu-build = { version = "0.8.0", path = "../nu-build" }
+nu-build = { version = "0.9.0", path = "../nu-build" }
@@ -1,6 +1,6 @@
 [package]
 name = "nu_plugin_binaryview"
-version = "0.8.0"
+version = "0.9.0"
 authors = ["Yehuda Katz <wycats@gmail.com>", "Jonathan Turner <jonathan.d.turner@gmail.com>", "Andrés N. Robalino <andres@androbtech.com>"]
 edition = "2018"
 description = "A binary viewer plugin for Nushell"
@@ -8,15 +8,15 @@ license = "MIT"
 
 [dependencies]
 ansi_term = "0.12.1"
-crossterm = { version = "0.10.2" }
-nu-plugin = { path = "../nu-plugin", version="0.8.0" }
-nu-protocol = { path = "../nu-protocol", version = "0.8.0" }
-nu-source = { path = "../nu-source", version = "0.8.0" }
-nu-errors = { path = "../nu-errors", version = "0.8.0" }
+crossterm = { version = "0.14.2" }
+nu-plugin = { path = "../nu-plugin", version = "0.9.0" }
+nu-protocol = { path = "../nu-protocol", version = "0.9.0" }
+nu-source = { path = "../nu-source", version = "0.9.0" }
+nu-errors = { path = "../nu-errors", version = "0.9.0" }
 pretty-hex = "0.1.1"
-image = { version = "0.22.3", default_features = false, features = ["png_codec", "jpeg"] }
+image = { version = "0.22.4", default_features = false, features = ["png_codec", "jpeg"] }
 rawkey = "0.1.2"
 neso = "0.5.0"
 
 [build-dependencies]
-nu-build = { version = "0.8.0", path = "../nu-build" }
+nu-build = { version = "0.9.0", path = "../nu-build" }
@@ -1,4 +1,4 @@
-use crossterm::{cursor, terminal, Attribute, RawScreen};
+use crossterm::{style::Attribute, ExecutableCommand};
 use nu_errors::ShellError;
 use nu_plugin::{serve_plugin, Plugin};
 use nu_protocol::{outln, CallInfo, Primitive, Signature, UntaggedValue, Value};
@@ -71,8 +71,7 @@ impl RenderContext {
         let mut prev_color: Option<(u8, u8, u8)> = None;
         let mut prev_count = 1;
 
-        let cursor = cursor();
-        cursor.goto(0, 0)?;
+        let _ = std::io::stdout().execute(crossterm::cursor::MoveTo(0, 0));
 
         for pixel in &self.frame_buffer {
             match prev_color {
@@ -115,8 +114,7 @@ impl RenderContext {
         let mut pos = 0;
         let fb_len = self.frame_buffer.len();
 
-        let cursor = cursor();
-        cursor.goto(0, 0)?;
+        let _ = std::io::stdout().execute(crossterm::cursor::MoveTo(0, 0));
 
         while pos < (fb_len - self.width) {
             let top_pixel = self.frame_buffer[pos];
@@ -169,12 +167,10 @@ impl RenderContext {
         }
     }
     pub fn update(&mut self) -> Result<(), Box<dyn std::error::Error>> {
-        let terminal = terminal();
-        let terminal_size = terminal.terminal_size();
+        let terminal_size = crossterm::terminal::size().unwrap_or_else(|_| (80, 24));
 
         if (self.width != terminal_size.0 as usize) || (self.height != terminal_size.1 as usize) {
-            let cursor = cursor();
-            cursor.hide()?;
+            let _ = std::io::stdout().execute(crossterm::cursor::Hide);
 
             self.width = terminal_size.0 as usize;
             self.height = if self.lores_mode {
@@ -305,10 +301,9 @@ pub fn view_contents(
 
     render_context.flush()?;
 
-    let cursor = cursor();
-    let _ = cursor.show();
+    let _ = std::io::stdout().execute(crossterm::cursor::Show);
 
-    let _ = RawScreen::disable_raw_mode();
+    let _ = crossterm::terminal::disable_raw_mode();
 
     Ok(())
 }
@@ -340,11 +335,8 @@ pub fn view_contents_interactive(
 
     nes.reset();
 
-    if let Ok(_raw) = RawScreen::into_raw_mode() {
+    if let Ok(_raw) = crossterm::terminal::enable_raw_mode() {
         let mut render_context: RenderContext = RenderContext::blank(lores_mode);
-        let input = crossterm::input();
-        let _ = input.read_async();
-        let cursor = cursor();
 
         let buttons = vec![
             KeyCode::Alt,
@@ -357,7 +349,7 @@ pub fn view_contents_interactive(
             KeyCode::RightArrow,
         ];
 
-        cursor.hide()?;
+        let _ = std::io::stdout().execute(crossterm::cursor::Hide);
 
         'gameloop: loop {
             let _ = render_context.update();
@@ -397,6 +389,18 @@ pub fn view_contents_interactive(
                        nes.release_button(0, idx as u8);
                    }
                }
+                loop {
+                    let x = crossterm::event::poll(std::time::Duration::from_secs(0));
+                    match x {
+                        Ok(true) => {
+                            // Swallow the events so we don't queue them into the line editor
+                            let _ = crossterm::event::read();
+                        }
+                        _ => {
+                            break;
+                        }
+                    }
+                }
            }
        }
    }
@@ -408,10 +412,9 @@ pub fn view_contents_interactive(
         }
     }
 
-    let cursor = cursor();
-    let _ = cursor.show();
+    let _ = std::io::stdout().execute(crossterm::cursor::Show);
 
-    let _screen = RawScreen::disable_raw_mode();
+    let _screen = crossterm::terminal::disable_raw_mode();
 
     Ok(())
 }
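The binaryview changes above are a migration from the pre-0.14 crossterm API (`cursor()`, `terminal()`, `RawScreen`, `input()`) to the 0.14 style pinned in the Cargo.toml above: commands are executed on a writer through `ExecutableCommand`, raw mode is toggled with free functions, and pending input is drained with `event::poll`/`event::read`. A short standalone sketch of those calls, assuming crossterm 0.14 as a dependency:

use crossterm::ExecutableCommand;

fn main() -> Result<(), Box<dyn std::error::Error>> {
    crossterm::terminal::enable_raw_mode()?;

    let mut stdout = std::io::stdout();
    stdout.execute(crossterm::cursor::Hide)?;
    stdout.execute(crossterm::cursor::MoveTo(0, 0))?;

    // Drain any queued events, as the plugin now does before returning control
    // to the line editor.
    while crossterm::event::poll(std::time::Duration::from_secs(0))? {
        let _ = crossterm::event::read();
    }

    stdout.execute(crossterm::cursor::Show)?;
    crossterm::terminal::disable_raw_mode()?;
    Ok(())
}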
@@ -1,19 +1,19 @@
 [package]
 name = "nu_plugin_fetch"
-version = "0.8.0"
+version = "0.9.0"
 authors = ["Yehuda Katz <wycats@gmail.com>", "Jonathan Turner <jonathan.d.turner@gmail.com>", "Andrés N. Robalino <andres@androbtech.com>"]
 edition = "2018"
 description = "A URL fetch plugin for Nushell"
 license = "MIT"
 
 [dependencies]
-nu-plugin = { path = "../nu-plugin", version="0.8.0" }
-nu-protocol = { path = "../nu-protocol", version = "0.8.0" }
-nu-source = { path = "../nu-source", version = "0.8.0" }
-nu-errors = { path = "../nu-errors", version = "0.8.0" }
+nu-plugin = { path = "../nu-plugin", version = "0.9.0" }
+nu-protocol = { path = "../nu-protocol", version = "0.9.0" }
+nu-source = { path = "../nu-source", version = "0.9.0" }
+nu-errors = { path = "../nu-errors", version = "0.9.0" }
 futures-preview = { version = "=0.3.0-alpha.19", features = ["compat", "io-compat"] }
 surf = "1.0.3"
 url = "2.1.0"
 
 [build-dependencies]
-nu-build = { version = "0.8.0", path = "../nu-build" }
+nu-build = { version = "0.9.0", path = "../nu-build" }
@@ -1,6 +1,6 @@
 [package]
 name = "nu_plugin_inc"
-version = "0.8.0"
+version = "0.9.0"
 authors = ["Yehuda Katz <wycats@gmail.com>", "Jonathan Turner <jonathan.d.turner@gmail.com>", "Andrés N. Robalino <andres@androbtech.com>"]
 edition = "2018"
 description = "A version incrementer plugin for Nushell"
@@ -10,12 +10,12 @@ license = "MIT"
 doctest = false
 
 [dependencies]
-nu-plugin = { path = "../nu-plugin", version="0.8.0" }
-nu-protocol = { path = "../nu-protocol", version = "0.8.0" }
-nu-source = { path = "../nu-source", version = "0.8.0" }
-nu-errors = { path = "../nu-errors", version = "0.8.0" }
-nu-value-ext = { path = "../nu-value-ext", version = "0.8.0" }
+nu-plugin = { path = "../nu-plugin", version = "0.9.0" }
+nu-protocol = { path = "../nu-protocol", version = "0.9.0" }
+nu-source = { path = "../nu-source", version = "0.9.0" }
+nu-errors = { path = "../nu-errors", version = "0.9.0" }
+nu-value-ext = { path = "../nu-value-ext", version = "0.9.0" }
 semver = "0.9.0"
 
 [build-dependencies]
-nu-build = { version = "0.8.0", path = "../nu-build" }
+nu-build = { version = "0.9.0", path = "../nu-build" }
@@ -1,18 +1,18 @@
 [package]
 name = "nu_plugin_match"
-version = "0.8.0"
+version = "0.9.0"
 authors = ["Yehuda Katz <wycats@gmail.com>", "Jonathan Turner <jonathan.d.turner@gmail.com>", "Andrés N. Robalino <andres@androbtech.com>"]
 edition = "2018"
 description = "A regex match plugin for Nushell"
 license = "MIT"
 
 [dependencies]
-nu-plugin = { path = "../nu-plugin", version="0.8.0" }
-nu-protocol = { path = "../nu-protocol", version = "0.8.0" }
-nu-source = { path = "../nu-source", version = "0.8.0" }
-nu-errors = { path = "../nu-errors", version = "0.8.0" }
+nu-plugin = { path = "../nu-plugin", version = "0.9.0" }
+nu-protocol = { path = "../nu-protocol", version = "0.9.0" }
+nu-source = { path = "../nu-source", version = "0.9.0" }
+nu-errors = { path = "../nu-errors", version = "0.9.0" }
 futures-preview = { version = "=0.3.0-alpha.19", features = ["compat", "io-compat"] }
 regex = "1"
 
 [build-dependencies]
-nu-build = { version = "0.8.0", path = "../nu-build" }
+nu-build = { version = "0.9.0", path = "../nu-build" }
@@ -1,16 +1,16 @@
 [package]
 name = "nu_plugin_post"
-version = "0.8.0"
+version = "0.9.0"
 authors = ["Yehuda Katz <wycats@gmail.com>", "Jonathan Turner <jonathan.d.turner@gmail.com>", "Andrés N. Robalino <andres@androbtech.com>"]
 edition = "2018"
 description = "An HTTP post plugin for Nushell"
 license = "MIT"
 
 [dependencies]
-nu-plugin = { path = "../nu-plugin", version="0.8.0" }
-nu-protocol = { path = "../nu-protocol", version = "0.8.0" }
-nu-source = { path = "../nu-source", version = "0.8.0" }
-nu-errors = { path = "../nu-errors", version = "0.8.0" }
+nu-plugin = { path = "../nu-plugin", version = "0.9.0" }
+nu-protocol = { path = "../nu-protocol", version = "0.9.0" }
+nu-source = { path = "../nu-source", version = "0.9.0" }
+nu-errors = { path = "../nu-errors", version = "0.9.0" }
 futures-preview = { version = "=0.3.0-alpha.19", features = ["compat", "io-compat"] }
 surf = "1.0.3"
 url = "2.1.0"
@@ -19,4 +19,4 @@ base64 = "0.11"
 num-traits = "0.2.10"
 
 [build-dependencies]
-nu-build = { version = "0.8.0", path = "../nu-build" }
+nu-build = { version = "0.9.0", path = "../nu-build" }
@@ -1,16 +1,16 @@
 [package]
 name = "nu_plugin_ps"
-version = "0.8.0"
+version = "0.9.0"
 authors = ["Yehuda Katz <wycats@gmail.com>", "Jonathan Turner <jonathan.d.turner@gmail.com>", "Andrés N. Robalino <andres@androbtech.com>"]
 edition = "2018"
 description = "A process list plugin for Nushell"
 license = "MIT"
 
 [dependencies]
-nu-plugin = { path = "../nu-plugin", version="0.8.0" }
-nu-protocol = { path = "../nu-protocol", version = "0.8.0" }
-nu-source = { path = "../nu-source", version = "0.8.0" }
-nu-errors = { path = "../nu-errors", version = "0.8.0" }
+nu-plugin = { path = "../nu-plugin", version = "0.9.0" }
+nu-protocol = { path = "../nu-protocol", version = "0.9.0" }
+nu-source = { path = "../nu-source", version = "0.9.0" }
+nu-errors = { path = "../nu-errors", version = "0.9.0" }
 
 futures-preview = { version = "=0.3.0-alpha.19", features = ["compat", "io-compat"] }
 heim = "0.0.9"
@@ -19,4 +19,4 @@ pin-utils = "0.1.0-alpha.4"
 futures-util = "0.3.1"
 
 [build-dependencies]
-nu-build = { version = "0.8.0", path = "../nu-build" }
+nu-build = { version = "0.9.0", path = "../nu-build" }
Some files were not shown because too many files have changed in this diff.