forked from extern/nushell
Compare commits
132 Commits
SHA1 | Author | Date |
---|---|---|
01d6287a8f | |||
0462b2db80 | |||
4cb399ed70 | |||
7ef9f7702f | |||
44a1686a76 | |||
15c6d24178 | |||
3b84e3ccfe | |||
da7d6beb22 | |||
f012eb7bdd | |||
f966394b63 | |||
889d2bb378 | |||
a2c4e485ba | |||
8860d8de8d | |||
d7b768ee9f | |||
6ea8e42331 | |||
1b784cb77a | |||
4a0ec1207c | |||
ffb2fedca9 | |||
382b1ba85f | |||
3b42655b51 | |||
e43e906f86 | |||
e51d9d0935 | |||
f57489ed92 | |||
503e521820 | |||
c317094947 | |||
243df63978 | |||
05ff102e09 | |||
cd30fac050 | |||
f589d3c795 | |||
51879d022e | |||
2260b3dda3 | |||
aa64442453 | |||
129ee45944 | |||
2fe7d105b0 | |||
136c8acba6 | |||
e92d4b2ccb | |||
6e91c96dd7 | |||
7801c03e2d | |||
763bbe1c01 | |||
0ea3527544 | |||
20dfca073f | |||
a3679f0f4e | |||
e75fdc2865 | |||
4be88ff572 | |||
992789af26 | |||
b822e13f12 | |||
cd058db046 | |||
1b3143d3d4 | |||
e31ed66610 | |||
7f18ff10b2 | |||
65ae24fbf1 | |||
b54ce921dd | |||
7614ce4b49 | |||
9d34ec9153 | |||
fd92271884 | |||
cea8fab307 | |||
2d44b7d296 | |||
faccb0627f | |||
a9cd6b4f7a | |||
81691e07c6 | |||
26f40dcabc | |||
3820fef801 | |||
392ff286b2 | |||
b6824d8b88 | |||
e09160e80d | |||
8ba5388438 | |||
30b6eac03d | |||
17ad07ce27 | |||
53911ebecd | |||
bc309705a9 | |||
1de80aeac3 | |||
1eaaf368ee | |||
36e40ebb85 | |||
3f600c5b82 | |||
fbd980f8b0 | |||
7d383421c6 | |||
aed386b3cd | |||
540cc4016e | |||
1b3a09495d | |||
b7af34371b | |||
105762e1c3 | |||
2706ae076d | |||
07ceec3e0b | |||
72fd1b047f | |||
178b6d4d8d | |||
d160e834eb | |||
3e8b9e7e8b | |||
c34ebfe739 | |||
571b33a11c | |||
07b90f4b4b | |||
f1630da2cc | |||
16751b5dee | |||
29ec9a436a | |||
6a7c00eaef | |||
82b24d9beb | |||
a317072e4e | |||
5b701cd197 | |||
8f035616a0 | |||
81f8ba9e4c | |||
380ab19910 | |||
4329629ee9 | |||
39fde52d8e | |||
0611f56776 | |||
8923e91e39 | |||
d6e6811bb9 | |||
f24bc5c826 | |||
c209d0d487 | |||
74dddc880d | |||
f3c41bbdf1 | |||
c45ddc8f22 | |||
84a98995bf | |||
ed83449514 | |||
9eda573a43 | |||
4f91d2512a | |||
2f5eeab567 | |||
f9fbb0eb3c | |||
43fbf4345d | |||
8262c2dd33 | |||
0e86430ea3 | |||
fc1301c92d | |||
e913e26c01 | |||
5ce4b12cc1 | |||
94429d781f | |||
321629a693 | |||
f21405399c | |||
305ca11eb5 | |||
9b1ff9b566 | |||
a0ed6ea3c8 | |||
4a6529973e | |||
9a02fac0e5 | |||
2c6a9e9e48 | |||
d91b735442 |
@@ -0,0 +1,3 @@
[build]
rustflags = "--cfg coloring_in_tokens"
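A `--cfg` flag passed through `rustflags` like this becomes a conditional-compilation predicate that Rust code can test with `#[cfg(...)]`. A minimal sketch of what that looks like on the consumer side (the function names are illustrative, not from this repository; only the `coloring_in_tokens` flag comes from the config above):

```rust
// Compiled only when rustc is invoked with `--cfg coloring_in_tokens`,
// e.g. via the [build] rustflags entry shown above.
#[cfg(coloring_in_tokens)]
fn coloring_enabled() -> bool {
    true
}

// Fallback used for builds without the flag.
#[cfg(not(coloring_in_tokens))]
fn coloring_enabled() -> bool {
    false
}

fn main() {
    println!("token coloring: {}", coloring_enabled());
}
```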
30  .github/ISSUE_TEMPLATE/bug_report.md (vendored, new file)
@@ -0,0 +1,30 @@
---
name: Bug report
about: Create a report to help us improve
title: ''
labels: ''
assignees: ''

---

**Describe the bug**
A clear and concise description of what the bug is.

**To Reproduce**
Steps to reproduce the behavior:
1.
2.
3.

**Expected behavior**
A clear and concise description of what you expected to happen.

**Screenshots**
If applicable, add screenshots to help explain your problem.

**Configuration (please complete the following information):**
- OS: [e.g. Windows]
- Version [e.g. 0.4.0]
- Optional features (if any)

Add any other context about the problem here.
20  .github/ISSUE_TEMPLATE/feature_request.md (vendored, new file)
@@ -0,0 +1,20 @@
---
name: Feature request
about: Suggest an idea for this project
title: ''
labels: ''
assignees: ''

---

**Is your feature request related to a problem? Please describe.**
A clear and concise description of what the problem is. Ex. I'm always frustrated when [...]

**Describe the solution you'd like**
A clear and concise description of what you want to happen.

**Describe alternatives you've considered**
A clear and concise description of any alternative solutions or features you've considered.

**Additional context**
Add any other context or screenshots about the feature request here.
@@ -1,8 +1,8 @@
image:
  file: .gitpod.Dockerfile
tasks:
  - init: cargo build
    command: cargo run
  - init: cargo install --path .
    command: nu
github:
  prebuilds:
    # enable for the master/default branch (defaults to true)
33
Cargo.lock
generated
33
Cargo.lock
generated
@ -1190,13 +1190,13 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "language-reporting"
|
||||
version = "0.3.1"
|
||||
source = "git+https://github.com/wycats/language-reporting#1e2100290fec96f69646e1e61482d80f7a8e7855"
|
||||
version = "0.4.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"derive-new 0.5.8 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"itertools 0.7.11 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"render-tree 0.1.1 (git+https://github.com/wycats/language-reporting)",
|
||||
"render-tree 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"serde 1.0.101 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"serde_derive 1.0.101 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"termcolor 1.0.5 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
@ -1487,7 +1487,7 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "nu"
|
||||
version = "0.3.0"
|
||||
version = "0.5.0"
|
||||
dependencies = [
|
||||
"ansi_term 0.12.1 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"app_dirs 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
@ -1498,6 +1498,7 @@ dependencies = [
|
||||
"bson 0.14.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"byte-unit 3.0.3 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"bytes 0.4.12 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"chrono 0.4.9 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"chrono-humanize 0.0.11 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"clap 2.33.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
@ -1519,7 +1520,7 @@ dependencies = [
|
||||
"image 0.22.3 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"indexmap 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"itertools 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"language-reporting 0.3.1 (git+https://github.com/wycats/language-reporting)",
|
||||
"language-reporting 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"mime 0.3.14 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"natural 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
@ -1540,7 +1541,7 @@ dependencies = [
|
||||
"regex 1.3.1 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"roxmltree 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"rusqlite 0.20.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"rustyline 5.0.3 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"rustyline 5.0.4 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"semver 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"serde 1.0.101 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"serde-hjson 0.9.1 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
@ -1558,6 +1559,7 @@ dependencies = [
|
||||
"term 0.5.2 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"textwrap 0.11.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"toml 0.5.3 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"trash 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"unicode-xid 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"url 2.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"which 2.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
@ -2007,7 +2009,7 @@ dependencies = [
|
||||
[[package]]
|
||||
name = "render-tree"
|
||||
version = "0.1.1"
|
||||
source = "git+https://github.com/wycats/language-reporting#1e2100290fec96f69646e1e61482d80f7a8e7855"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"itertools 0.7.11 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
@ -2076,7 +2078,7 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "rustyline"
|
||||
version = "5.0.3"
|
||||
version = "5.0.4"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"dirs 2.0.2 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
@ -2493,6 +2495,14 @@ dependencies = [
|
||||
"serde 1.0.101 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "trash"
|
||||
version = "1.0.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "typenum"
|
||||
version = "1.11.2"
|
||||
@ -2950,7 +2960,7 @@ dependencies = [
|
||||
"checksum jpeg-decoder 0.1.16 (registry+https://github.com/rust-lang/crates.io-index)" = "c1aae18ffeeae409c6622c3b6a7ee49792a7e5a062eea1b135fbb74e301792ba"
|
||||
"checksum js-sys 0.3.28 (registry+https://github.com/rust-lang/crates.io-index)" = "2cc9a97d7cec30128fd8b28a7c1f9df1c001ceb9b441e2b755e24130a6b43c79"
|
||||
"checksum kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "7507624b29483431c0ba2d82aece8ca6cdba9382bff4ddd0f7490560c056098d"
|
||||
"checksum language-reporting 0.3.1 (git+https://github.com/wycats/language-reporting)" = "<none>"
|
||||
"checksum language-reporting 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "4e6a84e1e6cccd818617d299427ad1519f127af2738b1d3a581835ef56ae298b"
|
||||
"checksum lazy_static 0.2.11 (registry+https://github.com/rust-lang/crates.io-index)" = "76f033c7ad61445c5b347c7382dd1237847eb1bce590fe50365dcb33d546be73"
|
||||
"checksum lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646"
|
||||
"checksum lazycell 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "b294d6fa9ee409a054354afc4352b0b9ef7ca222c69b8812cbea9e7d2bf3783f"
|
||||
@ -3038,7 +3048,7 @@ dependencies = [
|
||||
"checksum regex-automata 0.1.8 (registry+https://github.com/rust-lang/crates.io-index)" = "92b73c2a1770c255c240eaa4ee600df1704a38dc3feaa6e949e7fcd4f8dc09f9"
|
||||
"checksum regex-syntax 0.6.12 (registry+https://github.com/rust-lang/crates.io-index)" = "11a7e20d1cce64ef2fed88b66d347f88bd9babb82845b2b858f3edbf59a4f716"
|
||||
"checksum remove_dir_all 0.5.2 (registry+https://github.com/rust-lang/crates.io-index)" = "4a83fa3702a688b9359eccba92d153ac33fd2e8462f9e0e3fdf155239ea7792e"
|
||||
"checksum render-tree 0.1.1 (git+https://github.com/wycats/language-reporting)" = "<none>"
|
||||
"checksum render-tree 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "68ed587df09cfb7ce1bc6fe8f77e24db219f222c049326ccbfb948ec67e31664"
|
||||
"checksum result 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "194d8e591e405d1eecf28819740abed6d719d1a2db87fc0bcdedee9a26d55560"
|
||||
"checksum roxmltree 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)" = "b1a3193e568c6e262f817fd07af085c7f79241a947aedd3779d47eadc170e174"
|
||||
"checksum rusqlite 0.20.0 (registry+https://github.com/rust-lang/crates.io-index)" = "2a194373ef527035645a1bc21b10dc2125f73497e6e155771233eb187aedd051"
|
||||
@ -3047,7 +3057,7 @@ dependencies = [
|
||||
"checksum rustc-demangle 0.1.16 (registry+https://github.com/rust-lang/crates.io-index)" = "4c691c0e608126e00913e33f0ccf3727d5fc84573623b8d65b2df340b5201783"
|
||||
"checksum rustc-serialize 0.3.24 (registry+https://github.com/rust-lang/crates.io-index)" = "dcf128d1287d2ea9d80910b5f1120d0b8eede3fbf1abe91c40d39ea7d51e6fda"
|
||||
"checksum rustc_version 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)" = "138e3e0acb6c9fb258b19b67cb8abd63c00679d2851805ea151465464fe9030a"
|
||||
"checksum rustyline 5.0.3 (registry+https://github.com/rust-lang/crates.io-index)" = "4795e277e6e57dec9df62b515cd4991371daa80e8dc8d80d596e58722b89c417"
|
||||
"checksum rustyline 5.0.4 (registry+https://github.com/rust-lang/crates.io-index)" = "e9d8eb9912bc492db051324d36f5cea56984fc2afeaa5c6fa84e0b0e3cde550f"
|
||||
"checksum ryu 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = "19d2271fa48eaf61e53cc88b4ad9adcbafa2d512c531e7fadb6dc11a4d3656c5"
|
||||
"checksum safemem 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)" = "d2b08423011dae9a5ca23f07cf57dac3857f5c885d352b76f6d95f4aea9434d0"
|
||||
"checksum same-file 1.0.5 (registry+https://github.com/rust-lang/crates.io-index)" = "585e8ddcedc187886a30fa705c47985c3fa88d06624095856b36ca0b82ff4421"
|
||||
@ -3093,6 +3103,7 @@ dependencies = [
|
||||
"checksum tokio-io 0.1.12 (registry+https://github.com/rust-lang/crates.io-index)" = "5090db468dad16e1a7a54c8c67280c5e4b544f3d3e018f0b913b400261f85926"
|
||||
"checksum toml 0.4.10 (registry+https://github.com/rust-lang/crates.io-index)" = "758664fc71a3a69038656bee8b6be6477d2a6c315a6b81f7081f591bffa4111f"
|
||||
"checksum toml 0.5.3 (registry+https://github.com/rust-lang/crates.io-index)" = "c7aabe75941d914b72bf3e5d3932ed92ce0664d49d8432305a8b547c37227724"
|
||||
"checksum trash 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "f2f24d31505f49e989b1ee2c03c323251f6763d5907d471b71192dac92e323f8"
|
||||
"checksum typenum 1.11.2 (registry+https://github.com/rust-lang/crates.io-index)" = "6d2783fe2d6b8c1101136184eb41be8b1ad379e4657050b8aaff0c79ee7575f9"
|
||||
"checksum unicase 2.5.1 (registry+https://github.com/rust-lang/crates.io-index)" = "2e2e6bd1e59e56598518beb94fd6db628ded570326f0a98c679a304bd9f00150"
|
||||
"checksum unicode-bidi 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)" = "49f2bd0c6468a8230e1db229cff8029217cf623c767ea5d60bfbd42729ea54d5"
|
||||
|
24  Cargo.toml
@@ -1,6 +1,6 @@
[package]
name = "nu"
version = "0.4.0"
version = "0.5.0"
authors = ["Yehuda Katz <wycats@gmail.com>", "Jonathan Turner <jonathan.d.turner@gmail.com>", "Andrés N. Robalino <andres@androbtech.com>"]
description = "A shell for the GitHub era"
license = "MIT"
@@ -14,7 +14,7 @@ documentation = "https://book.nushell.sh"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

[dependencies]
rustyline = "5.0.3"
rustyline = "5.0.4"
chrono = { version = "0.4.9", features = ["serde"] }
derive-new = "0.5.8"
prettytable-rs = "0.8.0"
@@ -73,8 +73,10 @@ bigdecimal = { version = "0.1.0", features = ["serde"] }
natural = "0.3.0"
serde_urlencoded = "0.6.1"
sublime_fuzzy = "0.5"
trash = "1.0.0"
regex = "1"
cfg-if = "0.1"

regex = {version = "1", optional = true }
neso = { version = "0.5.0", optional = true }
crossterm = { version = "0.10.2", optional = true }
syntect = {version = "3.2.0", optional = true }
@@ -83,7 +85,7 @@ heim = {version = "0.0.8", optional = true }
battery = {version = "0.7.4", optional = true }
rawkey = {version = "0.1.2", optional = true }
clipboard = {version = "0.5", optional = true }
ptree = {version = "0.2", optional = true }
ptree = {version = "0.2" }
image = { version = "0.22.2", default_features = false, features = ["png_codec", "jpeg"], optional = true }

[features]
@@ -94,7 +96,7 @@ binaryview = ["image", "crossterm"]
sys = ["heim", "battery"]
ps = ["heim"]
# trace = ["nom-tracable/trace"]
all = ["raw-key", "textview", "binaryview", "sys", "ps", "clipboard", "ptree"]
all = ["raw-key", "textview", "binaryview", "sys", "ps", "clipboard"]

[dependencies.rusqlite]
version = "0.20.0"
@@ -119,18 +121,26 @@ path = "src/plugins/inc.rs"
name = "nu_plugin_sum"
path = "src/plugins/sum.rs"

[[bin]]
name = "nu_plugin_average"
path = "src/plugins/average.rs"

[[bin]]
name = "nu_plugin_embed"
path = "src/plugins/embed.rs"

[[bin]]
name = "nu_plugin_add"
path = "src/plugins/add.rs"
name = "nu_plugin_insert"
path = "src/plugins/insert.rs"

[[bin]]
name = "nu_plugin_edit"
path = "src/plugins/edit.rs"

[[bin]]
name = "nu_plugin_read"
path = "src/plugins/read.rs"

[[bin]]
name = "nu_plugin_str"
path = "src/plugins/str.rs"
15  README.md
@@ -32,9 +32,9 @@ Try it in Gitpod.

## Local

Up-to-date installation instructions can be found in the [installation chapter of the book](https://book.nushell.sh/en/installation).
Up-to-date installation instructions can be found in the [installation chapter of the book](https://book.nushell.sh/en/installation). **Windows users**: please note that Nu works on Windows 10 and does not currently have Windows 7/8.1 support.

To build Nu, you will need to use the **nightly** version of the compiler.
To build Nu, you will need to use the **beta** version of the compiler.

Required dependencies:

@@ -173,7 +173,7 @@ We can pipeline this into a command that gets the contents of one of the columns
━━━━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━━━━━━━━━━━━━┯━━━━━━━━━┯━━━━━━━━━┯━━━━━━┯━━━━━━━━━
authors │ description │ edition │ license │ name │ version
─────────────────┼────────────────────────────┼─────────┼─────────┼──────┼─────────
[table: 3 rows] │ A shell for the GitHub era │ 2018 │ MIT │ nu │ 0.4.0
[table: 3 rows] │ A shell for the GitHub era │ 2018 │ MIT │ nu │ 0.5.0
━━━━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━━━━━━━━━━━━━┷━━━━━━━━━┷━━━━━━━━━┷━━━━━━┷━━━━━━━━━
```

@@ -181,7 +181,7 @@ Finally, we can use commands outside of Nu once we have the data we want:

```
/home/jonathan/Source/nushell(master)> open Cargo.toml | get package.version | echo $it
0.4.0
0.5.0
```

Here we use the variable `$it` to refer to the value being piped to the external command.
@@ -248,16 +248,20 @@ Nu adheres closely to a set of goals that make up its design philosophy. As feat
## Filters on tables (structured data)
| command | description |
| ------------- | ------------- |
| add column-or-column-path value | Add a new column to the table |
| append row-data | Append a row to the end of the table |
| count | Show the total number of rows |
| edit column-or-column-path value | Edit an existing column to have a new value |
| embed column | Creates a new table of one column with the given name, and places the current table inside of it |
| first amount | Show only the first number of rows |
| get column-or-column-path | Open column and get data from the corresponding cells |
| group-by column | Creates a new table with the data from the table rows grouped by the column given |
| inc (column-or-column-path) | Increment a value or version. Optionally use the column of a table |
| insert column-or-column-path value | Insert a new column to the table |
| last amount | Show only the last number of rows |
| nth row-number | Return only the selected row |
| pick ...columns | Down-select table to only these columns |
| pivot --header-row <headers> | Pivot the tables, making columns into rows and vice versa |
| prepend row-data | Prepend a row to the beginning of the table |
| reject ...columns | Remove the given columns from the table |
| reverse | Reverses the table. |
| skip amount | Skip a number of rows |
@@ -291,6 +295,7 @@ Nu adheres closely to a set of goals that make up its design philosophy. As feat
| from-xml | Parse text as .xml and create a table |
| from-yaml | Parse text as a .yaml/.yml and create a table |
| lines | Split single string into rows, one per line |
| read pattern | Convert text to a table by matching the given pattern |
| size | Gather word count statistics on the text |
| split-column sep ...column-names | Split row contents across multiple columns via the separator, optionally give the columns names |
| split-row sep | Split row contents over multiple rows via the separator |
45  docs/commands/average.md (new file)
@@ -0,0 +1,45 @@
# average
This command allows you to calculate the average of values in a column.

## Examples
To get the average of the file sizes in a directory, simply pipe the size column from the ls command to the average command.

```shell
> ls | get size | average
━━━━━━━━━
<value>
━━━━━━━━━
2282.727272727273
━━━━━━━━━
```

```shell
> pwd | split-row / | size | get chars | average
━━━━━━━━━
<value>
━━━━━━━━━
5.250000000000000
━━━━━━━━━
```

Note that average only works for integer and byte values. If the shell doesn't recognize the values in a column as one of those types, it will return an error.
One way to solve this is to convert each row to an integer when possible and then pipe the result to `average`

```shell
> open tests/fixtures/formats/caco3_plastics.csv | get tariff_item | average
error: Unrecognized type in stream: Primitive(String("2509000000"))
- shell:1:0
1 | open tests/fixtures/formats/caco3_plastics.csv | get tariff_item | average
| ^^^^ source
```

```shell
> open tests/fixtures/formats/caco3_plastics.csv | get tariff_item | str --to-int | average
━━━━━━━━━━━━━━━━━━━
<value>
───────────────────
3239404444.000000
━━━━━━━━━━━━━━━━━━━
```
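As a side note, the second example above is just an arithmetic mean over the per-segment character counts of the working directory path. A small sketch of the same calculation in plain Rust (the path literal is illustrative):

```rust
// Sketch: split a path on '/', count the characters of each segment,
// then take the mean of those counts, mirroring the pipeline above.
fn main() {
    let pwd = "/home/jonathan/Source/nushell"; // illustrative path
    let counts: Vec<usize> = pwd.split('/').map(|s| s.chars().count()).collect();
    let average = counts.iter().sum::<usize>() as f64 / counts.len() as f64;
    println!("{:?} -> average {}", counts, average);
}
```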
47  docs/commands/from-csv.md (new file)
@@ -0,0 +1,47 @@
# from-csv

Converts csv data into a table. Use this when nushell cannot determine the input file extension.

## Example
Let's say we have the following file:
```shell
> cat pets.txt
animal, name, age
cat, Tom, 7
dog, Alfred, 10
chameleon, Linda, 1
```

`pets.txt` is actually a .csv file but it has the .txt extension, so `open` is not able to convert it into a table:

```shell
> open pets.txt
animal, name, age
cat, Tom, 7
dog, Alfred, 10
chameleon, Linda, 1
```

To get a table from `pets.txt` we need to use the `from-csv` command:

```shell
> open pets.txt | from-csv
━━━┯━━━━━━━━━━━┯━━━━━━━━━┯━━━━━━
# │ animal │ name │ age
───┼───────────┼─────────┼──────
0 │ cat │ Tom │ 7
1 │ dog │ Alfred │ 10
2 │ chameleon │ Linda │ 1
━━━┷━━━━━━━━━━━┷━━━━━━━━━┷━━━━━━
```

To ignore the csv headers use `--headerless`:
```shell
> open pets.txt | from-csv --headerless
━━━┯━━━━━━━━━━━┯━━━━━━━━━┯━━━━━━━━━
# │ Column1 │ Column2 │ Column3
───┼───────────┼─────────┼─────────
0 │ dog │ Alfred │ 10
1 │ chameleon │ Linda │ 1
━━━┷━━━━━━━━━━━┷━━━━━━━━━┷━━━━━━━━━
```
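For readers curious what "headerless" parsing amounts to, the sketch below reads the same rows without treating the first record as a header, using the widely used `csv` crate. This is an illustration only; it is an assumption, not a claim about nu's actual `from-csv` implementation.

```rust
// Sketch: headerless CSV parsing, conceptually what `from-csv --headerless` does.
// Assumes the `csv` crate (csv = "1") is available.
use csv::ReaderBuilder;

fn main() -> Result<(), Box<dyn std::error::Error>> {
    let data = "cat, Tom, 7\ndog, Alfred, 10\nchameleon, Linda, 1\n";
    let mut reader = ReaderBuilder::new()
        .has_headers(false) // every row is data; columns get generic names
        .trim(csv::Trim::All)
        .from_reader(data.as_bytes());

    for (row, record) in reader.records().enumerate() {
        let record = record?;
        for (col, field) in record.iter().enumerate() {
            println!("row {} Column{}: {}", row, col + 1, field);
        }
    }
    Ok(())
}
```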
23  docs/commands/from-toml.md (new file)
@@ -0,0 +1,23 @@
# from-toml
Converts toml data into a table. Use this when nushell cannot determine the input file extension.

## Example
Let's say we have the following Rust .lock file:
```shell
> open Cargo.lock
# This file is automatically @generated by Cargo.
# It is not intended for manual editing. [[package]] name = "adler32" version = "1.0.3" source = "registry+https://github.com/rust-lang/crates.io-index"
...
```

The "Cargo.lock" file is actually a .toml file, but the file extension isn't .toml. That's okay, we can use the `from-toml` command:

```shell
> open Cargo.lock | from-toml
━━━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━━━━
metadata │ package
────────────────┼───────────────────
[table: 1 row] │ [table: 154 rows]
━━━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━━━━
```
56  docs/commands/sort-by.md (new file)
@@ -0,0 +1,56 @@
# sort-by

The `sort-by` command sorts the table being displayed in the terminal by a chosen column(s).

`sort-by` takes multiple arguments (being the names of columns), sorting by each argument in order.

## Examples

```shell
/home/example> ls | sort-by size
━━━┯━━━━━━┯━━━━━━┯━━━━━━━━━━┯━━━━━━━━┯━━━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━
# │ name │ type │ readonly │ size │ accessed │ modified
───┼──────┼──────┼──────────┼────────┼────────────────┼────────────────
0 │ az │ File │ │ 18 B │ 4 minutes ago │ 4 minutes ago
1 │ a │ File │ │ 18 B │ 4 minutes ago │ 38 minutes ago
2 │ ad │ File │ │ 18 B │ 4 minutes ago │ 4 minutes ago
3 │ ac │ File │ │ 18 B │ 4 minutes ago │ 4 minutes ago
4 │ ab │ File │ │ 18 B │ 4 minutes ago │ 4 minutes ago
5 │ c │ File │ │ 102 B │ 35 minutes ago │ 35 minutes ago
6 │ d │ File │ │ 189 B │ 35 minutes ago │ 34 minutes ago
7 │ b │ File │ │ 349 B │ 35 minutes ago │ 35 minutes ago
━━━┷━━━━━━┷━━━━━━┷━━━━━━━━━━┷━━━━━━━━┷━━━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━
```

```shell
/home/example> ls | sort-by size name
━━━┯━━━━━━┯━━━━━━┯━━━━━━━━━━┯━━━━━━━━┯━━━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━
# │ name │ type │ readonly │ size │ accessed │ modified
───┼──────┼──────┼──────────┼────────┼────────────────┼────────────────
0 │ a │ File │ │ 18 B │ 4 minutes ago │ 39 minutes ago
1 │ ab │ File │ │ 18 B │ 4 minutes ago │ 4 minutes ago
2 │ ac │ File │ │ 18 B │ 4 minutes ago │ 4 minutes ago
3 │ ad │ File │ │ 18 B │ 4 minutes ago │ 4 minutes ago
4 │ az │ File │ │ 18 B │ 4 minutes ago │ 4 minutes ago
5 │ c │ File │ │ 102 B │ 36 minutes ago │ 35 minutes ago
6 │ d │ File │ │ 189 B │ 35 minutes ago │ 35 minutes ago
7 │ b │ File │ │ 349 B │ 36 minutes ago │ 36 minutes ago
```

```shell
/home/example> ls | sort-by accessed
━━━┯━━━━━━┯━━━━━━┯━━━━━━━━━━┯━━━━━━━━┯━━━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━
# │ name │ type │ readonly │ size │ accessed │ modified
───┼──────┼──────┼──────────┼────────┼────────────────┼────────────────
0 │ b │ File │ │ 349 B │ 37 minutes ago │ 37 minutes ago
1 │ c │ File │ │ 102 B │ 37 minutes ago │ 37 minutes ago
2 │ d │ File │ │ 189 B │ 37 minutes ago │ 36 minutes ago
3 │ a │ File │ │ 18 B │ 6 minutes ago │ 40 minutes ago
4 │ ab │ File │ │ 18 B │ 6 minutes ago │ 6 minutes ago
5 │ ac │ File │ │ 18 B │ 6 minutes ago │ 6 minutes ago
6 │ ad │ File │ │ 18 B │ 5 minutes ago │ 5 minutes ago
7 │ az │ File │ │ 18 B │ 5 minutes ago │ 5 minutes ago
━━━┷━━━━━━┷━━━━━━┷━━━━━━━━━━┷━━━━━━━━┷━━━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━
```
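The multi-column behaviour (`sort-by size name`) is an ordinary lexicographic sort over a key tuple. A small Rust sketch of the same idea (file names and sizes are illustrative):

```rust
// Sketch: sorting by several keys in order, the way `sort-by size name` does.
fn main() {
    let mut files = vec![("c", 102u64), ("az", 18), ("a", 18), ("b", 349)];
    // Sort by size first, then by name to break ties.
    files.sort_by_key(|&(name, size)| (size, name));
    println!("{:?}", files); // [("a", 18), ("az", 18), ("c", 102), ("b", 349)]
}
```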
50  docs/commands/str.md (new file)
@@ -0,0 +1,50 @@
# str

Consumes either a single value or a table and converts the provided data to a string and optionally applies a change.

## Examples

```shell
> shells
━━━┯━━━┯━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
# │ │ name │ path
───┼───┼────────────┼────────────────────────────────
0 │ X │ filesystem │ /home/TUX/stuff/expr/stuff
1 │ │ filesystem │ /
━━━┷━━━┷━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
> shells | str path --upcase
━━━┯━━━┯━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
# │ │ name │ path
───┼───┼────────────┼────────────────────────────────
0 │ X │ filesystem │ /HOME/TUX/STUFF/EXPR/STUFF
1 │ │ filesystem │ /
━━━┷━━━┷━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
> shells | str path --downcase
━━━┯━━━┯━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
# │ │ name │ path
───┼───┼────────────┼────────────────────────────────
0 │ X │ filesystem │ /home/tux/stuff/expr/stuff
1 │ │ filesystem │ /
━━━┷━━━┷━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
> shells | str # --substring "21, 99"
━━━┯━━━┯━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
# │ │ name │ path
───┼───┼────────────┼────────────────────────────────
0 │ X │ filesystem │ stuff
1 │ │ filesystem │
━━━┷━━━┷━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
> shells | str # --substring "6,"
━━━┯━━━┯━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
# │ │ name │ path
───┼───┼────────────┼────────────────────────────────
0 │ X │ filesystem │ TUX/stuff/expr/stuff
1 │ │ filesystem │
━━━┷━━━┷━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━

> echo "1, 2, 3" | split-row "," | str --to-int | sum
━━━━━━━━━
<value>
─────────
6
━━━━━━━━━
```
@@ -1,9 +1,7 @@
# sum

This command allows you to calculate the sum of values in a column.

## Examples
# sum
This command allows you to calculate the sum of values in a column.

## Examples
To get the sum of the file sizes in a directory, simply pipe the size column from the ls command to the sum command.

```shell
@@ -15,21 +13,32 @@ To get the sum of the file sizes in a directory, simply pipe the size column fro
━━━━━━━━━
```

Note that sum only works for integer and byte values at the moment, and if the shell doesn't recognize the values in a column as one of those types, it will return an error.
To get the sum of the characters that make up your present working directory.
```shell
> pwd | split-row / | size | get chars | sum
━━━━━━━━━
<value>
━━━━━━━━━
21
━━━━━━━━━
```

Note that sum only works for integer and byte values. If the shell doesn't recognize the values in a column as one of those types, it will return an error.
One way to solve this is to convert each row to an integer when possible and then pipe the result to `sum`

```shell
> open example.csv
━━━┯━━━━━━━━━┯━━━━━━━━┯━━━━━━━━━━
# │ fruit │ amount │ quality
───┼─────────┼────────┼──────────
0 │ apples │ 1 │ fresh
1 │ bananas │ 2 │ old
2 │ oranges │ 7 │ fresh
3 │ kiwis │ 25 │ rotten
━━━┷━━━━━━━━━┷━━━━━━━━┷━━━━━━━━━━
> open tests/fixtures/formats/caco3_plastics.csv | get tariff_item | sum
error: Unrecognized type in stream: Primitive(String("2509000000"))
- shell:1:0
1 | open tests/fixtures/formats/caco3_plastics.csv | get tariff_item | sum
| ^^^^ source
```

```shell
> open example.csv | get amount | sum
error: Unrecognized type in stream: Primitive(String("1"))
> open tests/fixtures/formats/caco3_plastics.csv | get tariff_item | str --to-int | sum
━━━━━━━━━━━━━
<value>
─────────────
29154639996
━━━━━━━━━━━━━
```
47  docs/commands/tags.md (new file)
@@ -0,0 +1,47 @@
# tags

The tags command allows users to access the metadata of the previous value in
the pipeline. This command may be run on multiple values of input as well.

As of writing this, the only metadata returned includes:

- `span`: the start and end indices of the previous value's substring location
- `anchor`: the source where data was loaded from; this may not appear if the
previous pipeline value didn't actually have a source (like trying to `open` a
dir, or running `ls` on a dir)

## Examples

```shell
> open README.md | tags
━━━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
span │ anchor
────────────────┼──────────────────────────────────────────────────
[table: 1 row] │ /Users/danielh/Projects/github/nushell/README.md
━━━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
```

```shell
> open README.md | tags | get span
━━━━━━━┯━━━━━
start │ end
───────┼─────
5 │ 14
━━━━━━━┷━━━━━
```

```shell
> ls | tags | first 3 | get span
━━━┯━━━━━━━┯━━━━━
# │ start │ end
───┼───────┼─────
0 │ 0 │ 2
1 │ 0 │ 2
2 │ 0 │ 2
━━━┷━━━━━━━┷━━━━━
```

## Reference

More useful information on the `tags` command can be found by referencing [The
Nu Book's entry on Metadata](https://book.nushell.sh/en/metadata)
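The `start`/`end` values in a span are offsets into the text the value came from, so they can be used to slice the original source. A minimal sketch (the `Span` struct here is illustrative, not nu's actual type):

```rust
// Sketch: what a span's start/end offsets are good for — indexing back
// into the original source text.
struct Span {
    start: usize,
    end: usize,
}

fn main() {
    // `open README.md | tags | get span` above reported start 5, end 14.
    let source = "open README.md | tags";
    let span = Span { start: 5, end: 14 };
    // Slicing the source with the span recovers the token it points at.
    println!("{}", &source[span.start..span.end]); // -> "README.md"
}
```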
@@ -10,4 +10,12 @@ reason = """
This is laying the groundwork for merging coloring and parsing. It also makes token_nodes.atomic() naturally
work with coloring, which is pretty useful on its own.
"""
enabled = false

[data_processing_primitives]

description = "Groundwork so tables can be data processed"
reason = """
These will allow taking tables and being able to transform, process, and explore.
"""
enabled = false
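Sections like the two feature-flag entries above are plain TOML and can be read with the `toml` and `serde` crates that already appear in the dependency list. A hedged sketch (struct and variable names are mine, not from the repository):

```rust
// Sketch: deserializing feature-flag sections like the ones above.
use serde::Deserialize;
use std::collections::HashMap;

#[derive(Debug, Deserialize)]
struct FeatureFlag {
    description: String,
    reason: String,
    enabled: bool,
}

fn main() -> Result<(), Box<dyn std::error::Error>> {
    let text = r#"
[data_processing_primitives]
description = "Groundwork so tables can be data processed"
reason = """
These will allow taking tables and being able to transform, process, and explore.
"""
enabled = false
"#;

    // The top-level table maps each flag name to its entry.
    let flags: HashMap<String, FeatureFlag> = toml::from_str(text)?;
    for (name, flag) in &flags {
        println!("{}: enabled = {}", name, flag.enabled);
    }
    Ok(())
}
```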
122
src/cli.rs
122
src/cli.rs
@ -14,13 +14,13 @@ use crate::git::current_branch;
|
||||
use crate::parser::registry::Signature;
|
||||
use crate::parser::{
|
||||
hir,
|
||||
hir::syntax_shape::{expand_syntax, PipelineShape},
|
||||
hir::{expand_external_tokens::expand_external_tokens, tokens_iterator::TokensIterator},
|
||||
hir::syntax_shape::{expand_syntax, ExpandContext, PipelineShape},
|
||||
hir::{expand_external_tokens::ExternalTokensShape, tokens_iterator::TokensIterator},
|
||||
TokenNode,
|
||||
};
|
||||
use crate::prelude::*;
|
||||
|
||||
use log::{debug, trace};
|
||||
use log::{debug, log_enabled, trace};
|
||||
use rustyline::error::ReadlineError;
|
||||
use rustyline::{self, config::Configurer, config::EditMode, ColorMode, Config, Editor};
|
||||
use std::env;
|
||||
@ -163,6 +163,8 @@ fn load_plugins(context: &mut Context) -> Result<(), ShellError> {
|
||||
require_literal_leading_dot: false,
|
||||
};
|
||||
|
||||
set_env_from_config();
|
||||
|
||||
for path in search_paths() {
|
||||
let mut pattern = path.to_path_buf();
|
||||
|
||||
@ -264,6 +266,8 @@ pub async fn cli() -> Result<(), Box<dyn Error>> {
|
||||
whole_stream_command(Lines),
|
||||
whole_stream_command(Reject),
|
||||
whole_stream_command(Reverse),
|
||||
whole_stream_command(Append),
|
||||
whole_stream_command(Prepend),
|
||||
whole_stream_command(Trim),
|
||||
whole_stream_command(ToBSON),
|
||||
whole_stream_command(ToCSV),
|
||||
@ -275,6 +279,7 @@ pub async fn cli() -> Result<(), Box<dyn Error>> {
|
||||
whole_stream_command(ToURL),
|
||||
whole_stream_command(ToYAML),
|
||||
whole_stream_command(SortBy),
|
||||
whole_stream_command(GroupBy),
|
||||
whole_stream_command(Tags),
|
||||
whole_stream_command(Count),
|
||||
whole_stream_command(First),
|
||||
@ -305,6 +310,7 @@ pub async fn cli() -> Result<(), Box<dyn Error>> {
|
||||
whole_stream_command(SkipWhile),
|
||||
per_item_command(Enter),
|
||||
per_item_command(Help),
|
||||
per_item_command(History),
|
||||
whole_stream_command(Exit),
|
||||
whole_stream_command(Autoview),
|
||||
whole_stream_command(Pivot),
|
||||
@ -316,6 +322,8 @@ pub async fn cli() -> Result<(), Box<dyn Error>> {
|
||||
whole_stream_command(Table),
|
||||
whole_stream_command(Version),
|
||||
whole_stream_command(Which),
|
||||
#[cfg(data_processing_primitives)]
|
||||
whole_stream_command(SplitBy),
|
||||
]);
|
||||
|
||||
#[cfg(feature = "clipboard")]
|
||||
@ -412,6 +420,7 @@ pub async fn cli() -> Result<(), Box<dyn Error>> {
|
||||
match process_line(readline, &mut context).await {
|
||||
LineResult::Success(line) => {
|
||||
rl.add_history_entry(line.clone());
|
||||
let _ = rl.save_history(&History::path());
|
||||
}
|
||||
|
||||
LineResult::CtrlC => {
|
||||
@ -439,6 +448,7 @@ pub async fn cli() -> Result<(), Box<dyn Error>> {
|
||||
|
||||
LineResult::Error(line, err) => {
|
||||
rl.add_history_entry(line.clone());
|
||||
let _ = rl.save_history(&History::path());
|
||||
|
||||
context.with_host(|host| {
|
||||
print_err(err, host, &Text::from(line));
|
||||
@ -466,6 +476,70 @@ fn chomp_newline(s: &str) -> &str {
|
||||
}
|
||||
}
|
||||
|
||||
fn set_env_from_config() {
|
||||
let config = crate::data::config::read(Tag::unknown(), &None).unwrap();
|
||||
|
||||
if config.contains_key("env") {
|
||||
// Clear the existing vars, we're about to replace them
|
||||
for (key, _value) in std::env::vars() {
|
||||
std::env::remove_var(key);
|
||||
}
|
||||
|
||||
let value = config.get("env");
|
||||
|
||||
match value {
|
||||
Some(Tagged {
|
||||
item: Value::Row(r),
|
||||
..
|
||||
}) => {
|
||||
for (k, v) in &r.entries {
|
||||
match v.as_string() {
|
||||
Ok(value_string) => {
|
||||
std::env::set_var(k, value_string);
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
|
||||
if config.contains_key("path") {
|
||||
// Override the path with what they give us from config
|
||||
let value = config.get("path");
|
||||
|
||||
match value {
|
||||
Some(value) => match value {
|
||||
Tagged {
|
||||
item: Value::Table(table),
|
||||
..
|
||||
} => {
|
||||
let mut paths = vec![];
|
||||
for val in table {
|
||||
let path_str = val.as_string();
|
||||
match path_str {
|
||||
Err(_) => {}
|
||||
Ok(path_str) => {
|
||||
paths.push(PathBuf::from(path_str));
|
||||
}
|
||||
}
|
||||
}
|
||||
let path_os_string = std::env::join_paths(&paths);
|
||||
match path_os_string {
|
||||
Ok(path_os_string) => {
|
||||
std::env::set_var("PATH", path_os_string);
|
||||
}
|
||||
Err(_) => {}
|
||||
}
|
||||
}
|
||||
_ => {}
|
||||
},
|
||||
None => {}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
enum LineResult {
|
||||
Success(String),
|
||||
Error(String, ShellError),
|
||||
@ -500,6 +574,7 @@ async fn process_line(readline: Result<String, ReadlineError>, ctx: &mut Context
|
||||
Some(ClassifiedCommand::External(_)) => {}
|
||||
_ => pipeline
|
||||
.commands
|
||||
.item
|
||||
.push(ClassifiedCommand::Internal(InternalCommand {
|
||||
name: "autoview".to_string(),
|
||||
name_tag: Tag::unknown(),
|
||||
@ -507,14 +582,17 @@ async fn process_line(readline: Result<String, ReadlineError>, ctx: &mut Context
|
||||
Box::new(hir::Expression::synthetic_string("autoview")),
|
||||
None,
|
||||
None,
|
||||
),
|
||||
)
|
||||
.spanned_unknown(),
|
||||
})),
|
||||
}
|
||||
|
||||
let mut input = ClassifiedInputStream::new();
|
||||
let mut iter = pipeline.commands.item.into_iter().peekable();
|
||||
|
||||
let mut iter = pipeline.commands.into_iter().peekable();
|
||||
let mut is_first_command = true;
|
||||
// Check the config to see if we need to update the path
|
||||
// TODO: make sure config is cached so we don't path this load every call
|
||||
set_env_from_config();
|
||||
|
||||
loop {
|
||||
let item: Option<ClassifiedCommand> = iter.next();
|
||||
@ -548,20 +626,20 @@ async fn process_line(readline: Result<String, ReadlineError>, ctx: &mut Context
|
||||
(
|
||||
Some(ClassifiedCommand::Internal(left)),
|
||||
Some(ClassifiedCommand::External(_)),
|
||||
) => match left.run(ctx, input, Text::from(line), is_first_command) {
|
||||
) => match left.run(ctx, input, Text::from(line)) {
|
||||
Ok(val) => ClassifiedInputStream::from_input_stream(val),
|
||||
Err(err) => return LineResult::Error(line.to_string(), err),
|
||||
},
|
||||
|
||||
(Some(ClassifiedCommand::Internal(left)), Some(_)) => {
|
||||
match left.run(ctx, input, Text::from(line), is_first_command) {
|
||||
match left.run(ctx, input, Text::from(line)) {
|
||||
Ok(val) => ClassifiedInputStream::from_input_stream(val),
|
||||
Err(err) => return LineResult::Error(line.to_string(), err),
|
||||
}
|
||||
}
|
||||
|
||||
(Some(ClassifiedCommand::Internal(left)), None) => {
|
||||
match left.run(ctx, input, Text::from(line), is_first_command) {
|
||||
match left.run(ctx, input, Text::from(line)) {
|
||||
Ok(val) => {
|
||||
use futures::stream::TryStreamExt;
|
||||
|
||||
@ -613,8 +691,6 @@ async fn process_line(readline: Result<String, ReadlineError>, ctx: &mut Context
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
is_first_command = false;
|
||||
}
|
||||
|
||||
LineResult::Success(line.to_string())
|
||||
@ -636,11 +712,20 @@ fn classify_pipeline(
|
||||
let mut pipeline_list = vec![pipeline.clone()];
|
||||
let mut iterator = TokensIterator::all(&mut pipeline_list, pipeline.span());
|
||||
|
||||
expand_syntax(
|
||||
let result = expand_syntax(
|
||||
&PipelineShape,
|
||||
&mut iterator,
|
||||
&context.expand_context(source, pipeline.span()),
|
||||
&context.expand_context(source),
|
||||
)
|
||||
.map_err(|err| err.into());
|
||||
|
||||
if log_enabled!(target: "nu::expand_syntax", log::Level::Debug) {
|
||||
println!("");
|
||||
ptree::print_tree(&iterator.expand_tracer().print(source.clone())).unwrap();
|
||||
println!("");
|
||||
}
|
||||
|
||||
result
|
||||
}
|
||||
|
||||
// Classify this command as an external command, which doesn't give special meaning
|
||||
@ -648,21 +733,22 @@ fn classify_pipeline(
|
||||
// strings.
|
||||
pub(crate) fn external_command(
|
||||
tokens: &mut TokensIterator,
|
||||
source: &Text,
|
||||
context: &ExpandContext,
|
||||
name: Tagged<&str>,
|
||||
) -> Result<ClassifiedCommand, ShellError> {
|
||||
let arg_list_strings = expand_external_tokens(tokens, source)?;
|
||||
) -> Result<ClassifiedCommand, ParseError> {
|
||||
let Spanned { item, span } = expand_syntax(&ExternalTokensShape, tokens, context)?;
|
||||
|
||||
Ok(ClassifiedCommand::External(ExternalCommand {
|
||||
name: name.to_string(),
|
||||
name_tag: name.tag(),
|
||||
args: arg_list_strings
|
||||
args: item
|
||||
.iter()
|
||||
.map(|x| Tagged {
|
||||
tag: x.span.into(),
|
||||
item: x.item.clone(),
|
||||
})
|
||||
.collect(),
|
||||
.collect::<Vec<_>>()
|
||||
.spanned(span),
|
||||
}))
|
||||
}
|
||||
|
||||
|
@@ -1,6 +1,7 @@
#[macro_use]
pub(crate) mod macros;

pub(crate) mod append;
pub(crate) mod args;
pub(crate) mod autoview;
pub(crate) mod cd;
@@ -30,7 +31,9 @@ pub(crate) mod from_url;
pub(crate) mod from_xml;
pub(crate) mod from_yaml;
pub(crate) mod get;
pub(crate) mod group_by;
pub(crate) mod help;
pub(crate) mod history;
pub(crate) mod last;
pub(crate) mod lines;
pub(crate) mod ls;
@@ -43,6 +46,7 @@ pub(crate) mod pick;
pub(crate) mod pivot;
pub(crate) mod plugin;
pub(crate) mod post;
pub(crate) mod prepend;
pub(crate) mod prev;
pub(crate) mod pwd;
pub(crate) mod reject;
@@ -53,6 +57,10 @@ pub(crate) mod shells;
pub(crate) mod size;
pub(crate) mod skip_while;
pub(crate) mod sort_by;

#[cfg(data_processing_primitives)]
pub(crate) mod split_by;

pub(crate) mod split_column;
pub(crate) mod split_row;
pub(crate) mod table;
@@ -77,6 +85,7 @@ pub(crate) use command::{
UnevaluatedCallInfo, WholeStreamCommand,
};

pub(crate) use append::Append;
pub(crate) use classified::ClassifiedCommand;
pub(crate) use config::Config;
pub(crate) use count::Count;
@@ -103,7 +112,9 @@ pub(crate) use from_xml::FromXML;
pub(crate) use from_yaml::FromYAML;
pub(crate) use from_yaml::FromYML;
pub(crate) use get::Get;
pub(crate) use group_by::GroupBy;
pub(crate) use help::Help;
pub(crate) use history::History;
pub(crate) use last::Last;
pub(crate) use lines::Lines;
pub(crate) use ls::LS;
@@ -115,6 +126,7 @@ pub(crate) use open::Open;
pub(crate) use pick::Pick;
pub(crate) use pivot::Pivot;
pub(crate) use post::Post;
pub(crate) use prepend::Prepend;
pub(crate) use prev::Previous;
pub(crate) use pwd::PWD;
pub(crate) use reject::Reject;
@@ -125,6 +137,10 @@ pub(crate) use shells::Shells;
pub(crate) use size::Size;
pub(crate) use skip_while::SkipWhile;
pub(crate) use sort_by::SortBy;

#[cfg(data_processing_primitives)]
pub(crate) use split_by::SplitBy;

pub(crate) use split_column::SplitColumn;
pub(crate) use split_row::SplitRow;
pub(crate) use table::Table;
47  src/commands/append.rs (new file)
@@ -0,0 +1,47 @@
use crate::commands::WholeStreamCommand;
use crate::errors::ShellError;
use crate::parser::CommandRegistry;
use crate::prelude::*;

#[derive(Deserialize)]
struct AppendArgs {
    row: Tagged<Value>,
}

pub struct Append;

impl WholeStreamCommand for Append {
    fn name(&self) -> &str {
        "append"
    }

    fn signature(&self) -> Signature {
        Signature::build("append").required(
            "row value",
            SyntaxShape::Any,
            "the value of the row to append to the table",
        )
    }

    fn usage(&self) -> &str {
        "Append the given row to the table"
    }

    fn run(
        &self,
        args: CommandArgs,
        registry: &CommandRegistry,
    ) -> Result<OutputStream, ShellError> {
        args.process(registry, append)?.run()
    }
}

fn append(
    AppendArgs { row }: AppendArgs,
    RunnableContext { input, .. }: RunnableContext,
) -> Result<OutputStream, ShellError> {
    let mut after: VecDeque<Tagged<Value>> = VecDeque::new();
    after.push_back(row);

    Ok(OutputStream::from_input(input.values.chain(after)))
}
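The command above works by chaining a one-element queue after the incoming stream; a `prepend` is the mirror image. A simplified sketch of that chaining idea using plain iterators rather than nu's stream types:

```rust
// Sketch of the chaining idea behind `append` (and its mirror, `prepend`),
// using plain iterators instead of nu's stream types.
use std::collections::VecDeque;

fn append(rows: Vec<&str>, new_row: &str) -> Vec<String> {
    let mut after: VecDeque<&str> = VecDeque::new();
    after.push_back(new_row);
    // `append` chains the new row after the existing values...
    rows.into_iter().chain(after).map(String::from).collect()
}

fn prepend(rows: Vec<&str>, new_row: &str) -> Vec<String> {
    let mut before: VecDeque<&str> = VecDeque::new();
    before.push_back(new_row);
    // ...while `prepend` chains the existing values after the new row.
    before.into_iter().chain(rows).map(String::from).collect()
}

fn main() {
    println!("{:?}", append(vec!["a", "b"], "c"));  // ["a", "b", "c"]
    println!("{:?}", prepend(vec!["b", "c"], "a")); // ["a", "b", "c"]
}
```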
@@ -96,7 +96,7 @@ pub fn autoview(
named_args.insert_optional("start_number", Some(Expression::number(current_idx, Tag::unknown())));
command_args.call_info.args.named = Some(named_args);

let result = table.run(command_args, &context.commands, false);
let result = table.run(command_args, &context.commands);
result.collect::<Vec<_>>().await;

if finished {
@@ -117,7 +117,7 @@ pub fn autoview(
if let Some(text) = text {
let mut stream = VecDeque::new();
stream.push_back(Value::string(s).tagged(Tag { anchor, span }));
let result = text.run(raw.with_input(stream.into()), &context.commands, false);
let result = text.run(raw.with_input(stream.into()), &context.commands);
result.collect::<Vec<_>>().await;
} else {
println!("{}", s);
@@ -134,7 +134,7 @@ pub fn autoview(
if let Some(binary) = binary {
let mut stream = VecDeque::new();
stream.push_back(x.clone());
let result = binary.run(raw.with_input(stream.into()), &context.commands, false);
let result = binary.run(raw.with_input(stream.into()), &context.commands);
result.collect::<Vec<_>>().await;
} else {
use pretty_hex::*;
@@ -149,7 +149,7 @@ pub fn autoview(
if let Some(table) = table {
let mut stream = VecDeque::new();
stream.push_back(x.clone());
let result = table.run(raw.with_input(stream.into()), &context.commands, false);
let result = table.run(raw.with_input(stream.into()), &context.commands);
result.collect::<Vec<_>>().await;
} else {
println!("{:?}", item);
@@ -10,7 +10,11 @@ impl WholeStreamCommand for CD {
}

fn signature(&self) -> Signature {
Signature::build("cd").optional("directory", SyntaxShape::Path)
Signature::build("cd").optional(
"directory",
SyntaxShape::Path,
"the directory to change to",
)
}

fn usage(&self) -> &str {
@ -4,7 +4,9 @@ use bytes::{BufMut, BytesMut};
|
||||
use derive_new::new;
|
||||
use futures::stream::StreamExt;
|
||||
use futures_codec::{Decoder, Encoder, Framed};
|
||||
use itertools::Itertools;
|
||||
use log::{log_enabled, trace};
|
||||
use std::fmt;
|
||||
use std::io::{Error, ErrorKind};
|
||||
use subprocess::Exec;
|
||||
|
||||
@ -52,7 +54,7 @@ pub(crate) struct ClassifiedInputStream {
|
||||
impl ClassifiedInputStream {
|
||||
pub(crate) fn new() -> ClassifiedInputStream {
|
||||
ClassifiedInputStream {
|
||||
objects: VecDeque::new().into(),
|
||||
objects: vec![Value::nothing().tagged(Tag::unknown())].into(),
|
||||
stdin: None,
|
||||
}
|
||||
}
|
||||
@ -72,26 +74,77 @@ impl ClassifiedInputStream {
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
#[derive(Debug, Clone)]
|
||||
pub(crate) struct ClassifiedPipeline {
|
||||
pub(crate) commands: Vec<ClassifiedCommand>,
|
||||
pub(crate) commands: Spanned<Vec<ClassifiedCommand>>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Eq, PartialEq)]
|
||||
impl FormatDebug for ClassifiedPipeline {
|
||||
fn fmt_debug(&self, f: &mut DebugFormatter, source: &str) -> fmt::Result {
|
||||
f.say_str(
|
||||
"classified pipeline",
|
||||
self.commands.iter().map(|c| c.debug(source)).join(" | "),
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
impl HasSpan for ClassifiedPipeline {
|
||||
fn span(&self) -> Span {
|
||||
self.commands.span
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Eq, PartialEq)]
|
||||
pub(crate) enum ClassifiedCommand {
|
||||
#[allow(unused)]
|
||||
Expr(TokenNode),
|
||||
Internal(InternalCommand),
|
||||
#[allow(unused)]
|
||||
Dynamic(hir::Call),
|
||||
Dynamic(Spanned<hir::Call>),
|
||||
External(ExternalCommand),
|
||||
}
|
||||
|
||||
#[derive(new, Debug, Eq, PartialEq)]
|
||||
impl FormatDebug for ClassifiedCommand {
|
||||
fn fmt_debug(&self, f: &mut DebugFormatter, source: &str) -> fmt::Result {
|
||||
match self {
|
||||
ClassifiedCommand::Expr(expr) => expr.fmt_debug(f, source),
|
||||
ClassifiedCommand::Internal(internal) => internal.fmt_debug(f, source),
|
||||
ClassifiedCommand::Dynamic(dynamic) => dynamic.fmt_debug(f, source),
|
||||
ClassifiedCommand::External(external) => external.fmt_debug(f, source),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl HasSpan for ClassifiedCommand {
|
||||
fn span(&self) -> Span {
|
||||
match self {
|
||||
ClassifiedCommand::Expr(node) => node.span(),
|
||||
ClassifiedCommand::Internal(command) => command.span(),
|
||||
ClassifiedCommand::Dynamic(call) => call.span,
|
||||
ClassifiedCommand::External(command) => command.span(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(new, Debug, Clone, Eq, PartialEq)]
|
||||
pub(crate) struct InternalCommand {
|
||||
pub(crate) name: String,
|
||||
pub(crate) name_tag: Tag,
|
||||
pub(crate) args: hir::Call,
|
||||
pub(crate) args: Spanned<hir::Call>,
|
||||
}
|
||||
|
||||
impl HasSpan for InternalCommand {
|
||||
fn span(&self) -> Span {
|
||||
let start = self.name_tag.span;
|
||||
|
||||
start.until(self.args.span)
|
||||
}
|
||||
}
|
||||
|
||||
impl FormatDebug for InternalCommand {
|
||||
fn fmt_debug(&self, f: &mut DebugFormatter, source: &str) -> fmt::Result {
|
||||
f.say("internal", self.args.debug(source))
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(new, Debug, Eq, PartialEq)]
|
||||
@ -105,7 +158,6 @@ impl InternalCommand {
|
||||
context: &mut Context,
|
||||
input: ClassifiedInputStream,
|
||||
source: Text,
|
||||
is_first_command: bool,
|
||||
) -> Result<InputStream, ShellError> {
|
||||
if log_enabled!(log::Level::Trace) {
|
||||
trace!(target: "nu::run::internal", "->");
|
||||
@ -122,10 +174,9 @@ impl InternalCommand {
|
||||
context.run_command(
|
||||
command,
|
||||
self.name_tag.clone(),
|
||||
self.args,
|
||||
self.args.item,
|
||||
&source,
|
||||
objects,
|
||||
is_first_command,
|
||||
)
|
||||
};
|
||||
|
||||
@ -201,12 +252,31 @@ impl InternalCommand {
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Eq, PartialEq)]
|
||||
#[derive(Debug, Clone, Eq, PartialEq)]
|
||||
pub(crate) struct ExternalCommand {
|
||||
pub(crate) name: String,
|
||||
|
||||
pub(crate) name_tag: Tag,
|
||||
pub(crate) args: Vec<Tagged<String>>,
|
||||
pub(crate) args: Spanned<Vec<Tagged<String>>>,
|
||||
}
|
||||
|
||||
impl FormatDebug for ExternalCommand {
|
||||
fn fmt_debug(&self, f: &mut DebugFormatter, source: &str) -> fmt::Result {
|
||||
write!(f, "{}", self.name)?;
|
||||
|
||||
if self.args.item.len() > 0 {
|
||||
write!(f, " ")?;
|
||||
write!(f, "{}", self.args.iter().map(|i| i.debug(source)).join(" "))?;
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
impl HasSpan for ExternalCommand {
|
||||
fn span(&self) -> Span {
|
||||
self.name_tag.span.until(self.args.span)
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
@ -230,7 +300,7 @@ impl ExternalCommand {
|
||||
trace!(target: "nu::run::external", "inputs = {:?}", inputs);
|
||||
|
||||
let mut arg_string = format!("{}", self.name);
|
||||
for arg in &self.args {
|
||||
for arg in &self.args.item {
|
||||
arg_string.push_str(&arg);
|
||||
}
|
||||
|
||||
@ -275,7 +345,7 @@ impl ExternalCommand {
|
||||
process = Exec::shell(itertools::join(commands, " && "))
|
||||
} else {
|
||||
process = Exec::cmd(&self.name);
|
||||
for arg in &self.args {
|
||||
for arg in &self.args.item {
|
||||
let arg_chars: Vec<_> = arg.chars().collect();
|
||||
if arg_chars.len() > 1
|
||||
&& arg_chars[0] == '"'
|
||||
|
@ -19,8 +19,8 @@ pub struct UnevaluatedCallInfo {
|
||||
pub name_tag: Tag,
|
||||
}
|
||||
|
||||
impl ToDebug for UnevaluatedCallInfo {
|
||||
fn fmt_debug(&self, f: &mut fmt::Formatter, source: &str) -> fmt::Result {
|
||||
impl FormatDebug for UnevaluatedCallInfo {
|
||||
fn fmt_debug(&self, f: &mut DebugFormatter, source: &str) -> fmt::Result {
|
||||
self.args.fmt_debug(f, source)
|
||||
}
|
||||
}
|
||||
@ -96,8 +96,14 @@ impl RawCommandArgs {
|
||||
}
|
||||
}
|
||||
|
||||
impl ToDebug for CommandArgs {
|
||||
fn fmt_debug(&self, f: &mut fmt::Formatter, source: &str) -> fmt::Result {
|
||||
impl std::fmt::Debug for CommandArgs {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
self.call_info.fmt(f)
|
||||
}
|
||||
}
|
||||
|
||||
impl FormatDebug for CommandArgs {
|
||||
fn fmt_debug(&self, f: &mut DebugFormatter, source: &str) -> fmt::Result {
|
||||
self.call_info.fmt_debug(f, source)
|
||||
}
|
||||
}
|
||||
@ -377,7 +383,7 @@ impl EvaluatedCommandArgs {
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
pub enum CommandAction {
|
||||
ChangePath(String),
|
||||
Exit,
|
||||
@ -389,8 +395,8 @@ pub enum CommandAction {
|
||||
LeaveShell,
|
||||
}
|
||||
|
||||
impl ToDebug for CommandAction {
|
||||
fn fmt_debug(&self, f: &mut fmt::Formatter, _source: &str) -> fmt::Result {
|
||||
impl FormatDebug for CommandAction {
|
||||
fn fmt_debug(&self, f: &mut DebugFormatter, _source: &str) -> fmt::Result {
|
||||
match self {
|
||||
CommandAction::ChangePath(s) => write!(f, "action:change-path={}", s),
|
||||
CommandAction::Exit => write!(f, "action:exit"),
|
||||
@ -408,7 +414,7 @@ impl ToDebug for CommandAction {
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
pub enum ReturnSuccess {
|
||||
Value(Tagged<Value>),
|
||||
Action(CommandAction),
|
||||
@ -416,8 +422,8 @@ pub enum ReturnSuccess {
|
||||
|
||||
pub type ReturnValue = Result<ReturnSuccess, ShellError>;
|
||||
|
||||
impl ToDebug for ReturnValue {
|
||||
fn fmt_debug(&self, f: &mut fmt::Formatter, source: &str) -> fmt::Result {
|
||||
impl FormatDebug for ReturnValue {
|
||||
fn fmt_debug(&self, f: &mut DebugFormatter, source: &str) -> fmt::Result {
|
||||
match self {
|
||||
Err(err) => write!(f, "{}", err.debug(source)),
|
||||
Ok(ReturnSuccess::Value(v)) => write!(f, "{:?}", v.debug()),
|
||||
@ -538,20 +544,13 @@ impl Command {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn run(
|
||||
&self,
|
||||
args: CommandArgs,
|
||||
registry: ®istry::CommandRegistry,
|
||||
is_first_command: bool,
|
||||
) -> OutputStream {
|
||||
pub fn run(&self, args: CommandArgs, registry: ®istry::CommandRegistry) -> OutputStream {
|
||||
match self {
|
||||
Command::WholeStream(command) => match command.run(args, registry) {
|
||||
Ok(stream) => stream,
|
||||
Err(err) => OutputStream::one(Err(err)),
|
||||
},
|
||||
Command::PerItem(command) => {
|
||||
self.run_helper(command.clone(), args, registry.clone(), is_first_command)
|
||||
}
|
||||
Command::PerItem(command) => self.run_helper(command.clone(), args, registry.clone()),
|
||||
}
|
||||
}
|
||||
|
||||
@ -560,7 +559,6 @@ impl Command {
|
||||
command: Arc<dyn PerItemCommand>,
|
||||
args: CommandArgs,
|
||||
registry: CommandRegistry,
|
||||
is_first_command: bool,
|
||||
) -> OutputStream {
|
||||
let raw_args = RawCommandArgs {
|
||||
host: args.host,
|
||||
@ -569,40 +567,23 @@ impl Command {
|
||||
call_info: args.call_info,
|
||||
};
|
||||
|
||||
if !is_first_command {
|
||||
let out = args
|
||||
.input
|
||||
.values
|
||||
.map(move |x| {
|
||||
let call_info = raw_args
|
||||
.clone()
|
||||
.call_info
|
||||
.evaluate(®istry, &Scope::it_value(x.clone()))
|
||||
.unwrap();
|
||||
match command.run(&call_info, ®istry, &raw_args, x) {
|
||||
Ok(o) => o,
|
||||
Err(e) => VecDeque::from(vec![ReturnValue::Err(e)]).to_output_stream(),
|
||||
}
|
||||
})
|
||||
.flatten();
|
||||
let out = args
|
||||
.input
|
||||
.values
|
||||
.map(move |x| {
|
||||
let call_info = raw_args
|
||||
.clone()
|
||||
.call_info
|
||||
.evaluate(®istry, &Scope::it_value(x.clone()))
|
||||
.unwrap();
|
||||
match command.run(&call_info, ®istry, &raw_args, x) {
|
||||
Ok(o) => o,
|
||||
Err(e) => VecDeque::from(vec![ReturnValue::Err(e)]).to_output_stream(),
|
||||
}
|
||||
})
|
||||
.flatten();
|
||||
|
||||
out.to_output_stream()
|
||||
} else {
|
||||
let nothing = Value::nothing().tagged(Tag::unknown());
|
||||
let call_info = raw_args
|
||||
.clone()
|
||||
.call_info
|
||||
.evaluate(®istry, &Scope::it_value(nothing.clone()))
|
||||
.unwrap();
|
||||
|
||||
match command
|
||||
.run(&call_info, ®istry, &raw_args, nothing)
|
||||
.into()
|
||||
{
|
||||
Ok(o) => o,
|
||||
Err(e) => OutputStream::one(Err(e)),
|
||||
}
|
||||
}
|
||||
out.to_output_stream()
|
||||
}
|
||||
|
||||
pub fn is_binary(&self) -> bool {
|
||||
|
@ -4,7 +4,6 @@ use crate::errors::ShellError;
|
||||
use crate::parser::hir::SyntaxShape;
|
||||
use crate::parser::registry::{self};
|
||||
use crate::prelude::*;
|
||||
use std::iter::FromIterator;
|
||||
use std::path::PathBuf;
|
||||
|
||||
pub struct Config;
|
||||
@ -13,6 +12,7 @@ pub struct Config;
|
||||
pub struct ConfigArgs {
|
||||
load: Option<Tagged<PathBuf>>,
|
||||
set: Option<(Tagged<String>, Tagged<Value>)>,
|
||||
set_into: Option<Tagged<String>>,
|
||||
get: Option<Tagged<String>>,
|
||||
clear: Tagged<bool>,
|
||||
remove: Option<Tagged<String>>,
|
||||
@ -26,12 +26,25 @@ impl WholeStreamCommand for Config {
|
||||
|
||||
fn signature(&self) -> Signature {
|
||||
Signature::build("config")
|
||||
.named("load", SyntaxShape::Path)
|
||||
.named("set", SyntaxShape::Any)
|
||||
.named("get", SyntaxShape::Any)
|
||||
.named("remove", SyntaxShape::Any)
|
||||
.switch("clear")
|
||||
.switch("path")
|
||||
.named(
|
||||
"load",
|
||||
SyntaxShape::Path,
|
||||
"load the config from the path give",
|
||||
)
|
||||
.named(
|
||||
"set",
|
||||
SyntaxShape::Any,
|
||||
"set a value in the config, eg) --set [key value]",
|
||||
)
|
||||
.named(
|
||||
"set_into",
|
||||
SyntaxShape::Member,
|
||||
"sets a variable from values in the pipeline",
|
||||
)
|
||||
.named("get", SyntaxShape::Any, "get a value from the config")
|
||||
.named("remove", SyntaxShape::Any, "remove a value from the config")
|
||||
.switch("clear", "clear the config")
|
||||
.switch("path", "return the path to the config file")
|
||||
}
|
||||
|
||||
fn usage(&self) -> &str {
|
||||
@ -51,85 +64,110 @@ pub fn config(
|
||||
ConfigArgs {
|
||||
load,
|
||||
set,
|
||||
set_into,
|
||||
get,
|
||||
clear,
|
||||
remove,
|
||||
path,
|
||||
}: ConfigArgs,
|
||||
RunnableContext { name, .. }: RunnableContext,
|
||||
RunnableContext { name, input, .. }: RunnableContext,
|
||||
) -> Result<OutputStream, ShellError> {
|
||||
let name_span = name.clone();
|
||||
|
||||
let configuration = if let Some(supplied) = load {
|
||||
Some(supplied.item().clone())
|
||||
} else {
|
||||
None
|
||||
let stream = async_stream! {
|
||||
let configuration = if let Some(supplied) = load {
|
||||
Some(supplied.item().clone())
|
||||
} else {
|
||||
None
|
||||
};
|
||||
|
||||
let mut result = crate::data::config::read(name_span, &configuration)?;
|
||||
|
||||
if let Some(v) = get {
|
||||
let key = v.to_string();
|
||||
let value = result
|
||||
.get(&key)
|
||||
.ok_or_else(|| ShellError::labeled_error("Missing key in config", "key", v.tag()))?;
|
||||
|
||||
match value {
|
||||
Tagged {
|
||||
item: Value::Table(list),
|
||||
..
|
||||
} => {
|
||||
for l in list {
|
||||
yield ReturnSuccess::value(l.clone());
|
||||
}
|
||||
}
|
||||
x => yield ReturnSuccess::value(x.clone()),
|
||||
}
|
||||
}
|
||||
else if let Some((key, value)) = set {
|
||||
result.insert(key.to_string(), value.clone());
|
||||
|
||||
config::write(&result, &configuration)?;
|
||||
|
||||
yield ReturnSuccess::value(Value::Row(result.into()).tagged(value.tag()));
|
||||
}
|
||||
else if let Some(v) = set_into {
|
||||
let rows: Vec<Tagged<Value>> = input.values.collect().await;
|
||||
let key = v.to_string();
|
||||
|
||||
if rows.len() == 0 {
|
||||
yield Err(ShellError::labeled_error("No values given for set_into", "needs value(s) from pipeline", v.tag()));
|
||||
} else if rows.len() == 1 {
|
||||
// A single value
|
||||
let value = &rows[0];
|
||||
|
||||
result.insert(key.to_string(), value.clone());
|
||||
|
||||
config::write(&result, &configuration)?;
|
||||
|
||||
yield ReturnSuccess::value(Value::Row(result.into()).tagged(name));
|
||||
} else {
|
||||
// Take in the pipeline as a table
|
||||
let value = Value::Table(rows).tagged(name.clone());
|
||||
|
||||
result.insert(key.to_string(), value.clone());
|
||||
|
||||
config::write(&result, &configuration)?;
|
||||
|
||||
yield ReturnSuccess::value(Value::Row(result.into()).tagged(name));
|
||||
}
|
||||
}
|
||||
else if let Tagged { item: true, tag } = clear {
|
||||
result.clear();
|
||||
|
||||
config::write(&result, &configuration)?;
|
||||
|
||||
yield ReturnSuccess::value(Value::Row(result.into()).tagged(tag));
|
||||
|
||||
return;
|
||||
}
|
||||
else if let Tagged { item: true, tag } = path {
|
||||
let path = config::default_path_for(&configuration)?;
|
||||
|
||||
yield ReturnSuccess::value(Value::Primitive(Primitive::Path(path)).tagged(tag));
|
||||
}
|
||||
else if let Some(v) = remove {
|
||||
let key = v.to_string();
|
||||
|
||||
if result.contains_key(&key) {
|
||||
result.swap_remove(&key);
|
||||
config::write(&result, &configuration).unwrap();
|
||||
} else {
|
||||
yield Err(ShellError::labeled_error(
|
||||
"Key does not exist in config",
|
||||
"key",
|
||||
v.tag(),
|
||||
));
|
||||
}
|
||||
|
||||
yield ReturnSuccess::value(Value::Row(result.into()).tagged(v.tag()));
|
||||
}
|
||||
else {
|
||||
yield ReturnSuccess::value(Value::Row(result.into()).tagged(name));
|
||||
}
|
||||
};
|
||||
|
||||
let mut result = crate::data::config::read(name_span, &configuration)?;
|
||||
|
||||
if let Some(v) = get {
|
||||
let key = v.to_string();
|
||||
let value = result
|
||||
.get(&key)
|
||||
.ok_or_else(|| ShellError::labeled_error("Missing key in config", "key", v.tag()))?;
|
||||
|
||||
let mut results = VecDeque::new();
|
||||
|
||||
match value {
|
||||
Tagged {
|
||||
item: Value::Table(list),
|
||||
..
|
||||
} => {
|
||||
for l in list {
|
||||
results.push_back(ReturnSuccess::value(l.clone()));
|
||||
}
|
||||
}
|
||||
x => results.push_back(ReturnSuccess::value(x.clone())),
|
||||
}
|
||||
|
||||
return Ok(results.to_output_stream());
|
||||
}
|
||||
|
||||
if let Some((key, value)) = set {
|
||||
result.insert(key.to_string(), value.clone());
|
||||
|
||||
config::write(&result, &configuration)?;
|
||||
|
||||
return Ok(stream![Value::Row(result.into()).tagged(value.tag())].from_input_stream());
|
||||
}
|
||||
|
||||
if let Tagged { item: true, tag } = clear {
|
||||
result.clear();
|
||||
|
||||
config::write(&result, &configuration)?;
|
||||
|
||||
return Ok(stream![Value::Row(result.into()).tagged(tag)].from_input_stream());
|
||||
}
|
||||
|
||||
if let Tagged { item: true, tag } = path {
|
||||
let path = config::default_path_for(&configuration)?;
|
||||
|
||||
return Ok(stream![Value::Primitive(Primitive::Path(path)).tagged(tag)].from_input_stream());
|
||||
}
|
||||
|
||||
if let Some(v) = remove {
|
||||
let key = v.to_string();
|
||||
|
||||
if result.contains_key(&key) {
|
||||
result.swap_remove(&key);
|
||||
config::write(&result, &configuration)?;
|
||||
} else {
|
||||
return Err(ShellError::labeled_error(
|
||||
"Key does not exist in config",
|
||||
"key",
|
||||
v.tag(),
|
||||
));
|
||||
}
|
||||
|
||||
let obj = VecDeque::from_iter(vec![Value::Row(result.into()).tagged(v.tag())]);
|
||||
return Ok(obj.from_input_stream());
|
||||
}
|
||||
|
||||
return Ok(vec![Value::Row(result.into()).tagged(name)].into());
|
||||
Ok(stream.to_output_stream())
|
||||
}
|
||||
|
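Across these command changes the Signature builder calls pick up a human-readable description for every positional argument, named flag, and switch. The sketch below is a minimal, dependency-free illustration of that builder pattern; the Shape enum, field names, and example descriptions are stand-ins chosen for the example, not nushell's real Signature API.

// Minimal sketch of a signature builder that carries a description with each
// flag, mirroring the pattern the diff applies to every command.
#[derive(Debug, Clone, Copy)]
enum Shape {
    Any,
    Path,
}

#[derive(Debug, Default)]
struct Signature {
    name: String,
    named_flags: Vec<(String, Shape, String)>,
    switches: Vec<(String, String)>,
}

impl Signature {
    fn build(name: &str) -> Self {
        Signature {
            name: name.to_string(),
            ..Default::default()
        }
    }

    // Each entry stores the shape *and* the help text, so a later help step
    // can print "--flag <shape>: description".
    fn named(mut self, flag: &str, shape: Shape, desc: &str) -> Self {
        self.named_flags.push((flag.to_string(), shape, desc.to_string()));
        self
    }

    fn switch(mut self, flag: &str, desc: &str) -> Self {
        self.switches.push((flag.to_string(), desc.to_string()));
        self
    }
}

fn main() {
    let sig = Signature::build("config")
        .named("load", Shape::Path, "load the config from the given path")
        .named("get", Shape::Any, "get a value from the config")
        .switch("clear", "clear the config");
    println!("{:#?}", sig);
}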
@@ -21,10 +21,9 @@ impl PerItemCommand for Cpy

    fn signature(&self) -> Signature {
        Signature::build("cp")
-            .required("src", SyntaxShape::Pattern)
-            .required("dst", SyntaxShape::Path)
-            .named("file", SyntaxShape::Any)
-            .switch("recursive")
+            .required("src", SyntaxShape::Pattern, "the place to copy from")
+            .required("dst", SyntaxShape::Path, "the place to copy to")
+            .switch("recursive", "copy recursively through subdirectories")
    }

    fn usage(&self) -> &str {
@@ -17,7 +17,9 @@ impl WholeStreamCommand for Date
    }

    fn signature(&self) -> Signature {
-        Signature::build("date").switch("utc").switch("local")
+        Signature::build("date")
+            .switch("utc", "use universal time (UTC)")
+            .switch("local", "use the local time")
    }

    fn usage(&self) -> &str {
@@ -12,7 +12,7 @@ impl PerItemCommand for Echo
    }

    fn signature(&self) -> Signature {
-        Signature::build("echo").rest(SyntaxShape::Any)
+        Signature::build("echo").rest(SyntaxShape::Any, "the values to echo")
    }

    fn usage(&self) -> &str {
@ -35,37 +35,34 @@ fn run(
|
||||
_registry: &CommandRegistry,
|
||||
_raw_args: &RawCommandArgs,
|
||||
) -> Result<OutputStream, ShellError> {
|
||||
let name = call_info.name_tag.clone();
|
||||
|
||||
let mut output = String::new();
|
||||
|
||||
let mut first = true;
|
||||
let mut output = vec![];
|
||||
|
||||
if let Some(ref positional) = call_info.args.positional {
|
||||
for i in positional {
|
||||
match i.as_string() {
|
||||
Ok(s) => {
|
||||
if !first {
|
||||
output.push_str(" ");
|
||||
} else {
|
||||
first = false;
|
||||
output.push(Ok(ReturnSuccess::Value(
|
||||
Value::string(s).tagged(i.tag.clone()),
|
||||
)));
|
||||
}
|
||||
_ => match i {
|
||||
Tagged {
|
||||
item: Value::Table(table),
|
||||
..
|
||||
} => {
|
||||
for item in table {
|
||||
output.push(Ok(ReturnSuccess::Value(item.clone())));
|
||||
}
|
||||
}
|
||||
|
||||
output.push_str(&s);
|
||||
}
|
||||
_ => {
|
||||
return Err(ShellError::type_error(
|
||||
"a string-compatible value",
|
||||
i.tagged_type_name(),
|
||||
))
|
||||
}
|
||||
_ => {
|
||||
output.push(Ok(ReturnSuccess::Value(i.clone())));
|
||||
}
|
||||
},
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let stream = VecDeque::from(vec![Ok(ReturnSuccess::Value(
|
||||
Value::string(output).tagged(name),
|
||||
))]);
|
||||
let stream = VecDeque::from(output);
|
||||
|
||||
Ok(stream.to_output_stream())
|
||||
}
|
||||
|
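The echo rework above switches from concatenating arguments into one joined string to emitting one output value per argument, with table arguments flattened into their rows. A rough, self-contained sketch of that behaviour; the simplified Value enum here is illustrative, not nushell's real value type.

// Sketch of echo's new behaviour: one output value per argument, with table
// arguments flattened into their rows.
#[derive(Debug)]
enum Value {
    String(String),
    Int(i64),
    Table(Vec<Value>),
}

fn echo(args: Vec<Value>) -> Vec<Value> {
    let mut output = Vec::new();
    for arg in args {
        match arg {
            // A table argument contributes each of its rows separately.
            Value::Table(rows) => output.extend(rows),
            // Everything else is passed through as its own value.
            other => output.push(other),
        }
    }
    output
}

fn main() {
    let out = echo(vec![
        Value::String("hello".into()),
        Value::Table(vec![Value::Int(1), Value::Int(2)]),
    ]);
    for value in out {
        println!("{:?}", value);
    }
}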
@ -1,7 +1,6 @@
|
||||
use crate::commands::command::CommandAction;
|
||||
use crate::commands::PerItemCommand;
|
||||
use crate::commands::UnevaluatedCallInfo;
|
||||
use crate::data::meta::Span;
|
||||
use crate::errors::ShellError;
|
||||
use crate::parser::registry;
|
||||
use crate::prelude::*;
|
||||
@ -15,7 +14,11 @@ impl PerItemCommand for Enter {
|
||||
}
|
||||
|
||||
fn signature(&self) -> registry::Signature {
|
||||
Signature::build("enter").required("location", SyntaxShape::Path)
|
||||
Signature::build("enter").required(
|
||||
"location",
|
||||
SyntaxShape::Path,
|
||||
"the location to create a new shell from",
|
||||
)
|
||||
}
|
||||
|
||||
fn usage(&self) -> &str {
|
||||
@ -34,10 +37,12 @@ impl PerItemCommand for Enter {
|
||||
match call_info.args.expect_nth(0)? {
|
||||
Tagged {
|
||||
item: Value::Primitive(Primitive::Path(location)),
|
||||
tag,
|
||||
..
|
||||
} => {
|
||||
let location_string = location.display().to_string();
|
||||
let location_clone = location_string.clone();
|
||||
let tag_clone = tag.clone();
|
||||
|
||||
if location.starts_with("help") {
|
||||
let spec = location_string.split(":").collect::<Vec<&str>>();
|
||||
@ -71,9 +76,8 @@ impl PerItemCommand for Enter {
|
||||
crate::commands::open::fetch(
|
||||
&full_path,
|
||||
&location_clone,
|
||||
Span::unknown(),
|
||||
)
|
||||
.await.unwrap();
|
||||
tag_clone.span,
|
||||
).await?;
|
||||
|
||||
match contents {
|
||||
Value::Primitive(Primitive::String(_)) => {
|
||||
@ -101,7 +105,6 @@ impl PerItemCommand for Enter {
|
||||
let mut result = converter.run(
|
||||
new_args.with_input(vec![tagged_contents]),
|
||||
®istry,
|
||||
false
|
||||
);
|
||||
let result_vec: Vec<Result<ReturnSuccess, ShellError>> =
|
||||
result.drain_vec().await;
|
||||
|
@@ -11,7 +11,7 @@ impl WholeStreamCommand for Exit
    }

    fn signature(&self) -> Signature {
-        Signature::build("exit").switch("now")
+        Signature::build("exit").switch("now", "exit out of the shell immediately")
    }

    fn usage(&self) -> &str {
@ -19,8 +19,12 @@ impl PerItemCommand for Fetch {
|
||||
|
||||
fn signature(&self) -> Signature {
|
||||
Signature::build(self.name())
|
||||
.required("path", SyntaxShape::Path)
|
||||
.switch("raw")
|
||||
.required(
|
||||
"path",
|
||||
SyntaxShape::Path,
|
||||
"the URL to fetch the contents from",
|
||||
)
|
||||
.switch("raw", "fetch contents as text rather than a table")
|
||||
}
|
||||
|
||||
fn usage(&self) -> &str {
|
||||
@ -96,7 +100,7 @@ fn run(
|
||||
name_tag: raw_args.call_info.name_tag,
|
||||
}
|
||||
};
|
||||
let mut result = converter.run(new_args.with_input(vec![tagged_contents]), ®istry, false);
|
||||
let mut result = converter.run(new_args.with_input(vec![tagged_contents]), ®istry);
|
||||
let result_vec: Vec<Result<ReturnSuccess, ShellError>> = result.drain_vec().await;
|
||||
for res in result_vec {
|
||||
match res {
|
||||
|
@ -16,7 +16,11 @@ impl WholeStreamCommand for First {
|
||||
}
|
||||
|
||||
fn signature(&self) -> Signature {
|
||||
Signature::build("first").optional("rows", SyntaxShape::Int)
|
||||
Signature::build("first").optional(
|
||||
"rows",
|
||||
SyntaxShape::Int,
|
||||
"starting from the front, the number of rows to return",
|
||||
)
|
||||
}
|
||||
|
||||
fn usage(&self) -> &str {
|
||||
|
@@ -16,7 +16,8 @@ impl WholeStreamCommand for FromCSV
    }

    fn signature(&self) -> Signature {
-        Signature::build("from-csv").switch("headerless")
+        Signature::build("from-csv")
+            .switch("headerless", "don't treat the first row as column names")
    }

    fn usage(&self) -> &str {
@@ -15,7 +15,7 @@ impl WholeStreamCommand for FromJSON
    }

    fn signature(&self) -> Signature {
-        Signature::build("from-json").switch("objects")
+        Signature::build("from-json").switch("objects", "treat each line as a separate value")
    }

    fn usage(&self) -> &str {
@ -7,6 +7,8 @@ pub struct FromSSV;
|
||||
#[derive(Deserialize)]
|
||||
pub struct FromSSVArgs {
|
||||
headerless: bool,
|
||||
#[serde(rename(deserialize = "aligned-columns"))]
|
||||
aligned_columns: bool,
|
||||
#[serde(rename(deserialize = "minimum-spaces"))]
|
||||
minimum_spaces: Option<Tagged<usize>>,
|
||||
}
|
||||
@ -21,8 +23,13 @@ impl WholeStreamCommand for FromSSV {
|
||||
|
||||
fn signature(&self) -> Signature {
|
||||
Signature::build(STRING_REPRESENTATION)
|
||||
.switch("headerless")
|
||||
.named("minimum-spaces", SyntaxShape::Int)
|
||||
.switch("headerless", "don't treat the first row as column names")
|
||||
.switch("aligned-columns", "assume columns are aligned")
|
||||
.named(
|
||||
"minimum-spaces",
|
||||
SyntaxShape::Int,
|
||||
"the mininum spaces to separate columns",
|
||||
)
|
||||
}
|
||||
|
||||
fn usage(&self) -> &str {
|
||||
@ -41,52 +48,94 @@ impl WholeStreamCommand for FromSSV {
|
||||
fn string_to_table(
|
||||
s: &str,
|
||||
headerless: bool,
|
||||
aligned_columns: bool,
|
||||
split_at: usize,
|
||||
) -> Option<Vec<Vec<(String, String)>>> {
|
||||
let mut lines = s.lines().filter(|l| !l.trim().is_empty());
|
||||
let separator = " ".repeat(std::cmp::max(split_at, 1));
|
||||
|
||||
let headers = lines
|
||||
.next()?
|
||||
.split(&separator)
|
||||
.map(|s| s.trim())
|
||||
.filter(|s| !s.is_empty())
|
||||
.map(|s| s.to_owned())
|
||||
.collect::<Vec<String>>();
|
||||
if aligned_columns {
|
||||
let headers_raw = lines.next()?;
|
||||
|
||||
let header_row = if headerless {
|
||||
(1..=headers.len())
|
||||
.map(|i| format!("Column{}", i))
|
||||
.collect::<Vec<String>>()
|
||||
let headers = headers_raw
|
||||
.trim()
|
||||
.split(&separator)
|
||||
.map(str::trim)
|
||||
.filter(|s| !s.is_empty())
|
||||
.map(|s| (headers_raw.find(s).unwrap(), s.to_owned()));
|
||||
|
||||
let columns = if headerless {
|
||||
headers
|
||||
.enumerate()
|
||||
.map(|(header_no, (string_index, _))| {
|
||||
(string_index, format!("Column{}", header_no + 1))
|
||||
})
|
||||
.collect::<Vec<(usize, String)>>()
|
||||
} else {
|
||||
headers.collect::<Vec<(usize, String)>>()
|
||||
};
|
||||
|
||||
Some(
|
||||
lines
|
||||
.map(|l| {
|
||||
columns
|
||||
.iter()
|
||||
.enumerate()
|
||||
.filter_map(|(i, (start, col))| {
|
||||
(match columns.get(i + 1) {
|
||||
Some((end, _)) => l.get(*start..*end),
|
||||
None => l.get(*start..),
|
||||
})
|
||||
.and_then(|s| Some((col.clone(), String::from(s.trim()))))
|
||||
})
|
||||
.collect()
|
||||
})
|
||||
.collect(),
|
||||
)
|
||||
} else {
|
||||
headers
|
||||
};
|
||||
let headers = lines
|
||||
.next()?
|
||||
.split(&separator)
|
||||
.map(|s| s.trim())
|
||||
.filter(|s| !s.is_empty())
|
||||
.map(|s| s.to_owned())
|
||||
.collect::<Vec<String>>();
|
||||
|
||||
Some(
|
||||
lines
|
||||
.map(|l| {
|
||||
header_row
|
||||
.iter()
|
||||
.zip(
|
||||
l.split(&separator)
|
||||
.map(|s| s.trim())
|
||||
.filter(|s| !s.is_empty()),
|
||||
)
|
||||
.map(|(a, b)| (String::from(a), String::from(b)))
|
||||
.collect()
|
||||
})
|
||||
.collect(),
|
||||
)
|
||||
let header_row = if headerless {
|
||||
(1..=headers.len())
|
||||
.map(|i| format!("Column{}", i))
|
||||
.collect::<Vec<String>>()
|
||||
} else {
|
||||
headers
|
||||
};
|
||||
|
||||
Some(
|
||||
lines
|
||||
.map(|l| {
|
||||
header_row
|
||||
.iter()
|
||||
.zip(
|
||||
l.split(&separator)
|
||||
.map(|s| s.trim())
|
||||
.filter(|s| !s.is_empty()),
|
||||
)
|
||||
.map(|(a, b)| (String::from(a), String::from(b)))
|
||||
.collect()
|
||||
})
|
||||
.collect(),
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
fn from_ssv_string_to_value(
|
||||
s: &str,
|
||||
headerless: bool,
|
||||
aligned_columns: bool,
|
||||
split_at: usize,
|
||||
tag: impl Into<Tag>,
|
||||
) -> Option<Tagged<Value>> {
|
||||
let tag = tag.into();
|
||||
let rows = string_to_table(s, headerless, split_at)?
|
||||
let rows = string_to_table(s, headerless, aligned_columns, split_at)?
|
||||
.iter()
|
||||
.map(|row| {
|
||||
let mut tagged_dict = TaggedDictBuilder::new(&tag);
|
||||
@ -106,6 +155,7 @@ fn from_ssv_string_to_value(
|
||||
fn from_ssv(
|
||||
FromSSVArgs {
|
||||
headerless,
|
||||
aligned_columns,
|
||||
minimum_spaces,
|
||||
}: FromSSVArgs,
|
||||
RunnableContext { input, name, .. }: RunnableContext,
|
||||
@ -136,7 +186,7 @@ fn from_ssv(
|
||||
}
|
||||
}
|
||||
|
||||
match from_ssv_string_to_value(&concat_string, headerless, split_at, name.clone()) {
|
||||
match from_ssv_string_to_value(&concat_string, headerless, aligned_columns, split_at, name.clone()) {
|
||||
Some(x) => match x {
|
||||
Tagged { item: Value::Table(list), ..} => {
|
||||
for l in list { yield ReturnSuccess::value(l) }
|
||||
@ -171,11 +221,11 @@ mod tests {
|
||||
|
||||
a b
|
||||
|
||||
1 2
|
||||
1 2
|
||||
|
||||
3 4
|
||||
3 4
|
||||
"#;
|
||||
let result = string_to_table(input, false, 1);
|
||||
let result = string_to_table(input, false, true, 1);
|
||||
assert_eq!(
|
||||
result,
|
||||
Some(vec![
|
||||
@ -185,6 +235,20 @@ mod tests {
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn it_deals_with_single_column_input() {
|
||||
let input = r#"
|
||||
a
|
||||
1
|
||||
2
|
||||
"#;
|
||||
let result = string_to_table(input, false, true, 1);
|
||||
assert_eq!(
|
||||
result,
|
||||
Some(vec![vec![owned("a", "1")], vec![owned("a", "2")]])
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn it_ignores_headers_when_headerless() {
|
||||
let input = r#"
|
||||
@ -192,7 +256,7 @@ mod tests {
|
||||
1 2
|
||||
3 4
|
||||
"#;
|
||||
let result = string_to_table(input, true, 1);
|
||||
let result = string_to_table(input, true, true, 1);
|
||||
assert_eq!(
|
||||
result,
|
||||
Some(vec![
|
||||
@ -205,19 +269,19 @@ mod tests {
|
||||
#[test]
|
||||
fn it_returns_none_given_an_empty_string() {
|
||||
let input = "";
|
||||
let result = string_to_table(input, true, 1);
|
||||
assert_eq!(result, None);
|
||||
let result = string_to_table(input, true, true, 1);
|
||||
assert!(result.is_none());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn it_allows_a_predefined_number_of_spaces() {
|
||||
let input = r#"
|
||||
column a column b
|
||||
entry 1 entry number 2
|
||||
3 four
|
||||
entry 1 entry number 2
|
||||
3 four
|
||||
"#;
|
||||
|
||||
let result = string_to_table(input, false, 3);
|
||||
let result = string_to_table(input, false, true, 3);
|
||||
assert_eq!(
|
||||
result,
|
||||
Some(vec![
|
||||
@ -239,12 +303,58 @@ mod tests {
|
||||
|
||||
let trimmed = |s: &str| s.trim() == s;
|
||||
|
||||
let result = string_to_table(input, false, 2).unwrap();
|
||||
let result = string_to_table(input, false, true, 2).unwrap();
|
||||
assert!(result
|
||||
.iter()
|
||||
.all(|row| row.iter().all(|(a, b)| trimmed(a) && trimmed(b))))
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn it_keeps_empty_columns() {
|
||||
let input = r#"
|
||||
colA col B col C
|
||||
val2 val3
|
||||
val4 val 5 val 6
|
||||
val7 val8
|
||||
"#;
|
||||
|
||||
let result = string_to_table(input, false, true, 2).unwrap();
|
||||
assert_eq!(
|
||||
true,
|
||||
result
|
||||
.iter()
|
||||
.all(|row| row.iter().all(|(a, b)| trimmed(a) && trimmed(b)))
|
||||
result,
|
||||
vec![
|
||||
vec![
|
||||
owned("colA", ""),
|
||||
owned("col B", "val2"),
|
||||
owned("col C", "val3")
|
||||
],
|
||||
vec![
|
||||
owned("colA", "val4"),
|
||||
owned("col B", "val 5"),
|
||||
owned("col C", "val 6")
|
||||
],
|
||||
vec![
|
||||
owned("colA", "val7"),
|
||||
owned("col B", ""),
|
||||
owned("col C", "val8")
|
||||
],
|
||||
]
|
||||
)
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn it_uses_the_full_final_column() {
|
||||
let input = r#"
|
||||
colA col B
|
||||
val1 val2 trailing value that should be included
|
||||
"#;
|
||||
|
||||
let result = string_to_table(input, false, true, 2).unwrap();
|
||||
assert_eq!(
|
||||
result,
|
||||
vec![vec![
|
||||
owned("colA", "val1"),
|
||||
owned("col B", "val2 trailing value that should be included"),
|
||||
],]
|
||||
)
|
||||
}
|
||||
}
|
||||
|
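The aligned-columns mode added to from-ssv keys each column off the byte offset where its header starts, then slices every data row at those offsets, which is what lets empty cells and the full final column survive. Below is a standalone sketch of that idea using only the standard library; the function name and the min_spaces parameter are illustrative, and it shares the original's simplification of locating headers with find().

// Standalone sketch of the aligned-columns idea: column boundaries come from
// the byte offsets where each header starts, and every data row is sliced at
// those offsets, so empty cells and the full final column are preserved.
fn aligned_columns_to_table(text: &str, min_spaces: usize) -> Option<Vec<Vec<(String, String)>>> {
    let mut lines = text.lines().filter(|l| !l.trim().is_empty());
    let separator = " ".repeat(min_spaces.max(1));
    let header_row = lines.next()?;

    // (start offset, column name), in order of appearance. Like the original,
    // this uses find() and assumes header names do not repeat.
    let columns: Vec<(usize, String)> = header_row
        .split(&separator)
        .map(str::trim)
        .filter(|s| !s.is_empty())
        .map(|name| (header_row.find(name).unwrap(), name.to_string()))
        .collect();

    Some(
        lines
            .map(|line| {
                columns
                    .iter()
                    .enumerate()
                    .filter_map(|(i, (start, name))| {
                        // Slice from this column's offset up to the next
                        // column's offset (or to the end of the line).
                        let cell = match columns.get(i + 1) {
                            Some((end, _)) => line.get(*start..*end),
                            None => line.get(*start..),
                        };
                        cell.map(|s| (name.clone(), s.trim().to_string()))
                    })
                    .collect()
            })
            .collect(),
    )
}

fn main() {
    let input = "colA  col B  col C\n      val2   val3\nval4  val 5  val 6\n";
    for row in aligned_columns_to_table(input, 2).unwrap() {
        println!("{:?}", row);
    }
}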
@@ -16,7 +16,8 @@ impl WholeStreamCommand for FromTSV
    }

    fn signature(&self) -> Signature {
-        Signature::build("from-tsv").switch("headerless")
+        Signature::build("from-tsv")
+            .switch("headerless", "don't treat the first row as column names")
    }

    fn usage(&self) -> &str {
@ -134,3 +134,73 @@ fn from_xml(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStrea
|
||||
|
||||
Ok(stream.to_output_stream())
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
|
||||
use crate::commands::from_xml;
|
||||
use crate::data::meta::*;
|
||||
use crate::Value;
|
||||
use indexmap::IndexMap;
|
||||
|
||||
fn string(input: impl Into<String>) -> Tagged<Value> {
|
||||
Value::string(input.into()).tagged_unknown()
|
||||
}
|
||||
|
||||
fn row(entries: IndexMap<String, Tagged<Value>>) -> Tagged<Value> {
|
||||
Value::row(entries).tagged_unknown()
|
||||
}
|
||||
|
||||
fn table(list: &Vec<Tagged<Value>>) -> Tagged<Value> {
|
||||
Value::table(list).tagged_unknown()
|
||||
}
|
||||
|
||||
fn parse(xml: &str) -> Tagged<Value> {
|
||||
from_xml::from_xml_string_to_value(xml.to_string(), Tag::unknown()).unwrap()
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn parses_empty_element() {
|
||||
let source = "<nu></nu>";
|
||||
|
||||
assert_eq!(
|
||||
parse(source),
|
||||
row(indexmap! {
|
||||
"nu".into() => table(&vec![])
|
||||
})
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn parses_element_with_text() {
|
||||
let source = "<nu>La era de los tres caballeros</nu>";
|
||||
|
||||
assert_eq!(
|
||||
parse(source),
|
||||
row(indexmap! {
|
||||
"nu".into() => table(&vec![string("La era de los tres caballeros")])
|
||||
})
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn parses_element_with_elements() {
|
||||
let source = "\
|
||||
<nu>
|
||||
<dev>Andrés</dev>
|
||||
<dev>Jonathan</dev>
|
||||
<dev>Yehuda</dev>
|
||||
</nu>";
|
||||
|
||||
assert_eq!(
|
||||
parse(source),
|
||||
row(indexmap! {
|
||||
"nu".into() => table(&vec![
|
||||
row(indexmap! {"dev".into() => table(&vec![string("Andrés")])}),
|
||||
row(indexmap! {"dev".into() => table(&vec![string("Jonathan")])}),
|
||||
row(indexmap! {"dev".into() => table(&vec![string("Yehuda")])})
|
||||
])
|
||||
})
|
||||
);
|
||||
}
|
||||
}
|
||||
|
@ -1,8 +1,8 @@
|
||||
use crate::commands::WholeStreamCommand;
|
||||
use crate::data::meta::tag_for_tagged_list;
|
||||
use crate::data::Value;
|
||||
use crate::errors::ShellError;
|
||||
use crate::prelude::*;
|
||||
use crate::utils::did_you_mean;
|
||||
use log::trace;
|
||||
|
||||
pub struct Get;
|
||||
@ -20,8 +20,15 @@ impl WholeStreamCommand for Get {
|
||||
|
||||
fn signature(&self) -> Signature {
|
||||
Signature::build("get")
|
||||
.required("member", SyntaxShape::ColumnPath)
|
||||
.rest(SyntaxShape::ColumnPath)
|
||||
.required(
|
||||
"member",
|
||||
SyntaxShape::ColumnPath,
|
||||
"the path to the data to get",
|
||||
)
|
||||
.rest(
|
||||
SyntaxShape::ColumnPath,
|
||||
"optionally return additional data by path",
|
||||
)
|
||||
}
|
||||
|
||||
fn usage(&self) -> &str {
|
||||
@ -37,62 +44,98 @@ impl WholeStreamCommand for Get {
|
||||
}
|
||||
}
|
||||
|
||||
pub type ColumnPath = Vec<Tagged<String>>;
|
||||
pub type ColumnPath = Vec<Tagged<Value>>;
|
||||
|
||||
pub fn get_column_path(
|
||||
path: &ColumnPath,
|
||||
obj: &Tagged<Value>,
|
||||
) -> Result<Tagged<Value>, ShellError> {
|
||||
let mut current = Some(obj);
|
||||
for p in path.iter() {
|
||||
if let Some(obj) = current {
|
||||
current = match obj.get_data_by_key(&p) {
|
||||
Some(v) => Some(v),
|
||||
None =>
|
||||
// Before we give up, see if they gave us a path that matches a field name by itself
|
||||
{
|
||||
let possibilities = obj.data_descriptors();
|
||||
let fields = path.clone();
|
||||
|
||||
let mut possible_matches: Vec<_> = possibilities
|
||||
.iter()
|
||||
.map(|x| (natural::distance::levenshtein_distance(x, &p), x))
|
||||
.collect();
|
||||
let value = obj.get_data_by_column_path(
|
||||
obj.tag(),
|
||||
path,
|
||||
Box::new(move |(obj_source, column_path_tried)| {
|
||||
match obj_source {
|
||||
Value::Table(rows) => {
|
||||
let total = rows.len();
|
||||
let end_tag = match fields.iter().nth_back(if fields.len() > 2 { 1 } else { 0 })
|
||||
{
|
||||
Some(last_field) => last_field.tag(),
|
||||
None => column_path_tried.tag(),
|
||||
};
|
||||
|
||||
possible_matches.sort();
|
||||
return ShellError::labeled_error_with_secondary(
|
||||
"Row not found",
|
||||
format!(
|
||||
"There isn't a row indexed at '{}'",
|
||||
match &*column_path_tried {
|
||||
Value::Primitive(primitive) => primitive.format(None),
|
||||
_ => String::from(""),
|
||||
}
|
||||
),
|
||||
column_path_tried.tag(),
|
||||
format!("The table only has {} rows (0..{})", total, total - 1),
|
||||
end_tag,
|
||||
);
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
|
||||
if possible_matches.len() > 0 {
|
||||
return Err(ShellError::labeled_error(
|
||||
match &column_path_tried {
|
||||
Tagged {
|
||||
item: Value::Primitive(Primitive::Int(index)),
|
||||
..
|
||||
} => {
|
||||
return ShellError::labeled_error(
|
||||
"No rows available",
|
||||
format!(
|
||||
"Not a table. Perhaps you meant to get the column '{}' instead?",
|
||||
index
|
||||
),
|
||||
column_path_tried.tag(),
|
||||
)
|
||||
}
|
||||
_ => match did_you_mean(&obj_source, &column_path_tried) {
|
||||
Some(suggestions) => {
|
||||
return ShellError::labeled_error(
|
||||
"Unknown column",
|
||||
format!("did you mean '{}'?", possible_matches[0].1),
|
||||
tag_for_tagged_list(path.iter().map(|p| p.tag())),
|
||||
));
|
||||
} else {
|
||||
return Err(ShellError::labeled_error(
|
||||
format!("did you mean '{}'?", suggestions[0].1),
|
||||
tag_for_tagged_list(fields.iter().map(|p| p.tag())),
|
||||
)
|
||||
}
|
||||
None => {
|
||||
return ShellError::labeled_error(
|
||||
"Unknown column",
|
||||
"row does not contain this column",
|
||||
tag_for_tagged_list(path.iter().map(|p| p.tag())),
|
||||
));
|
||||
tag_for_tagged_list(fields.iter().map(|p| p.tag())),
|
||||
)
|
||||
}
|
||||
}
|
||||
},
|
||||
}
|
||||
}
|
||||
}
|
||||
}),
|
||||
);
|
||||
|
||||
match current {
|
||||
Some(v) => Ok(v.clone()),
|
||||
None => match obj {
|
||||
// If its None check for certain values.
|
||||
Tagged {
|
||||
item: Value::Primitive(Primitive::String(_)),
|
||||
..
|
||||
} => Ok(obj.clone()),
|
||||
Tagged {
|
||||
item: Value::Primitive(Primitive::Path(_)),
|
||||
..
|
||||
} => Ok(obj.clone()),
|
||||
_ => Ok(Value::nothing().tagged(&obj.tag)),
|
||||
let res = match value {
|
||||
Ok(fetched) => match fetched {
|
||||
Some(Tagged { item: v, .. }) => Ok((v.clone()).tagged(&obj.tag)),
|
||||
None => match obj {
|
||||
// If its None check for certain values.
|
||||
Tagged {
|
||||
item: Value::Primitive(Primitive::String(_)),
|
||||
..
|
||||
} => Ok(obj.clone()),
|
||||
Tagged {
|
||||
item: Value::Primitive(Primitive::Path(_)),
|
||||
..
|
||||
} => Ok(obj.clone()),
|
||||
_ => Ok(Value::nothing().tagged(&obj.tag)),
|
||||
},
|
||||
},
|
||||
}
|
||||
Err(reason) => Err(reason),
|
||||
};
|
||||
|
||||
res
|
||||
}
|
||||
|
||||
pub fn get(
|
||||
@ -111,26 +154,36 @@ pub fn get(
|
||||
|
||||
let member = vec![member.clone()];
|
||||
|
||||
let fields = vec![&member, &fields]
|
||||
let column_paths = vec![&member, &fields]
|
||||
.into_iter()
|
||||
.flatten()
|
||||
.collect::<Vec<&ColumnPath>>();
|
||||
|
||||
for column_path in &fields {
|
||||
match get_column_path(column_path, &item) {
|
||||
Ok(Tagged {
|
||||
item: Value::Table(l),
|
||||
..
|
||||
}) => {
|
||||
for item in l {
|
||||
result.push_back(ReturnSuccess::value(item.clone()));
|
||||
for path in column_paths {
|
||||
let res = get_column_path(&path, &item);
|
||||
|
||||
match res {
|
||||
Ok(got) => match got {
|
||||
Tagged {
|
||||
item: Value::Table(rows),
|
||||
..
|
||||
} => {
|
||||
for row in rows {
|
||||
result.push_back(ReturnSuccess::value(
|
||||
Tagged {
|
||||
item: row.item,
|
||||
tag: Tag::from(&item.tag),
|
||||
}
|
||||
.map_anchored(&item.tag.anchor),
|
||||
))
|
||||
}
|
||||
}
|
||||
}
|
||||
Ok(x) => result.push_back(ReturnSuccess::value(x.clone())),
|
||||
Err(x) => result.push_back(Err(x)),
|
||||
other => result
|
||||
.push_back(ReturnSuccess::value((*other).clone().tagged(&item.tag))),
|
||||
},
|
||||
Err(reason) => result.push_back(Err(reason)),
|
||||
}
|
||||
}
|
||||
|
||||
result
|
||||
})
|
||||
.flatten();
|
||||
|
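The reworked get error path above hands get_data_by_column_path a callback for building the error and, like the new group-by command, falls back to a "did you mean" suggestion ranked by edit distance over the row's column names. A small std-only sketch of that ranking follows; the real code relies on the natural crate's levenshtein_distance, so the helper below is an assumption-level stand-in.

// Std-only sketch of the "did you mean" ranking: compute the edit distance
// from the missing key to every known column name and suggest the closest.
fn levenshtein(a: &str, b: &str) -> usize {
    let a: Vec<char> = a.chars().collect();
    let b: Vec<char> = b.chars().collect();
    let mut prev: Vec<usize> = (0..=b.len()).collect();
    for (i, ca) in a.iter().enumerate() {
        let mut cur = vec![i + 1];
        for (j, cb) in b.iter().enumerate() {
            let cost = if ca == cb { 0 } else { 1 };
            cur.push((prev[j + 1] + 1).min(cur[j] + 1).min(prev[j] + cost));
        }
        prev = cur;
    }
    prev[b.len()]
}

fn did_you_mean<'a>(columns: &[&'a str], missing: &str) -> Option<&'a str> {
    columns
        .iter()
        .min_by_key(|candidate| levenshtein(candidate, missing))
        .copied()
}

fn main() {
    let columns = ["name", "country", "date"];
    if let Some(suggestion) = did_you_mean(&columns, "contry") {
        println!("Unknown column, did you mean '{}'?", suggestion);
    }
}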
src/commands/group_by.rs (new file, 189 lines)
@@ -0,0 +1,189 @@
|
||||
use crate::commands::WholeStreamCommand;
|
||||
use crate::data::TaggedDictBuilder;
|
||||
use crate::errors::ShellError;
|
||||
use crate::prelude::*;
|
||||
|
||||
pub struct GroupBy;
|
||||
|
||||
#[derive(Deserialize)]
|
||||
pub struct GroupByArgs {
|
||||
column_name: Tagged<String>,
|
||||
}
|
||||
|
||||
impl WholeStreamCommand for GroupBy {
|
||||
fn name(&self) -> &str {
|
||||
"group-by"
|
||||
}
|
||||
|
||||
fn signature(&self) -> Signature {
|
||||
Signature::build("group-by").required(
|
||||
"column_name",
|
||||
SyntaxShape::String,
|
||||
"the name of the column to group by",
|
||||
)
|
||||
}
|
||||
|
||||
fn usage(&self) -> &str {
|
||||
"Creates a new table with the data from the table rows grouped by the column given."
|
||||
}
|
||||
|
||||
fn run(
|
||||
&self,
|
||||
args: CommandArgs,
|
||||
registry: &CommandRegistry,
|
||||
) -> Result<OutputStream, ShellError> {
|
||||
args.process(registry, group_by)?.run()
|
||||
}
|
||||
}
|
||||
|
||||
pub fn group_by(
|
||||
GroupByArgs { column_name }: GroupByArgs,
|
||||
RunnableContext { input, name, .. }: RunnableContext,
|
||||
) -> Result<OutputStream, ShellError> {
|
||||
let stream = async_stream! {
|
||||
let values: Vec<Tagged<Value>> = input.values.collect().await;
|
||||
|
||||
if values.is_empty() {
|
||||
yield Err(ShellError::labeled_error(
|
||||
"Expected table from pipeline",
|
||||
"requires a table input",
|
||||
column_name.span()
|
||||
))
|
||||
} else {
|
||||
match group(&column_name, values, name) {
|
||||
Ok(grouped) => yield ReturnSuccess::value(grouped),
|
||||
Err(err) => yield Err(err)
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
Ok(stream.to_output_stream())
|
||||
}
|
||||
|
||||
pub fn group(
|
||||
column_name: &Tagged<String>,
|
||||
values: Vec<Tagged<Value>>,
|
||||
tag: impl Into<Tag>,
|
||||
) -> Result<Tagged<Value>, ShellError> {
|
||||
let tag = tag.into();
|
||||
|
||||
let mut groups = indexmap::IndexMap::new();
|
||||
|
||||
for value in values {
|
||||
let group_key = value.get_data_by_key(column_name);
|
||||
|
||||
if group_key.is_none() {
|
||||
let possibilities = value.data_descriptors();
|
||||
|
||||
let mut possible_matches: Vec<_> = possibilities
|
||||
.iter()
|
||||
.map(|x| (natural::distance::levenshtein_distance(x, column_name), x))
|
||||
.collect();
|
||||
|
||||
possible_matches.sort();
|
||||
|
||||
if possible_matches.len() > 0 {
|
||||
return Err(ShellError::labeled_error(
|
||||
"Unknown column",
|
||||
format!("did you mean '{}'?", possible_matches[0].1),
|
||||
column_name.tag(),
|
||||
));
|
||||
} else {
|
||||
return Err(ShellError::labeled_error(
|
||||
"Unknown column",
|
||||
"row does not contain this column",
|
||||
column_name.tag(),
|
||||
));
|
||||
}
|
||||
}
|
||||
|
||||
let group_key = group_key.unwrap().as_string()?;
|
||||
let group = groups.entry(group_key).or_insert(vec![]);
|
||||
group.push(value);
|
||||
}
|
||||
|
||||
let mut out = TaggedDictBuilder::new(&tag);
|
||||
|
||||
for (k, v) in groups.iter() {
|
||||
out.insert(k, Value::table(v));
|
||||
}
|
||||
|
||||
Ok(out.into_tagged_value())
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
|
||||
use crate::commands::group_by::group;
|
||||
use crate::data::meta::*;
|
||||
use crate::Value;
|
||||
use indexmap::IndexMap;
|
||||
|
||||
fn string(input: impl Into<String>) -> Tagged<Value> {
|
||||
Value::string(input.into()).tagged_unknown()
|
||||
}
|
||||
|
||||
fn row(entries: IndexMap<String, Tagged<Value>>) -> Tagged<Value> {
|
||||
Value::row(entries).tagged_unknown()
|
||||
}
|
||||
|
||||
fn table(list: &Vec<Tagged<Value>>) -> Tagged<Value> {
|
||||
Value::table(list).tagged_unknown()
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn groups_table_by_key() {
|
||||
let for_key = String::from("date").tagged_unknown();
|
||||
|
||||
let nu_releases = vec![
|
||||
row(
|
||||
indexmap! {"name".into() => string("AR"), "country".into() => string("EC"), "date".into() => string("August 23-2019")},
|
||||
),
|
||||
row(
|
||||
indexmap! {"name".into() => string("JT"), "country".into() => string("NZ"), "date".into() => string("August 23-2019")},
|
||||
),
|
||||
row(
|
||||
indexmap! {"name".into() => string("YK"), "country".into() => string("US"), "date".into() => string("October 10-2019")},
|
||||
),
|
||||
row(
|
||||
indexmap! {"name".into() => string("AR"), "country".into() => string("EC"), "date".into() => string("Sept 24-2019")},
|
||||
),
|
||||
row(
|
||||
indexmap! {"name".into() => string("JT"), "country".into() => string("NZ"), "date".into() => string("October 10-2019")},
|
||||
),
|
||||
row(
|
||||
indexmap! {"name".into() => string("YK"), "country".into() => string("US"), "date".into() => string("Sept 24-2019")},
|
||||
),
|
||||
row(
|
||||
indexmap! {"name".into() => string("AR"), "country".into() => string("EC"), "date".into() => string("October 10-2019")},
|
||||
),
|
||||
row(
|
||||
indexmap! {"name".into() => string("JT"), "country".into() => string("NZ"), "date".into() => string("Sept 24-2019")},
|
||||
),
|
||||
row(
|
||||
indexmap! {"name".into() => string("YK"), "country".into() => string("US"), "date".into() => string("August 23-2019")},
|
||||
),
|
||||
];
|
||||
|
||||
assert_eq!(
|
||||
group(&for_key, nu_releases, Tag::unknown()).unwrap(),
|
||||
row(indexmap! {
|
||||
"August 23-2019".into() => table(&vec![
|
||||
row(indexmap!{"name".into() => string("AR"), "country".into() => string("EC"), "date".into() => string("August 23-2019")}),
|
||||
row(indexmap!{"name".into() => string("JT"), "country".into() => string("NZ"), "date".into() => string("August 23-2019")}),
|
||||
row(indexmap!{"name".into() => string("YK"), "country".into() => string("US"), "date".into() => string("August 23-2019")})
|
||||
]),
|
||||
"October 10-2019".into() => table(&vec![
|
||||
row(indexmap!{"name".into() => string("YK"), "country".into() => string("US"), "date".into() => string("October 10-2019")}),
|
||||
row(indexmap!{"name".into() => string("JT"), "country".into() => string("NZ"), "date".into() => string("October 10-2019")}),
|
||||
row(indexmap!{"name".into() => string("AR"), "country".into() => string("EC"), "date".into() => string("October 10-2019")})
|
||||
]),
|
||||
"Sept 24-2019".into() => table(&vec![
|
||||
row(indexmap!{"name".into() => string("AR"), "country".into() => string("EC"), "date".into() => string("Sept 24-2019")}),
|
||||
row(indexmap!{"name".into() => string("YK"), "country".into() => string("US"), "date".into() => string("Sept 24-2019")}),
|
||||
row(indexmap!{"name".into() => string("JT"), "country".into() => string("NZ"), "date".into() => string("Sept 24-2019")})
|
||||
]),
|
||||
})
|
||||
);
|
||||
}
|
||||
}
|
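At its core the new group-by command buckets rows by the value found under the requested column, keeping the order in which keys first appear (the real code builds an indexmap::IndexMap of Tagged<Value> tables). A dependency-free sketch of that grouping step with plain maps; Row and the error type here are simplified stand-ins.

// Dependency-free sketch of grouping rows by a column while preserving the
// order in which group keys are first seen.
use std::collections::HashMap;

type Row = HashMap<String, String>;

fn group(column: &str, rows: Vec<Row>) -> Result<Vec<(String, Vec<Row>)>, String> {
    let mut order: Vec<String> = Vec::new();
    let mut groups: HashMap<String, Vec<Row>> = HashMap::new();

    for row in rows {
        // A missing column is an error, like the "Unknown column" case above.
        let key = row
            .get(column)
            .cloned()
            .ok_or_else(|| format!("row does not contain column '{}'", column))?;
        if !groups.contains_key(&key) {
            order.push(key.clone());
        }
        groups.entry(key).or_default().push(row);
    }

    Ok(order
        .into_iter()
        .map(|key| {
            let members = groups.remove(&key).unwrap();
            (key, members)
        })
        .collect())
}

fn main() {
    let row = |name: &str, date: &str| {
        HashMap::from([
            ("name".to_string(), name.to_string()),
            ("date".to_string(), date.to_string()),
        ])
    };
    let rows = vec![
        row("AR", "August 23-2019"),
        row("JT", "August 23-2019"),
        row("YK", "Sept 24-2019"),
    ];
    for (date, members) in group("date", rows).unwrap() {
        println!("{}: {} row(s)", date, members.len());
    }
}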
@ -12,7 +12,7 @@ impl PerItemCommand for Help {
|
||||
}
|
||||
|
||||
fn signature(&self) -> registry::Signature {
|
||||
Signature::build("help").rest(SyntaxShape::Any)
|
||||
Signature::build("help").rest(SyntaxShape::Any, "the name of command(s) to get help on")
|
||||
}
|
||||
|
||||
fn usage(&self) -> &str {
|
||||
@ -61,12 +61,9 @@ impl PerItemCommand for Help {
|
||||
let mut one_liner = String::new();
|
||||
one_liner.push_str(&signature.name);
|
||||
one_liner.push_str(" ");
|
||||
if signature.named.len() > 0 {
|
||||
one_liner.push_str("{flags} ");
|
||||
}
|
||||
|
||||
for positional in signature.positional {
|
||||
match positional {
|
||||
for positional in &signature.positional {
|
||||
match &positional.0 {
|
||||
PositionalType::Mandatory(name, _m) => {
|
||||
one_liner.push_str(&format!("<{}> ", name));
|
||||
}
|
||||
@ -77,25 +74,70 @@ impl PerItemCommand for Help {
|
||||
}
|
||||
|
||||
if signature.rest_positional.is_some() {
|
||||
one_liner.push_str(" ...args");
|
||||
one_liner.push_str(&format!(" ...args",));
|
||||
}
|
||||
|
||||
if signature.named.len() > 0 {
|
||||
one_liner.push_str("{flags} ");
|
||||
}
|
||||
|
||||
long_desc.push_str(&format!("\nUsage:\n > {}\n", one_liner));
|
||||
|
||||
if signature.positional.len() > 0 || signature.rest_positional.is_some() {
|
||||
long_desc.push_str("\nparameters:\n");
|
||||
for positional in signature.positional {
|
||||
match positional.0 {
|
||||
PositionalType::Mandatory(name, _m) => {
|
||||
long_desc
|
||||
.push_str(&format!(" <{}> {}\n", name, positional.1));
|
||||
}
|
||||
PositionalType::Optional(name, _o) => {
|
||||
long_desc
|
||||
.push_str(&format!(" ({}) {}\n", name, positional.1));
|
||||
}
|
||||
}
|
||||
}
|
||||
if signature.rest_positional.is_some() {
|
||||
long_desc.push_str(&format!(
|
||||
" ...args{} {}\n",
|
||||
if signature.rest_positional.is_some() {
|
||||
":"
|
||||
} else {
|
||||
""
|
||||
},
|
||||
signature.rest_positional.unwrap().1
|
||||
));
|
||||
}
|
||||
}
|
||||
if signature.named.len() > 0 {
|
||||
long_desc.push_str("\nflags:\n");
|
||||
for (flag, ty) in signature.named {
|
||||
match ty {
|
||||
match ty.0 {
|
||||
NamedType::Switch => {
|
||||
long_desc.push_str(&format!(" --{}\n", flag));
|
||||
long_desc.push_str(&format!(
|
||||
" --{}{} {}\n",
|
||||
flag,
|
||||
if ty.1.len() > 0 { ":" } else { "" },
|
||||
ty.1
|
||||
));
|
||||
}
|
||||
NamedType::Mandatory(m) => {
|
||||
long_desc.push_str(&format!(
|
||||
" --{} <{}> (required parameter)\n",
|
||||
flag, m
|
||||
" --{} <{}> (required parameter){} {}\n",
|
||||
flag,
|
||||
m,
|
||||
if ty.1.len() > 0 { ":" } else { "" },
|
||||
ty.1
|
||||
));
|
||||
}
|
||||
NamedType::Optional(o) => {
|
||||
long_desc.push_str(&format!(" --{} <{}>\n", flag, o));
|
||||
long_desc.push_str(&format!(
|
||||
" --{} <{}>{} {}\n",
|
||||
flag,
|
||||
o,
|
||||
if ty.1.len() > 0 { ":" } else { "" },
|
||||
ty.1
|
||||
));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
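The help changes above exist to surface those new descriptions: each named flag is rendered as "--flag <shape>: description" when a description is present. A compact sketch of that formatting step; NamedType and the example flags below are illustrative rather than nushell's exact types.

// Sketch of how help now renders a named flag together with its description.
enum NamedType {
    Switch,
    Mandatory(String),
    Optional(String),
}

fn render_flag(flag: &str, ty: &NamedType, desc: &str) -> String {
    let sep = if desc.is_empty() { "" } else { ":" };
    match ty {
        NamedType::Switch => format!("  --{}{} {}\n", flag, sep, desc),
        NamedType::Mandatory(shape) => {
            format!("  --{} <{}> (required parameter){} {}\n", flag, shape, sep, desc)
        }
        NamedType::Optional(shape) => format!("  --{} <{}>{} {}\n", flag, shape, sep, desc),
    }
}

fn main() {
    print!("{}", render_flag("raw", &NamedType::Switch, "fetch contents as text rather than a table"));
    print!(
        "{}",
        render_flag(
            "minimum-spaces",
            &NamedType::Optional("Int".to_string()),
            "the minimum spaces to separate columns",
        )
    );
    print!(
        "{}",
        render_flag(
            "columns",
            &NamedType::Mandatory("Any".to_string()),
            "a required example flag",
        )
    );
}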
src/commands/history.rs (new file, 49 lines)
@@ -0,0 +1,49 @@
|
||||
use crate::cli::History as HistoryFile;
|
||||
use crate::commands::PerItemCommand;
|
||||
use crate::errors::ShellError;
|
||||
use crate::parser::registry::{self};
|
||||
use crate::prelude::*;
|
||||
use std::fs::File;
|
||||
use std::io::{BufRead, BufReader};
|
||||
|
||||
pub struct History;
|
||||
|
||||
impl PerItemCommand for History {
|
||||
fn name(&self) -> &str {
|
||||
"history"
|
||||
}
|
||||
|
||||
fn signature(&self) -> registry::Signature {
|
||||
Signature::build("history")
|
||||
}
|
||||
|
||||
fn usage(&self) -> &str {
|
||||
"Display command history."
|
||||
}
|
||||
|
||||
fn run(
|
||||
&self,
|
||||
call_info: &CallInfo,
|
||||
_registry: &CommandRegistry,
|
||||
_raw_args: &RawCommandArgs,
|
||||
_input: Tagged<Value>,
|
||||
) -> Result<OutputStream, ShellError> {
|
||||
let tag = call_info.name_tag.clone();
|
||||
|
||||
let stream = async_stream! {
|
||||
let history_path = HistoryFile::path();
|
||||
let file = File::open(history_path);
|
||||
if let Ok(file) = file {
|
||||
let reader = BufReader::new(file);
|
||||
for line in reader.lines() {
|
||||
if let Ok(line) = line {
|
||||
yield ReturnSuccess::value(Value::string(line).tagged(tag.clone()));
|
||||
}
|
||||
}
|
||||
} else {
|
||||
yield Err(ShellError::labeled_error("Could not open history", "history file could not be opened", tag.clone()));
|
||||
}
|
||||
};
|
||||
Ok(stream.to_output_stream())
|
||||
}
|
||||
}
|
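The new history command is essentially "open the history file and stream its lines". Reduced to plain std I/O it looks roughly like the following; the hard-coded path is a placeholder, since nushell resolves the real location through its History helper.

// The history command reduced to plain std I/O: open the history file and
// print each line.
use std::fs::File;
use std::io::{BufRead, BufReader};

fn main() {
    let history_path = "history.txt"; // assumed location, for illustration only
    match File::open(history_path) {
        Ok(file) => {
            for line in BufReader::new(file).lines().flatten() {
                println!("{}", line);
            }
        }
        Err(_) => eprintln!("Could not open history file at {}", history_path),
    }
}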
@ -16,7 +16,11 @@ impl WholeStreamCommand for Last {
|
||||
}
|
||||
|
||||
fn signature(&self) -> Signature {
|
||||
Signature::build("last").optional("rows", SyntaxShape::Number)
|
||||
Signature::build("last").optional(
|
||||
"rows",
|
||||
SyntaxShape::Number,
|
||||
"starting from the back, the number of rows to return",
|
||||
)
|
||||
}
|
||||
|
||||
fn usage(&self) -> &str {
|
||||
|
@ -16,7 +16,11 @@ impl WholeStreamCommand for LS {
|
||||
}
|
||||
|
||||
fn signature(&self) -> Signature {
|
||||
Signature::build("ls").optional("path", SyntaxShape::Pattern)
|
||||
Signature::build("ls").optional(
|
||||
"path",
|
||||
SyntaxShape::Pattern,
|
||||
"a path to get the directory contents from",
|
||||
)
|
||||
}
|
||||
|
||||
fn usage(&self) -> &str {
|
||||
|
@@ -17,7 +17,7 @@ impl PerItemCommand for Mkdir
    }

    fn signature(&self) -> Signature {
-        Signature::build("mkdir").rest(SyntaxShape::Path)
+        Signature::build("mkdir").rest(SyntaxShape::Path, "the name(s) of the path(s) to create")
    }

    fn usage(&self) -> &str {
|
@ -20,9 +20,16 @@ impl PerItemCommand for Move {
|
||||
|
||||
fn signature(&self) -> Signature {
|
||||
Signature::build("mv")
|
||||
.required("source", SyntaxShape::Pattern)
|
||||
.required("destination", SyntaxShape::Path)
|
||||
.named("file", SyntaxShape::Any)
|
||||
.required(
|
||||
"source",
|
||||
SyntaxShape::Pattern,
|
||||
"the location to move files/directories from",
|
||||
)
|
||||
.required(
|
||||
"destination",
|
||||
SyntaxShape::Path,
|
||||
"the location to move files/directories to",
|
||||
)
|
||||
}
|
||||
|
||||
fn usage(&self) -> &str {
|
||||
|
@ -16,7 +16,11 @@ impl WholeStreamCommand for Nth {
|
||||
}
|
||||
|
||||
fn signature(&self) -> Signature {
|
||||
Signature::build("nth").required("row number", SyntaxShape::Any)
|
||||
Signature::build("nth").required(
|
||||
"row number",
|
||||
SyntaxShape::Any,
|
||||
"the number of the row to return",
|
||||
)
|
||||
}
|
||||
|
||||
fn usage(&self) -> &str {
|
||||
|
@ -16,8 +16,12 @@ impl PerItemCommand for Open {
|
||||
|
||||
fn signature(&self) -> Signature {
|
||||
Signature::build(self.name())
|
||||
.required("path", SyntaxShape::Path)
|
||||
.switch("raw")
|
||||
.required(
|
||||
"path",
|
||||
SyntaxShape::Path,
|
||||
"the file path to load values from",
|
||||
)
|
||||
.switch("raw", "load content as a string insead of a table")
|
||||
}
|
||||
|
||||
fn usage(&self) -> &str {
|
||||
@ -97,7 +101,7 @@ fn run(
|
||||
name_tag: raw_args.call_info.name_tag,
|
||||
}
|
||||
};
|
||||
let mut result = converter.run(new_args.with_input(vec![tagged_contents]), ®istry, false);
|
||||
let mut result = converter.run(new_args.with_input(vec![tagged_contents]), ®istry);
|
||||
let result_vec: Vec<Result<ReturnSuccess, ShellError>> = result.drain_vec().await;
|
||||
for res in result_vec {
|
||||
match res {
|
||||
|
@@ -17,7 +17,7 @@ impl WholeStreamCommand for Pick
    }

    fn signature(&self) -> Signature {
-        Signature::build("pick").rest(SyntaxShape::Any)
+        Signature::build("pick").rest(SyntaxShape::Any, "the columns to select from the table")
    }

    fn usage(&self) -> &str {
|
@ -21,9 +21,12 @@ impl WholeStreamCommand for Pivot {
|
||||
|
||||
fn signature(&self) -> Signature {
|
||||
Signature::build("pivot")
|
||||
.switch("header-row")
|
||||
.switch("ignore-titles")
|
||||
.rest(SyntaxShape::String)
|
||||
.switch("header-row", "treat the first row as column names")
|
||||
.switch("ignore-titles", "don't pivot the column names into values")
|
||||
.rest(
|
||||
SyntaxShape::String,
|
||||
"the names to give columns once pivoted",
|
||||
)
|
||||
}
|
||||
|
||||
fn usage(&self) -> &str {
|
||||
|
@ -25,13 +25,25 @@ impl PerItemCommand for Post {
|
||||
|
||||
fn signature(&self) -> Signature {
|
||||
Signature::build(self.name())
|
||||
.required("path", SyntaxShape::Any)
|
||||
.required("body", SyntaxShape::Any)
|
||||
.named("user", SyntaxShape::Any)
|
||||
.named("password", SyntaxShape::Any)
|
||||
.named("content-type", SyntaxShape::Any)
|
||||
.named("content-length", SyntaxShape::Any)
|
||||
.switch("raw")
|
||||
.required("path", SyntaxShape::Any, "the URL to post to")
|
||||
.required("body", SyntaxShape::Any, "the contents of the post body")
|
||||
.named("user", SyntaxShape::Any, "the username when authenticating")
|
||||
.named(
|
||||
"password",
|
||||
SyntaxShape::Any,
|
||||
"the password when authenticating",
|
||||
)
|
||||
.named(
|
||||
"content-type",
|
||||
SyntaxShape::Any,
|
||||
"the MIME type of content to post",
|
||||
)
|
||||
.named(
|
||||
"content-length",
|
||||
SyntaxShape::Any,
|
||||
"the length of the content being posted",
|
||||
)
|
||||
.switch("raw", "return values as a string instead of a table")
|
||||
}
|
||||
|
||||
fn usage(&self) -> &str {
|
||||
@ -112,7 +124,7 @@ fn run(
|
||||
name_tag: raw_args.call_info.name_tag,
|
||||
}
|
||||
};
|
||||
let mut result = converter.run(new_args.with_input(vec![tagged_contents]), ®istry, false);
|
||||
let mut result = converter.run(new_args.with_input(vec![tagged_contents]), ®istry);
|
||||
let result_vec: Vec<Result<ReturnSuccess, ShellError>> = result.drain_vec().await;
|
||||
for res in result_vec {
|
||||
match res {
|
||||
@ -258,7 +270,6 @@ pub async fn post(
|
||||
let mut result = converter.run(
|
||||
new_args.with_input(vec![item.clone().tagged(tag.clone())]),
|
||||
®istry,
|
||||
false,
|
||||
);
|
||||
let result_vec: Vec<Result<ReturnSuccess, ShellError>> =
|
||||
result.drain_vec().await;
|
||||
|
src/commands/prepend.rs (new file, 47 lines)
@@ -0,0 +1,47 @@
|
||||
use crate::commands::WholeStreamCommand;
|
||||
use crate::errors::ShellError;
|
||||
use crate::parser::CommandRegistry;
|
||||
use crate::prelude::*;
|
||||
|
||||
#[derive(Deserialize)]
|
||||
struct PrependArgs {
|
||||
row: Tagged<Value>,
|
||||
}
|
||||
|
||||
pub struct Prepend;
|
||||
|
||||
impl WholeStreamCommand for Prepend {
|
||||
fn name(&self) -> &str {
|
||||
"prepend"
|
||||
}
|
||||
|
||||
fn signature(&self) -> Signature {
|
||||
Signature::build("prepend").required(
|
||||
"row value",
|
||||
SyntaxShape::Any,
|
||||
"the value of the row to prepend to the table",
|
||||
)
|
||||
}
|
||||
|
||||
fn usage(&self) -> &str {
|
||||
"Prepend the given row to the front of the table"
|
||||
}
|
||||
|
||||
fn run(
|
||||
&self,
|
||||
args: CommandArgs,
|
||||
registry: &CommandRegistry,
|
||||
) -> Result<OutputStream, ShellError> {
|
||||
args.process(registry, prepend)?.run()
|
||||
}
|
||||
}
|
||||
|
||||
fn prepend(
|
||||
PrependArgs { row }: PrependArgs,
|
||||
RunnableContext { input, .. }: RunnableContext,
|
||||
) -> Result<OutputStream, ShellError> {
|
||||
let mut prepend: VecDeque<Tagged<Value>> = VecDeque::new();
|
||||
prepend.push_back(row);
|
||||
|
||||
Ok(OutputStream::from_input(prepend.chain(input.values)))
|
||||
}
|
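prepend, added above, just chains one extra row in front of the existing input stream. The same idea with a plain iterator instead of nushell's InputStream, purely as an illustrative sketch:

// prepend expressed with a plain iterator: put one row in front of the rest.
fn prepend<T>(row: T, rest: impl Iterator<Item = T>) -> impl Iterator<Item = T> {
    std::iter::once(row).chain(rest)
}

fn main() {
    let table = vec!["first existing row", "second existing row"];
    for row in prepend("the prepended row", table.into_iter()) {
        println!("{}", row);
    }
}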
@@ -16,7 +16,7 @@ impl WholeStreamCommand for Reject
    }

    fn signature(&self) -> Signature {
-        Signature::build("reject").rest(SyntaxShape::Member)
+        Signature::build("reject").rest(SyntaxShape::Member, "the names of columns to remove")
    }

    fn usage(&self) -> &str {
|
@ -11,6 +11,7 @@ pub struct Remove;
|
||||
pub struct RemoveArgs {
|
||||
pub target: Tagged<PathBuf>,
|
||||
pub recursive: Tagged<bool>,
|
||||
pub trash: Tagged<bool>,
|
||||
}
|
||||
|
||||
impl PerItemCommand for Remove {
|
||||
@ -20,12 +21,16 @@ impl PerItemCommand for Remove {
|
||||
|
||||
fn signature(&self) -> Signature {
|
||||
Signature::build("rm")
|
||||
.required("path", SyntaxShape::Pattern)
|
||||
.switch("recursive")
|
||||
.required("path", SyntaxShape::Pattern, "the file path to remove")
|
||||
.switch(
|
||||
"trash",
|
||||
"use the platform's recycle bin instead of permanently deleting",
|
||||
)
|
||||
.switch("recursive", "delete subdirectories recursively")
|
||||
}
|
||||
|
||||
fn usage(&self) -> &str {
|
||||
"Remove a file, (for removing directory append '--recursive')"
|
||||
"Remove a file"
|
||||
}
|
||||
|
||||
fn run(
|
||||
|
@ -93,8 +93,11 @@ impl WholeStreamCommand for Save {
|
||||
|
||||
fn signature(&self) -> Signature {
|
||||
Signature::build("save")
|
||||
.optional("path", SyntaxShape::Path)
|
||||
.switch("raw")
|
||||
.optional("path", SyntaxShape::Path, "the path to save contents to")
|
||||
.switch(
|
||||
"raw",
|
||||
"treat values as-is rather than auto-converting based on file extension",
|
||||
)
|
||||
}
|
||||
|
||||
fn usage(&self) -> &str {
|
||||
@ -190,7 +193,7 @@ fn save(
|
||||
name_tag: raw_args.call_info.name_tag,
|
||||
}
|
||||
};
|
||||
let mut result = converter.run(new_args.with_input(input), ®istry, false);
|
||||
let mut result = converter.run(new_args.with_input(input), ®istry);
|
||||
let result_vec: Vec<Result<ReturnSuccess, ShellError>> = result.drain_vec().await;
|
||||
if converter.is_binary() {
|
||||
process_binary_return_success!('scope, result_vec, name_tag)
|
||||
|
@ -17,7 +17,11 @@ impl WholeStreamCommand for SkipWhile {
|
||||
|
||||
fn signature(&self) -> Signature {
|
||||
Signature::build("skip-while")
|
||||
.required("condition", SyntaxShape::Block)
|
||||
.required(
|
||||
"condition",
|
||||
SyntaxShape::Block,
|
||||
"the condition that must be met to continue skipping",
|
||||
)
|
||||
.filter()
|
||||
}
|
||||
|
||||
|
@@ -15,7 +15,7 @@ impl WholeStreamCommand for SortBy
    }

    fn signature(&self) -> Signature {
-        Signature::build("sort-by").rest(SyntaxShape::String)
+        Signature::build("sort-by").rest(SyntaxShape::String, "the column(s) to sort by")
    }

    fn usage(&self) -> &str {
src/commands/split_by.rs (new file, 256 lines)
@@ -0,0 +1,256 @@
|
||||
use crate::commands::WholeStreamCommand;
|
||||
use crate::data::TaggedDictBuilder;
|
||||
use crate::errors::ShellError;
|
||||
use crate::prelude::*;
|
||||
|
||||
pub struct SplitBy;
|
||||
|
||||
#[derive(Deserialize)]
|
||||
pub struct SplitByArgs {
|
||||
column_name: Tagged<String>,
|
||||
}
|
||||
|
||||
impl WholeStreamCommand for SplitBy {
|
||||
fn name(&self) -> &str {
|
||||
"split-by"
|
||||
}
|
||||
|
||||
fn signature(&self) -> Signature {
|
||||
Signature::build("split-by").required(
|
||||
"column_name",
|
||||
SyntaxShape::String,
|
||||
"the name of the column within the nested table to split by",
|
||||
)
|
||||
}
|
||||
|
||||
fn usage(&self) -> &str {
|
||||
"Creates a new table with the data from the inner tables splitted by the column given."
|
||||
}
|
||||
|
||||
fn run(
|
||||
&self,
|
||||
args: CommandArgs,
|
||||
registry: &CommandRegistry,
|
||||
) -> Result<OutputStream, ShellError> {
|
||||
args.process(registry, split_by)?.run()
|
||||
}
|
||||
}
|
||||
|
||||
pub fn split_by(
|
||||
SplitByArgs { column_name }: SplitByArgs,
|
||||
RunnableContext { input, name, .. }: RunnableContext,
|
||||
) -> Result<OutputStream, ShellError> {
|
||||
let stream = async_stream! {
|
||||
let values: Vec<Tagged<Value>> = input.values.collect().await;
|
||||
|
||||
if values.len() > 1 || values.is_empty() {
|
||||
yield Err(ShellError::labeled_error(
|
||||
"Expected table from pipeline",
|
||||
"requires a table input",
|
||||
column_name.span()
|
||||
))
|
||||
} else {
|
||||
match split(&column_name, &values[0], name) {
|
||||
Ok(split) => yield ReturnSuccess::value(split),
|
||||
Err(err) => yield Err(err),
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
Ok(stream.to_output_stream())
|
||||
}
|
||||
|
||||
pub fn split(
|
||||
column_name: &Tagged<String>,
|
||||
value: &Tagged<Value>,
|
||||
tag: impl Into<Tag>,
|
||||
) -> Result<Tagged<Value>, ShellError> {
|
||||
let origin_tag = tag.into();
|
||||
|
||||
let mut splits = indexmap::IndexMap::new();
|
||||
|
||||
match value {
|
||||
Tagged {
|
||||
item: Value::Row(group_sets),
|
||||
..
|
||||
} => {
|
||||
for (group_key, group_value) in group_sets.entries.iter() {
|
||||
match *group_value {
|
||||
Tagged {
|
||||
item: Value::Table(ref dataset),
|
||||
..
|
||||
} => {
|
||||
let group = crate::commands::group_by::group(
|
||||
&column_name,
|
||||
dataset.to_vec(),
|
||||
&origin_tag,
|
||||
)?;
|
||||
|
||||
match group {
|
||||
Tagged {
|
||||
item: Value::Row(o),
|
||||
..
|
||||
} => {
|
||||
for (split_label, subset) in o.entries.into_iter() {
|
||||
match subset {
|
||||
Tagged {
|
||||
item: Value::Table(subset),
|
||||
tag,
|
||||
} => {
|
||||
let s = splits
|
||||
.entry(split_label.clone())
|
||||
.or_insert(indexmap::IndexMap::new());
|
||||
s.insert(
|
||||
group_key.clone(),
|
||||
Value::table(&subset).tagged(tag),
|
||||
);
|
||||
}
|
||||
other => {
|
||||
return Err(ShellError::type_error(
|
||||
"a table value",
|
||||
other.tagged_type_name(),
|
||||
))
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
_ => {
|
||||
return Err(ShellError::type_error(
|
||||
"a table value",
|
||||
group.tagged_type_name(),
|
||||
))
|
||||
}
|
||||
}
|
||||
}
|
||||
ref other => {
|
||||
return Err(ShellError::type_error(
|
||||
"a table value",
|
||||
other.tagged_type_name(),
|
||||
))
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
_ => {
|
||||
return Err(ShellError::type_error(
|
||||
"a table value",
|
||||
value.tagged_type_name(),
|
||||
))
|
||||
}
|
||||
}
|
||||
|
||||
let mut out = TaggedDictBuilder::new(&origin_tag);
|
||||
|
||||
for (k, v) in splits.into_iter() {
|
||||
out.insert(k, Value::row(v));
|
||||
}
|
||||
|
||||
Ok(out.into_tagged_value())
|
||||
}
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
|
||||
use crate::commands::split_by::split;
|
||||
use crate::data::meta::*;
|
||||
use crate::Value;
|
||||
use indexmap::IndexMap;
|
||||
|
||||
fn string(input: impl Into<String>) -> Tagged<Value> {
|
||||
Value::string(input.into()).tagged_unknown()
|
||||
}
|
||||
|
||||
fn row(entries: IndexMap<String, Tagged<Value>>) -> Tagged<Value> {
|
||||
Value::row(entries).tagged_unknown()
|
||||
}
|
||||
|
||||
fn table(list: &Vec<Tagged<Value>>) -> Tagged<Value> {
|
||||
Value::table(list).tagged_unknown()
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn splits_inner_tables_by_key() {
|
||||
let for_key = String::from("country").tagged_unknown();
|
||||
|
||||
let nu_releases = row(indexmap! {
|
||||
"August 23-2019".into() => table(&vec![
|
||||
row(indexmap!{"name".into() => string("AR"), "country".into() => string("EC"), "date".into() => string("August 23-2019")}),
|
||||
row(indexmap!{"name".into() => string("JT"), "country".into() => string("NZ"), "date".into() => string("August 23-2019")}),
|
||||
row(indexmap!{"name".into() => string("YK"), "country".into() => string("US"), "date".into() => string("August 23-2019")})
|
||||
]),
|
||||
"Sept 24-2019".into() => table(&vec![
|
||||
row(indexmap!{"name".into() => string("AR"), "country".into() => string("EC"), "date".into() => string("Sept 24-2019")}),
|
||||
row(indexmap!{"name".into() => string("JT"), "country".into() => string("NZ"), "date".into() => string("Sept 24-2019")}),
|
||||
row(indexmap!{"name".into() => string("YK"), "country".into() => string("US"), "date".into() => string("Sept 24-2019")})
|
||||
]),
|
||||
"October 10-2019".into() => table(&vec![
|
||||
row(indexmap!{"name".into() => string("AR"), "country".into() => string("EC"), "date".into() => string("October 10-2019")}),
|
||||
row(indexmap!{"name".into() => string("JT"), "country".into() => string("NZ"), "date".into() => string("October 10-2019")}),
|
||||
row(indexmap!{"name".into() => string("YK"), "country".into() => string("US"), "date".into() => string("October 10-2019")})
|
||||
])
|
||||
});
|
||||
|
||||
assert_eq!(
|
||||
split(&for_key, &nu_releases, Tag::unknown()).unwrap(),
|
||||
Value::row(indexmap! {
|
||||
"EC".into() => row(indexmap! {
|
||||
"August 23-2019".into() => table(&vec![
|
||||
row(indexmap!{"name".into() => string("AR"), "country".into() => string("EC"), "date".into() => string("August 23-2019")})
|
||||
]),
|
||||
"Sept 24-2019".into() => table(&vec![
|
||||
row(indexmap!{"name".into() => string("AR"), "country".into() => string("EC"), "date".into() => string("Sept 24-2019")})
|
||||
]),
|
||||
"October 10-2019".into() => table(&vec![
|
||||
row(indexmap!{"name".into() => string("AR"), "country".into() => string("EC"), "date".into() => string("October 10-2019")})
|
||||
])
|
||||
}),
|
||||
"NZ".into() => row(indexmap! {
|
||||
"August 23-2019".into() => table(&vec![
|
||||
row(indexmap!{"name".into() => string("JT"), "country".into() => string("NZ"), "date".into() => string("August 23-2019")})
|
||||
]),
|
||||
"Sept 24-2019".into() => table(&vec![
|
||||
row(indexmap!{"name".into() => string("JT"), "country".into() => string("NZ"), "date".into() => string("Sept 24-2019")})
|
||||
]),
|
||||
"October 10-2019".into() => table(&vec![
|
||||
row(indexmap!{"name".into() => string("JT"), "country".into() => string("NZ"), "date".into() => string("October 10-2019")})
|
||||
])
|
||||
}),
|
||||
"US".into() => row(indexmap! {
|
||||
"August 23-2019".into() => table(&vec![
|
||||
row(indexmap!{"name".into() => string("YK"), "country".into() => string("US"), "date".into() => string("August 23-2019")})
|
||||
]),
|
||||
"Sept 24-2019".into() => table(&vec![
|
||||
row(indexmap!{"name".into() => string("YK"), "country".into() => string("US"), "date".into() => string("Sept 24-2019")})
|
||||
]),
|
||||
"October 10-2019".into() => table(&vec![
|
||||
row(indexmap!{"name".into() => string("YK"), "country".into() => string("US"), "date".into() => string("October 10-2019")})
|
||||
])
|
||||
})
|
||||
}).tagged_unknown()
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn errors_if_key_within_some_inner_table_is_missing() {
|
||||
let for_key = String::from("country").tagged_unknown();
|
||||
|
||||
let nu_releases = row(indexmap! {
|
||||
"August 23-2019".into() => table(&vec![
|
||||
row(indexmap!{"name".into() => string("AR"), "country".into() => string("EC"), "date".into() => string("August 23-2019")}),
|
||||
row(indexmap!{"name".into() => string("JT"), "country".into() => string("NZ"), "date".into() => string("August 23-2019")}),
|
||||
row(indexmap!{"name".into() => string("YK"), "country".into() => string("US"), "date".into() => string("August 23-2019")})
|
||||
]),
|
||||
"Sept 24-2019".into() => table(&vec![
|
||||
row(indexmap!{"name".into() => string("AR"), "country".into() => string("EC"), "date".into() => string("Sept 24-2019")}),
|
||||
row(indexmap!{"name".into() => Value::string("JT").tagged(Tag::from(Span::new(5,10))), "date".into() => string("Sept 24-2019")}),
|
||||
row(indexmap!{"name".into() => string("YK"), "country".into() => string("US"), "date".into() => string("Sept 24-2019")})
|
||||
]),
|
||||
"October 10-2019".into() => table(&vec![
|
||||
row(indexmap!{"name".into() => string("AR"), "country".into() => string("EC"), "date".into() => string("October 10-2019")}),
|
||||
row(indexmap!{"name".into() => string("JT"), "country".into() => string("NZ"), "date".into() => string("October 10-2019")}),
|
||||
row(indexmap!{"name".into() => string("YK"), "country".into() => string("US"), "date".into() => string("October 10-2019")})
|
||||
])
|
||||
});
|
||||
|
||||
assert!(split(&for_key, &nu_releases, Tag::from(Span::new(5, 10))).is_err());
|
||||
}
|
||||
}
|
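The `split` helper above expects the shape produced by `group-by`: a row whose entries are inner tables keyed by the original grouping column. A hedged sketch of how the two compose, assuming `group` is importable from `group_by.rs` the same way `split` is imported in the tests (the function and variable names here are illustrative):

    use crate::commands::group_by::group;
    use crate::commands::split_by::split;
    use crate::prelude::*;

    // `rows` is a table whose records carry both a "date" and a "country" column.
    fn group_then_split(rows: Vec<Tagged<Value>>, tag: Tag) -> Result<Tagged<Value>, ShellError> {
        let date_key = String::from("date").tagged(&tag);
        let country_key = String::from("country").tagged(&tag);

        // first level: one inner table per date
        let by_date = group(&date_key, rows, &tag)?;

        // second level: split each inner table by country, giving country -> date -> table
        split(&country_key, &by_date, tag)
    }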
@ -21,9 +21,13 @@ impl WholeStreamCommand for SplitColumn {

fn signature(&self) -> Signature {
Signature::build("split-column")
.required("separator", SyntaxShape::Any)
.switch("collapse-empty")
.rest(SyntaxShape::Member)
.required(
"separator",
SyntaxShape::Any,
"the character that denotes what separates columns",
)
.switch("collapse-empty", "remove empty columns")
.rest(SyntaxShape::Member, "column names to give the new columns")
}

fn usage(&self) -> &str {

@ -17,7 +17,11 @@ impl WholeStreamCommand for SplitRow {
}

fn signature(&self) -> Signature {
Signature::build("split-row").required("separator", SyntaxShape::Any)
Signature::build("split-row").required(
"separator",
SyntaxShape::Any,
"the character that denotes what separates rows",
)
}

fn usage(&self) -> &str {

@ -11,7 +11,11 @@ impl WholeStreamCommand for Table {
}

fn signature(&self) -> Signature {
Signature::build("table").named("start_number", SyntaxShape::Number)
Signature::build("table").named(
"start_number",
SyntaxShape::Number,
"row number to start viewing from",
)
}

fn usage(&self) -> &str {

@ -16,7 +16,10 @@ impl WholeStreamCommand for ToCSV {
}

fn signature(&self) -> Signature {
Signature::build("to-csv").switch("headerless")
Signature::build("to-csv").switch(
"headerless",
"do not output the columns names as the first row",
)
}

fn usage(&self) -> &str {
@ -16,7 +16,10 @@ impl WholeStreamCommand for ToTSV {
|
||||
}
|
||||
|
||||
fn signature(&self) -> Signature {
|
||||
Signature::build("to-tsv").switch("headerless")
|
||||
Signature::build("to-tsv").switch(
|
||||
"headerless",
|
||||
"do not output the column names as the first row",
|
||||
)
|
||||
}
|
||||
|
||||
fn usage(&self) -> &str {
|
||||
|
@ -12,7 +12,11 @@ impl PerItemCommand for Where {
|
||||
}
|
||||
|
||||
fn signature(&self) -> registry::Signature {
|
||||
Signature::build("where").required("condition", SyntaxShape::Block)
|
||||
Signature::build("where").required(
|
||||
"condition",
|
||||
SyntaxShape::Block,
|
||||
"the condition that must match",
|
||||
)
|
||||
}
|
||||
|
||||
fn usage(&self) -> &str {
|
||||
|
@ -13,7 +13,11 @@ impl WholeStreamCommand for Which {
|
||||
}
|
||||
|
||||
fn signature(&self) -> Signature {
|
||||
Signature::build("which").required("name", SyntaxShape::Any)
|
||||
Signature::build("which").required(
|
||||
"name",
|
||||
SyntaxShape::Any,
|
||||
"the name of the command to find the path to",
|
||||
)
|
||||
}
|
||||
|
||||
fn usage(&self) -> &str {
|
||||
|
@ -71,9 +71,8 @@ impl Context {
|
||||
pub(crate) fn expand_context<'context>(
|
||||
&'context self,
|
||||
source: &'context Text,
|
||||
span: Span,
|
||||
) -> ExpandContext<'context> {
|
||||
ExpandContext::new(&self.registry, span, source, self.shell_manager.homedir())
|
||||
ExpandContext::new(&self.registry, source, self.shell_manager.homedir())
|
||||
}
|
||||
|
||||
pub(crate) fn basic() -> Result<Context, Box<dyn Error>> {
|
||||
@ -113,10 +112,9 @@ impl Context {
|
||||
args: hir::Call,
|
||||
source: &Text,
|
||||
input: InputStream,
|
||||
is_first_command: bool,
|
||||
) -> OutputStream {
|
||||
let command_args = self.command_args(args, input, source, name_tag);
|
||||
command.run(command_args, self.registry(), is_first_command)
|
||||
command.run(command_args, self.registry())
|
||||
}
|
||||
|
||||
fn call_info(&self, args: hir::Call, source: &Text, name_tag: Tag) -> UnevaluatedCallInfo {
|
||||
|
512
src/data/base.rs
@ -8,6 +8,7 @@ use crate::Text;
|
||||
use chrono::{DateTime, Utc};
|
||||
use chrono_humanize::Humanize;
|
||||
use derive_new::new;
|
||||
use indexmap::IndexMap;
|
||||
use log::trace;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::fmt;
|
||||
@ -408,25 +409,17 @@ impl Tagged<Value> {
|
||||
ValueDebug { value: self }
|
||||
}
|
||||
|
||||
pub fn as_column_path(&self) -> Result<Tagged<Vec<Tagged<String>>>, ShellError> {
|
||||
let mut out: Vec<Tagged<String>> = vec![];
|
||||
|
||||
pub fn as_column_path(&self) -> Result<Tagged<Vec<Tagged<Value>>>, ShellError> {
|
||||
match &self.item {
|
||||
Value::Table(table) => {
|
||||
for item in table {
|
||||
out.push(item.as_string()?.tagged(&item.tag));
|
||||
}
|
||||
}
|
||||
|
||||
other => {
|
||||
return Err(ShellError::type_error(
|
||||
"column name",
|
||||
other.type_name().tagged(&self.tag),
|
||||
))
|
||||
Value::Primitive(Primitive::String(s)) => {
|
||||
Ok(vec![Value::string(s).tagged(&self.tag)].tagged(&self.tag))
|
||||
}
|
||||
Value::Table(table) => Ok(table.to_vec().tagged(&self.tag)),
|
||||
other => Err(ShellError::type_error(
|
||||
"column name",
|
||||
other.type_name().tagged(&self.tag),
|
||||
)),
|
||||
}
|
||||
|
||||
Ok(out.tagged(&self.tag))
|
||||
}
|
||||
|
||||
pub(crate) fn as_string(&self) -> Result<String, ShellError> {
|
||||
@ -452,17 +445,16 @@ impl Value {
|
||||
match self {
|
||||
Value::Primitive(p) => p.type_name(),
|
||||
Value::Row(_) => format!("row"),
|
||||
Value::Table(_) => format!("list"),
|
||||
Value::Table(_) => format!("table"),
|
||||
Value::Block(_) => format!("block"),
|
||||
Value::Error(_) => format!("error"),
|
||||
}
|
||||
}
|
||||
|
||||
// TODO: This is basically a legacy construct, I think
|
||||
pub fn data_descriptors(&self) -> Vec<String> {
|
||||
match self {
|
||||
Value::Primitive(_) => vec![],
|
||||
Value::Row(o) => o
|
||||
Value::Row(columns) => columns
|
||||
.entries
|
||||
.keys()
|
||||
.into_iter()
|
||||
@ -474,6 +466,13 @@ impl Value {
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn get_data_by_index(&self, idx: usize) -> Option<&Tagged<Value>> {
|
||||
match self {
|
||||
Value::Table(value_set) => value_set.get(idx),
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn get_data_by_key(&self, name: &str) -> Option<&Tagged<Value>> {
|
||||
match self {
|
||||
Value::Row(o) => o.get_data_by_key(name),
|
||||
@ -496,93 +495,87 @@ impl Value {
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn get_mut_data_by_key(&mut self, name: &str) -> Option<&mut Tagged<Value>> {
|
||||
match self {
|
||||
Value::Row(ref mut o) => o.get_mut_data_by_key(name),
|
||||
Value::Table(ref mut l) => {
|
||||
for item in l {
|
||||
match item {
|
||||
Tagged {
|
||||
item: Value::Row(ref mut o),
|
||||
..
|
||||
} => match o.get_mut_data_by_key(name) {
|
||||
Some(v) => return Some(v),
|
||||
None => {}
|
||||
},
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
None
|
||||
}
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn get_data_by_column_path(
|
||||
&self,
|
||||
tag: Tag,
|
||||
path: &Vec<Tagged<String>>,
|
||||
) -> Option<Tagged<&Value>> {
|
||||
path: &Vec<Tagged<Value>>,
|
||||
callback: Box<dyn FnOnce((Value, Tagged<Value>)) -> ShellError>,
|
||||
) -> Result<Option<Tagged<&Value>>, ShellError> {
|
||||
let mut column_path = vec![];
|
||||
|
||||
for value in path {
|
||||
column_path.push(
|
||||
Value::string(value.as_string().unwrap_or("".to_string())).tagged(&value.tag),
|
||||
);
|
||||
}
|
||||
|
||||
let path = column_path;
|
||||
|
||||
let mut current = self;
|
||||
|
||||
for p in path {
|
||||
match current.get_data_by_key(p) {
|
||||
let value = p.as_string().unwrap_or("".to_string());
|
||||
let value = match value.parse::<usize>() {
|
||||
Ok(number) => match current {
|
||||
Value::Table(_) => current.get_data_by_index(number),
|
||||
Value::Row(_) => current.get_data_by_key(&value),
|
||||
_ => None,
|
||||
},
|
||||
Err(_) => match self {
|
||||
Value::Table(_) | Value::Row(_) => current.get_data_by_key(&value),
|
||||
_ => None,
|
||||
},
|
||||
};
|
||||
|
||||
match value {
|
||||
Some(v) => current = v,
|
||||
None => return None,
|
||||
None => return Err(callback((current.clone(), p.clone()))),
|
||||
}
|
||||
}
|
||||
|
||||
Some(current.tagged(tag))
|
||||
}
|
||||
|
||||
pub fn get_data_by_path(&self, tag: Tag, path: &str) -> Option<Tagged<&Value>> {
|
||||
let mut current = self;
|
||||
for p in path.split(".") {
|
||||
match current.get_data_by_key(p) {
|
||||
Some(v) => current = v,
|
||||
None => return None,
|
||||
}
|
||||
}
|
||||
|
||||
Some(current.tagged(tag))
|
||||
}
|
||||
|
||||
pub fn insert_data_at_path(
|
||||
&self,
|
||||
tag: Tag,
|
||||
path: &str,
|
||||
new_value: Value,
|
||||
) -> Option<Tagged<Value>> {
|
||||
let mut new_obj = self.clone();
|
||||
|
||||
let split_path: Vec<_> = path.split(".").collect();
|
||||
|
||||
if let Value::Row(ref mut o) = new_obj {
|
||||
let mut current = o;
|
||||
|
||||
if split_path.len() == 1 {
|
||||
// Special case for inserting at the top level
|
||||
current
|
||||
.entries
|
||||
.insert(path.to_string(), new_value.tagged(&tag));
|
||||
return Some(new_obj.tagged(&tag));
|
||||
}
|
||||
|
||||
for idx in 0..split_path.len() {
|
||||
match current.entries.get_mut(split_path[idx]) {
|
||||
Some(next) => {
|
||||
if idx == (split_path.len() - 2) {
|
||||
match &mut next.item {
|
||||
Value::Row(o) => {
|
||||
o.entries.insert(
|
||||
split_path[idx + 1].to_string(),
|
||||
new_value.tagged(&tag),
|
||||
);
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
|
||||
return Some(new_obj.tagged(&tag));
|
||||
} else {
|
||||
match next.item {
|
||||
Value::Row(ref mut o) => {
|
||||
current = o;
|
||||
}
|
||||
_ => return None,
|
||||
}
|
||||
}
|
||||
}
|
||||
_ => return None,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
None
|
||||
Ok(Some(current.tagged(tag)))
|
||||
}
|
||||
|
||||
pub fn insert_data_at_column_path(
|
||||
&self,
|
||||
tag: Tag,
|
||||
split_path: &Vec<Tagged<String>>,
|
||||
split_path: &Vec<Tagged<Value>>,
|
||||
new_value: Value,
|
||||
) -> Option<Tagged<Value>> {
|
||||
let split_path = split_path
|
||||
.into_iter()
|
||||
.map(|p| match p {
|
||||
Tagged {
|
||||
item: Value::Primitive(Primitive::String(s)),
|
||||
tag,
|
||||
} => Ok(s.clone().tagged(tag)),
|
||||
o => Err(o),
|
||||
})
|
||||
.filter_map(Result::ok)
|
||||
.collect::<Vec<Tagged<String>>>();
|
||||
|
||||
let mut new_obj = self.clone();
|
||||
|
||||
if let Value::Row(ref mut o) = new_obj {
|
||||
@ -628,67 +621,39 @@ impl Value {
|
||||
None
|
||||
}
|
||||
|
||||
pub fn replace_data_at_path(
|
||||
&self,
|
||||
tag: Tag,
|
||||
path: &str,
|
||||
replaced_value: Value,
|
||||
) -> Option<Tagged<Value>> {
|
||||
let mut new_obj = self.clone();
|
||||
|
||||
let split_path: Vec<_> = path.split(".").collect();
|
||||
|
||||
if let Value::Row(ref mut o) = new_obj {
|
||||
let mut current = o;
|
||||
for idx in 0..split_path.len() {
|
||||
match current.entries.get_mut(split_path[idx]) {
|
||||
Some(next) => {
|
||||
if idx == (split_path.len() - 1) {
|
||||
*next = replaced_value.tagged(&tag);
|
||||
return Some(new_obj.tagged(&tag));
|
||||
} else {
|
||||
match next.item {
|
||||
Value::Row(ref mut o) => {
|
||||
current = o;
|
||||
}
|
||||
_ => return None,
|
||||
}
|
||||
}
|
||||
}
|
||||
_ => return None,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
None
|
||||
}
|
||||
|
||||
pub fn replace_data_at_column_path(
|
||||
&self,
|
||||
tag: Tag,
|
||||
split_path: &Vec<Tagged<String>>,
|
||||
split_path: &Vec<Tagged<Value>>,
|
||||
replaced_value: Value,
|
||||
) -> Option<Tagged<Value>> {
|
||||
let mut new_obj = self.clone();
|
||||
let split_path = split_path
|
||||
.into_iter()
|
||||
.map(|p| match p {
|
||||
Tagged {
|
||||
item: Value::Primitive(Primitive::String(s)),
|
||||
tag,
|
||||
} => Ok(s.clone().tagged(tag)),
|
||||
o => Err(o),
|
||||
})
|
||||
.filter_map(Result::ok)
|
||||
.collect::<Vec<Tagged<String>>>();
|
||||
|
||||
if let Value::Row(ref mut o) = new_obj {
|
||||
let mut current = o;
|
||||
for idx in 0..split_path.len() {
|
||||
match current.entries.get_mut(&split_path[idx].item) {
|
||||
Some(next) => {
|
||||
if idx == (split_path.len() - 1) {
|
||||
*next = replaced_value.tagged(&tag);
|
||||
return Some(new_obj.tagged(&tag));
|
||||
} else {
|
||||
match next.item {
|
||||
Value::Row(ref mut o) => {
|
||||
current = o;
|
||||
}
|
||||
_ => return None,
|
||||
}
|
||||
}
|
||||
let mut new_obj = self.clone();
|
||||
let mut current = &mut new_obj;
|
||||
|
||||
for idx in 0..split_path.len() {
|
||||
match current.get_mut_data_by_key(&split_path[idx].item) {
|
||||
Some(next) => {
|
||||
if idx == (split_path.len() - 1) {
|
||||
*next = replaced_value.tagged(&tag);
|
||||
return Some(new_obj.tagged(&tag));
|
||||
} else {
|
||||
current = &mut next.item;
|
||||
}
|
||||
_ => return None,
|
||||
}
|
||||
None => {
|
||||
return None;
|
||||
}
|
||||
}
|
||||
}
|
||||
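The hunk just above reworks `get_data_by_column_path`: the path is now a list of `Tagged<Value>` members, and the caller passes a boxed callback that turns the value plus the unresolved path member into a `ShellError`. A hedged sketch of a call site (the error wording is illustrative; the tests further down use the same shape with a no-op callback):

    use crate::prelude::*;

    fn pick<'a>(
        value: &'a Value,
        tag: Tag,
        path: &Vec<Tagged<Value>>,
    ) -> Result<Option<Tagged<&'a Value>>, ShellError> {
        value.get_data_by_column_path(
            tag,
            path,
            Box::new(|(_obj_source, column_path_tried): (Value, Tagged<Value>)| {
                // only invoked when a member of the path does not resolve
                ShellError::type_error(
                    "a row or table containing this column",
                    column_path_tried.tagged_type_name(),
                )
            }),
        )
    }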
@ -769,6 +734,15 @@ impl Value {
|
||||
}
|
||||
}
|
||||
|
||||
#[allow(unused)]
|
||||
pub fn row(entries: IndexMap<String, Tagged<Value>>) -> Value {
|
||||
Value::Row(entries.into())
|
||||
}
|
||||
|
||||
pub fn table(list: &Vec<Tagged<Value>>) -> Value {
|
||||
Value::Table(list.to_vec())
|
||||
}
|
||||
|
||||
pub fn string(s: impl Into<String>) -> Value {
|
||||
Value::Primitive(Primitive::String(s.into()))
|
||||
}
|
||||
@ -837,6 +811,7 @@ impl Tagged<Value> {
|
||||
pub(crate) fn as_path(&self) -> Result<PathBuf, ShellError> {
|
||||
match self.item() {
|
||||
Value::Primitive(Primitive::Path(path)) => Ok(path.clone()),
|
||||
Value::Primitive(Primitive::String(path_str)) => Ok(PathBuf::from(&path_str).clone()),
|
||||
other => Err(ShellError::type_error(
|
||||
"Path",
|
||||
other.type_name().tagged(self.tag()),
|
||||
@ -927,3 +902,252 @@ fn coerce_compare_primitive(
|
||||
_ => return Err((left.type_name(), right.type_name())),
|
||||
})
|
||||
}
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
|
||||
use crate::data::meta::*;
|
||||
use crate::ShellError;
|
||||
use crate::Value;
|
||||
use indexmap::IndexMap;
|
||||
|
||||
fn string(input: impl Into<String>) -> Tagged<Value> {
|
||||
Value::string(input.into()).tagged_unknown()
|
||||
}
|
||||
|
||||
fn number(n: i64) -> Tagged<Value> {
|
||||
Value::number(n).tagged_unknown()
|
||||
}
|
||||
|
||||
fn row(entries: IndexMap<String, Tagged<Value>>) -> Tagged<Value> {
|
||||
Value::row(entries).tagged_unknown()
|
||||
}
|
||||
|
||||
fn table(list: &Vec<Tagged<Value>>) -> Tagged<Value> {
|
||||
Value::table(list).tagged_unknown()
|
||||
}
|
||||
|
||||
fn error_callback() -> impl FnOnce((Value, Tagged<Value>)) -> ShellError {
|
||||
move |(_obj_source, _column_path_tried)| ShellError::unimplemented("will never be called.")
|
||||
}
|
||||
|
||||
fn column_path(paths: &Vec<Tagged<Value>>) -> Vec<Tagged<Value>> {
|
||||
table(paths).as_column_path().unwrap().item
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn gets_matching_field_from_a_row() {
|
||||
let row = Value::row(indexmap! {
|
||||
"amigos".into() => table(&vec![string("andres"),string("jonathan"),string("yehuda")])
|
||||
});
|
||||
|
||||
assert_eq!(
|
||||
*row.get_data_by_key("amigos").unwrap(),
|
||||
table(&vec![
|
||||
string("andres"),
|
||||
string("jonathan"),
|
||||
string("yehuda")
|
||||
])
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn gets_matching_field_from_nested_rows_inside_a_row() {
|
||||
let field_path = column_path(&vec![string("package"), string("version")]);
|
||||
|
||||
let (version, tag) = string("0.4.0").into_parts();
|
||||
|
||||
let value = Value::row(indexmap! {
|
||||
"package".into() =>
|
||||
row(indexmap! {
|
||||
"name".into() => string("nu"),
|
||||
"version".into() => string("0.4.0")
|
||||
})
|
||||
});
|
||||
|
||||
assert_eq!(
|
||||
**value
|
||||
.get_data_by_column_path(tag, &field_path, Box::new(error_callback()))
|
||||
.unwrap()
|
||||
.unwrap(),
|
||||
version
|
||||
)
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn column_path_that_contains_just_a_number_gets_a_row_from_a_table() {
|
||||
let field_path = column_path(&vec![string("package"), string("authors"), number(0)]);
|
||||
|
||||
let (_, tag) = string("Andrés N. Robalino").into_parts();
|
||||
|
||||
let value = Value::row(indexmap! {
|
||||
"package".into() => row(indexmap! {
|
||||
"name".into() => string("nu"),
|
||||
"version".into() => string("0.4.0"),
|
||||
"authors".into() => table(&vec![
|
||||
row(indexmap!{"name".into() => string("Andrés N. Robalino")}),
|
||||
row(indexmap!{"name".into() => string("Jonathan Turner")}),
|
||||
row(indexmap!{"name".into() => string("Yehuda Katz")})
|
||||
])
|
||||
})
|
||||
});
|
||||
|
||||
assert_eq!(
|
||||
**value
|
||||
.get_data_by_column_path(tag, &field_path, Box::new(error_callback()))
|
||||
.unwrap()
|
||||
.unwrap(),
|
||||
Value::row(indexmap! {
|
||||
"name".into() => string("Andrés N. Robalino")
|
||||
})
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn column_path_that_contains_just_a_number_gets_a_row_from_a_row() {
|
||||
let field_path = column_path(&vec![string("package"), string("authors"), string("0")]);
|
||||
|
||||
let (_, tag) = string("Andrés N. Robalino").into_parts();
|
||||
|
||||
let value = Value::row(indexmap! {
|
||||
"package".into() => row(indexmap! {
|
||||
"name".into() => string("nu"),
|
||||
"version".into() => string("0.4.0"),
|
||||
"authors".into() => row(indexmap! {
|
||||
"0".into() => row(indexmap!{"name".into() => string("Andrés N. Robalino")}),
|
||||
"1".into() => row(indexmap!{"name".into() => string("Jonathan Turner")}),
|
||||
"2".into() => row(indexmap!{"name".into() => string("Yehuda Katz")}),
|
||||
})
|
||||
})
|
||||
});
|
||||
|
||||
assert_eq!(
|
||||
**value
|
||||
.get_data_by_column_path(tag, &field_path, Box::new(error_callback()))
|
||||
.unwrap()
|
||||
.unwrap(),
|
||||
Value::row(indexmap! {
|
||||
"name".into() => string("Andrés N. Robalino")
|
||||
})
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn replaces_matching_field_from_a_row() {
|
||||
let field_path = column_path(&vec![string("amigos")]);
|
||||
|
||||
let sample = Value::row(indexmap! {
|
||||
"amigos".into() => table(&vec![
|
||||
string("andres"),
|
||||
string("jonathan"),
|
||||
string("yehuda"),
|
||||
]),
|
||||
});
|
||||
|
||||
let (replacement, tag) = string("jonas").into_parts();
|
||||
|
||||
let actual = sample
|
||||
.replace_data_at_column_path(tag, &field_path, replacement)
|
||||
.unwrap();
|
||||
|
||||
assert_eq!(actual, row(indexmap! {"amigos".into() => string("jonas")}));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn replaces_matching_field_from_nested_rows_inside_a_row() {
|
||||
let field_path = column_path(&vec![
|
||||
string("package"),
|
||||
string("authors"),
|
||||
string("los.3.caballeros"),
|
||||
]);
|
||||
|
||||
let sample = Value::row(indexmap! {
|
||||
"package".into() => row(indexmap! {
|
||||
"authors".into() => row(indexmap! {
|
||||
"los.3.mosqueteros".into() => table(&vec![string("andres::yehuda::jonathan")]),
|
||||
"los.3.amigos".into() => table(&vec![string("andres::yehuda::jonathan")]),
|
||||
"los.3.caballeros".into() => table(&vec![string("andres::yehuda::jonathan")])
|
||||
})
|
||||
})
|
||||
});
|
||||
|
||||
let (replacement, tag) = table(&vec![string("yehuda::jonathan::andres")]).into_parts();
|
||||
|
||||
let actual = sample
|
||||
.replace_data_at_column_path(tag.clone(), &field_path, replacement.clone())
|
||||
.unwrap();
|
||||
|
||||
assert_eq!(
|
||||
actual,
|
||||
Value::row(indexmap! {
|
||||
"package".into() => row(indexmap! {
|
||||
"authors".into() => row(indexmap! {
|
||||
"los.3.mosqueteros".into() => table(&vec![string("andres::yehuda::jonathan")]),
|
||||
"los.3.amigos".into() => table(&vec![string("andres::yehuda::jonathan")]),
|
||||
"los.3.caballeros".into() => replacement.tagged(&tag)})})})
|
||||
.tagged(tag)
|
||||
);
|
||||
}
|
||||
#[test]
|
||||
fn replaces_matching_field_from_rows_inside_a_table() {
|
||||
let field_path = column_path(&vec![
|
||||
string("shell_policy"),
|
||||
string("releases"),
|
||||
string("nu.version.arepa"),
|
||||
]);
|
||||
|
||||
let sample = Value::row(indexmap! {
|
||||
"shell_policy".into() => row(indexmap! {
|
||||
"releases".into() => table(&vec![
|
||||
row(indexmap! {
|
||||
"nu.version.arepa".into() => row(indexmap! {
|
||||
"code".into() => string("0.4.0"), "tag_line".into() => string("GitHub-era")
|
||||
})
|
||||
}),
|
||||
row(indexmap! {
|
||||
"nu.version.taco".into() => row(indexmap! {
|
||||
"code".into() => string("0.3.0"), "tag_line".into() => string("GitHub-era")
|
||||
})
|
||||
}),
|
||||
row(indexmap! {
|
||||
"nu.version.stable".into() => row(indexmap! {
|
||||
"code".into() => string("0.2.0"), "tag_line".into() => string("GitHub-era")
|
||||
})
|
||||
})
|
||||
])
|
||||
})
|
||||
});
|
||||
|
||||
let (replacement, tag) = row(indexmap! {
|
||||
"code".into() => string("0.5.0"),
|
||||
"tag_line".into() => string("CABALLEROS")
|
||||
})
|
||||
.into_parts();
|
||||
|
||||
let actual = sample
|
||||
.replace_data_at_column_path(tag.clone(), &field_path, replacement.clone())
|
||||
.unwrap();
|
||||
|
||||
assert_eq!(
|
||||
actual,
|
||||
Value::row(indexmap! {
|
||||
"shell_policy".into() => row(indexmap! {
|
||||
"releases".into() => table(&vec![
|
||||
row(indexmap! {
|
||||
"nu.version.arepa".into() => replacement.tagged(&tag)
|
||||
}),
|
||||
row(indexmap! {
|
||||
"nu.version.taco".into() => row(indexmap! {
|
||||
"code".into() => string("0.3.0"), "tag_line".into() => string("GitHub-era")
|
||||
})
|
||||
}),
|
||||
row(indexmap! {
|
||||
"nu.version.stable".into() => row(indexmap! {
|
||||
"code".into() => string("0.2.0"), "tag_line".into() => string("GitHub-era")
|
||||
})
|
||||
})
|
||||
])
|
||||
})
|
||||
}).tagged(&tag)
|
||||
);
|
||||
}
|
||||
}
|
||||
|
@ -45,12 +45,12 @@ fn signature_dict(signature: Signature, tag: impl Into<Tag>) -> Tagged<Value> {
|
||||
let mut sig = TaggedListBuilder::new(&tag);
|
||||
|
||||
for arg in signature.positional.iter() {
|
||||
let is_required = match arg {
|
||||
let is_required = match arg.0 {
|
||||
PositionalType::Mandatory(_, _) => true,
|
||||
PositionalType::Optional(_, _) => false,
|
||||
};
|
||||
|
||||
sig.insert_tagged(for_spec(arg.name(), "argument", is_required, &tag));
|
||||
sig.insert_tagged(for_spec(arg.0.name(), "argument", is_required, &tag));
|
||||
}
|
||||
|
||||
if let Some(_) = signature.rest_positional {
|
||||
@ -59,7 +59,7 @@ fn signature_dict(signature: Signature, tag: impl Into<Tag>) -> Tagged<Value> {
|
||||
}
|
||||
|
||||
for (name, ty) in signature.named.iter() {
|
||||
match ty {
|
||||
match ty.0 {
|
||||
NamedType::Mandatory(_) => sig.insert_tagged(for_spec(name, "flag", true, &tag)),
|
||||
NamedType::Optional(_) => sig.insert_tagged(for_spec(name, "flag", false, &tag)),
|
||||
NamedType::Switch => sig.insert_tagged(for_spec(name, "switch", false, &tag)),
|
||||
|
@ -89,6 +89,17 @@ impl Dictionary {
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn get_mut_data_by_key(&mut self, name: &str) -> Option<&mut Tagged<Value>> {
|
||||
match self
|
||||
.entries
|
||||
.iter_mut()
|
||||
.find(|(desc_name, _)| *desc_name == name)
|
||||
{
|
||||
Some((_, v)) => Some(v),
|
||||
None => None,
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn debug(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
||||
let mut debug = f.debug_struct("Dictionary");
|
||||
|
||||
|
153
src/data/meta.rs
@ -5,6 +5,7 @@ use derive_new::new;
|
||||
use getset::Getters;
|
||||
use serde::Deserialize;
|
||||
use serde::Serialize;
|
||||
use std::fmt;
|
||||
use std::path::{Path, PathBuf};
|
||||
|
||||
#[derive(new, Debug, Clone, Copy, Eq, PartialEq, Ord, PartialOrd, Serialize, Deserialize, Hash)]
|
||||
@ -104,6 +105,17 @@ impl<T> Tagged<T> {
|
||||
mapped.tagged(tag)
|
||||
}
|
||||
|
||||
pub fn map_anchored(self, anchor: &Option<AnchorLocation>) -> Tagged<T> {
|
||||
let mut tag = self.tag;
|
||||
|
||||
tag.anchor = anchor.clone();
|
||||
|
||||
Tagged {
|
||||
item: self.item,
|
||||
tag: tag,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn tag(&self) -> Tag {
|
||||
self.tag.clone()
|
||||
}
|
||||
@ -417,16 +429,6 @@ impl Span {
|
||||
self.slice(source).to_string().spanned(*self)
|
||||
}
|
||||
|
||||
/*
|
||||
pub fn unknown_with_uuid(uuid: Uuid) -> Span {
|
||||
Span {
|
||||
start: 0,
|
||||
end: 0,
|
||||
source: Some(uuid),
|
||||
}
|
||||
}
|
||||
*/
|
||||
|
||||
pub fn start(&self) -> usize {
|
||||
self.start
|
||||
}
|
||||
@ -461,3 +463,134 @@ impl language_reporting::ReportingSpan for Span {
|
||||
self.end
|
||||
}
|
||||
}
|
||||
|
||||
pub trait HasSpan: ToDebug {
|
||||
fn span(&self) -> Span;
|
||||
}
|
||||
|
||||
pub trait HasFallibleSpan: ToDebug {
|
||||
fn maybe_span(&self) -> Option<Span>;
|
||||
}
|
||||
|
||||
impl<T: HasSpan> HasFallibleSpan for T {
|
||||
fn maybe_span(&self) -> Option<Span> {
|
||||
Some(HasSpan::span(self))
|
||||
}
|
||||
}
|
||||
|
||||
impl<T> HasSpan for Spanned<T>
|
||||
where
|
||||
Spanned<T>: ToDebug,
|
||||
{
|
||||
fn span(&self) -> Span {
|
||||
self.span
|
||||
}
|
||||
}
|
||||
|
||||
impl HasFallibleSpan for Option<Span> {
|
||||
fn maybe_span(&self) -> Option<Span> {
|
||||
*self
|
||||
}
|
||||
}
|
||||
|
||||
impl FormatDebug for Option<Span> {
|
||||
fn fmt_debug(&self, f: &mut DebugFormatter, source: &str) -> fmt::Result {
|
||||
match self {
|
||||
Option::None => write!(f, "no span"),
|
||||
Option::Some(span) => FormatDebug::fmt_debug(span, f, source),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl FormatDebug for Span {
|
||||
fn fmt_debug(&self, f: &mut DebugFormatter, source: &str) -> fmt::Result {
|
||||
write!(f, "{:?}", self.slice(source))
|
||||
}
|
||||
}
|
||||
|
||||
impl HasSpan for Span {
|
||||
fn span(&self) -> Span {
|
||||
*self
|
||||
}
|
||||
}
|
||||
|
||||
impl<T> FormatDebug for Option<Spanned<T>>
|
||||
where
|
||||
Spanned<T>: ToDebug,
|
||||
{
|
||||
fn fmt_debug(&self, f: &mut DebugFormatter, source: &str) -> fmt::Result {
|
||||
match self {
|
||||
Option::None => write!(f, "nothing"),
|
||||
Option::Some(spanned) => FormatDebug::fmt_debug(spanned, f, source),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<T> HasFallibleSpan for Option<Spanned<T>>
|
||||
where
|
||||
Spanned<T>: ToDebug,
|
||||
{
|
||||
fn maybe_span(&self) -> Option<Span> {
|
||||
match self {
|
||||
None => None,
|
||||
Some(value) => Some(value.span),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<T> FormatDebug for Option<Tagged<T>>
|
||||
where
|
||||
Tagged<T>: ToDebug,
|
||||
{
|
||||
fn fmt_debug(&self, f: &mut DebugFormatter, source: &str) -> fmt::Result {
|
||||
match self {
|
||||
Option::None => write!(f, "nothing"),
|
||||
Option::Some(item) => FormatDebug::fmt_debug(item, f, source),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<T> HasFallibleSpan for Option<Tagged<T>>
|
||||
where
|
||||
Tagged<T>: ToDebug,
|
||||
{
|
||||
fn maybe_span(&self) -> Option<Span> {
|
||||
match self {
|
||||
None => None,
|
||||
Some(value) => Some(value.tag.span),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<T> HasSpan for Tagged<T>
|
||||
where
|
||||
Tagged<T>: ToDebug,
|
||||
{
|
||||
fn span(&self) -> Span {
|
||||
self.tag.span
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: ToDebug> FormatDebug for Vec<T> {
|
||||
fn fmt_debug(&self, f: &mut DebugFormatter, source: &str) -> fmt::Result {
|
||||
write!(f, "[ ")?;
|
||||
write!(
|
||||
f,
|
||||
"{}",
|
||||
self.iter().map(|item| item.debug(source)).join(" ")
|
||||
)?;
|
||||
write!(f, " ]")
|
||||
}
|
||||
}
|
||||
|
||||
impl FormatDebug for String {
|
||||
fn fmt_debug(&self, f: &mut DebugFormatter, _source: &str) -> fmt::Result {
|
||||
write!(f, "{}", self)
|
||||
}
|
||||
}
|
||||
|
||||
impl FormatDebug for Spanned<String> {
|
||||
fn fmt_debug(&self, f: &mut DebugFormatter, _source: &str) -> fmt::Result {
|
||||
write!(f, "{}", self.item)
|
||||
}
|
||||
}
|
||||
|
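The `HasSpan` and `HasFallibleSpan` traits introduced above let any debuggable value report the source span it covers, and the blanket impl turns every `HasSpan` into a `HasFallibleSpan`. A hedged sketch of wiring a hypothetical crate-internal type into them, assuming `ToDebug` stays blanket-implemented for `FormatDebug` types as the other hunks suggest:

    use crate::{DebugFormatter, FormatDebug, HasSpan, Span};
    use std::fmt;

    // Hypothetical node that remembers where in the source it came from.
    struct Comment {
        text: String,
        span: Span,
    }

    impl FormatDebug for Comment {
        fn fmt_debug(&self, f: &mut DebugFormatter, source: &str) -> fmt::Result {
            // show the slice of the original source this comment covers
            write!(f, "comment: {:?}", self.span.slice(source))
        }
    }

    impl HasSpan for Comment {
        fn span(&self) -> Span {
            self.span
        }
    }

    // Nothing more is needed: the blanket impl supplies HasFallibleSpan,
    // so `comment.maybe_span()` yields `Some(comment.span())`.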
102
src/errors.rs
@ -30,6 +30,82 @@ impl Description {
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub enum ParseErrorReason {
|
||||
Eof {
|
||||
expected: &'static str,
|
||||
},
|
||||
Mismatch {
|
||||
expected: &'static str,
|
||||
actual: Tagged<String>,
|
||||
},
|
||||
ArgumentError {
|
||||
command: String,
|
||||
error: ArgumentError,
|
||||
tag: Tag,
|
||||
},
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct ParseError {
|
||||
reason: ParseErrorReason,
|
||||
tag: Tag,
|
||||
}
|
||||
|
||||
impl ParseError {
|
||||
pub fn unexpected_eof(expected: &'static str, span: Span) -> ParseError {
|
||||
ParseError {
|
||||
reason: ParseErrorReason::Eof { expected },
|
||||
tag: span.into(),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn mismatch(expected: &'static str, actual: Tagged<impl Into<String>>) -> ParseError {
|
||||
let Tagged { tag, item } = actual;
|
||||
|
||||
ParseError {
|
||||
reason: ParseErrorReason::Mismatch {
|
||||
expected,
|
||||
actual: item.into().tagged(tag.clone()),
|
||||
},
|
||||
tag,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn argument_error(
|
||||
command: impl Into<String>,
|
||||
kind: ArgumentError,
|
||||
tag: impl Into<Tag>,
|
||||
) -> ParseError {
|
||||
let tag = tag.into();
|
||||
|
||||
ParseError {
|
||||
reason: ParseErrorReason::ArgumentError {
|
||||
command: command.into(),
|
||||
error: kind,
|
||||
tag: tag.clone(),
|
||||
},
|
||||
tag: tag.clone(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl From<ParseError> for ShellError {
|
||||
fn from(error: ParseError) -> ShellError {
|
||||
match error.reason {
|
||||
ParseErrorReason::Eof { expected } => ShellError::unexpected_eof(expected, error.tag),
|
||||
ParseErrorReason::Mismatch { actual, expected } => {
|
||||
ShellError::type_error(expected, actual.clone())
|
||||
}
|
||||
ParseErrorReason::ArgumentError {
|
||||
command,
|
||||
error,
|
||||
tag,
|
||||
} => ShellError::argument_error(command, error, tag),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
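The new `ParseError` above keeps parser failures structured until they cross into the shell, where the `From<ParseError> for ShellError` impl lowers them. A minimal sketch of that round trip using the constructors from this hunk (the expected-token text is illustrative):

    use crate::errors::{ParseError, ShellError};
    use crate::Span;

    fn report_eof() -> ShellError {
        // the parser ran out of input while looking for a closing delimiter
        let err = ParseError::unexpected_eof("}", Span::new(0, 0));

        // at the parser/shell boundary the structured reason is lowered into ShellError
        err.into()
    }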
#[derive(Debug, Eq, PartialEq, Clone, Ord, PartialOrd, Serialize, Deserialize)]
|
||||
pub enum ArgumentError {
|
||||
MissingMandatoryFlag(String),
|
||||
@ -51,8 +127,8 @@ impl ShellError {
|
||||
}
|
||||
}
|
||||
|
||||
impl ToDebug for ShellError {
|
||||
fn fmt_debug(&self, f: &mut fmt::Formatter, source: &str) -> fmt::Result {
|
||||
impl FormatDebug for ShellError {
|
||||
fn fmt_debug(&self, f: &mut DebugFormatter, source: &str) -> fmt::Result {
|
||||
self.error.fmt_debug(f, source)
|
||||
}
|
||||
}
|
||||
@ -153,16 +229,6 @@ impl ShellError {
|
||||
.start()
|
||||
}
|
||||
|
||||
pub(crate) fn invalid_external_word(tag: impl Into<Tag>) -> ShellError {
|
||||
ProximateShellError::ArgumentError {
|
||||
command: "Invalid argument to Nu command (did you mean to call an external command?)"
|
||||
.into(),
|
||||
error: ArgumentError::InvalidExternalWord,
|
||||
tag: tag.into(),
|
||||
}
|
||||
.start()
|
||||
}
|
||||
|
||||
pub(crate) fn parse_error(
|
||||
error: nom::Err<(
|
||||
nom_locate::LocatedSpanEx<&str, TracableContext>,
|
||||
@ -367,6 +433,10 @@ impl ShellError {
|
||||
// pub fn string(title: impl Into<String>) -> ShellError {
|
||||
// ProximateShellError::String(StringError::new(title.into(), String::new())).start()
|
||||
// }
|
||||
//
|
||||
// pub(crate) fn unreachable(title: impl Into<String>) -> ShellError {
|
||||
// ShellError::untagged_runtime_error(&format!("BUG: Unreachable: {}", title.into()))
|
||||
// }
|
||||
|
||||
pub(crate) fn unimplemented(title: impl Into<String>) -> ShellError {
|
||||
ShellError::untagged_runtime_error(&format!("Unimplemented: {}", title.into()))
|
||||
@ -375,10 +445,6 @@ impl ShellError {
|
||||
pub(crate) fn unexpected(title: impl Into<String>) -> ShellError {
|
||||
ShellError::untagged_runtime_error(&format!("Unexpected: {}", title.into()))
|
||||
}
|
||||
|
||||
pub(crate) fn unreachable(title: impl Into<String>) -> ShellError {
|
||||
ShellError::untagged_runtime_error(&format!("BUG: Unreachable: {}", title.into()))
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Eq, PartialEq, Clone, Ord, PartialOrd, Serialize, Deserialize)]
|
||||
@ -490,8 +556,8 @@ impl ProximateShellError {
|
||||
}
|
||||
}
|
||||
|
||||
impl ToDebug for ProximateShellError {
|
||||
fn fmt_debug(&self, f: &mut fmt::Formatter, _source: &str) -> fmt::Result {
|
||||
impl FormatDebug for ProximateShellError {
|
||||
fn fmt_debug(&self, f: &mut DebugFormatter, _source: &str) -> fmt::Result {
|
||||
// TODO: Custom debug for inner spans
|
||||
write!(f, "{:?}", self)
|
||||
}
|
||||
|
@ -5,6 +5,7 @@ use crate::parser::{
|
||||
CommandRegistry, Text,
|
||||
};
|
||||
use crate::prelude::*;
|
||||
use crate::TaggedDictBuilder;
|
||||
use derive_new::new;
|
||||
use indexmap::IndexMap;
|
||||
use log::trace;
|
||||
@ -148,7 +149,7 @@ fn evaluate_literal(literal: Tagged<&hir::Literal>, source: &Text) -> Tagged<Val
|
||||
hir::Literal::Number(int) => int.into(),
|
||||
hir::Literal::Size(int, unit) => unit.compute(int),
|
||||
hir::Literal::String(tag) => Value::string(tag.slice(source)),
|
||||
hir::Literal::GlobPattern => Value::pattern(literal.tag().slice(source)),
|
||||
hir::Literal::GlobPattern(pattern) => Value::pattern(pattern),
|
||||
hir::Literal::Bare => Value::string(literal.tag().slice(source)),
|
||||
};
|
||||
|
||||
@ -164,11 +165,38 @@ fn evaluate_reference(
|
||||
trace!("Evaluating {} with Scope {}", name, scope);
|
||||
match name {
|
||||
hir::Variable::It(_) => Ok(scope.it.item.clone().tagged(tag)),
|
||||
hir::Variable::Other(inner) => Ok(scope
|
||||
.vars
|
||||
.get(inner.slice(source))
|
||||
.map(|v| v.clone())
|
||||
.unwrap_or_else(|| Value::nothing().tagged(tag))),
|
||||
hir::Variable::Other(inner) => match inner.slice(source) {
|
||||
x if x == "nu:env" => {
|
||||
let mut dict = TaggedDictBuilder::new(&tag);
|
||||
for v in std::env::vars() {
|
||||
if v.0 != "PATH" && v.0 != "Path" {
|
||||
dict.insert(v.0, Value::string(v.1));
|
||||
}
|
||||
}
|
||||
Ok(dict.into_tagged_value())
|
||||
}
|
||||
x if x == "nu:config" => {
|
||||
let config = crate::data::config::read(tag.clone(), &None)?;
|
||||
Ok(Value::row(config).tagged(tag))
|
||||
}
|
||||
x if x == "nu:path" => {
|
||||
let mut table = vec![];
|
||||
match std::env::var_os("PATH") {
|
||||
Some(paths) => {
|
||||
for path in std::env::split_paths(&paths) {
|
||||
table.push(Value::path(path).tagged(&tag));
|
||||
}
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
Ok(Value::table(&table).tagged(tag))
|
||||
}
|
||||
x => Ok(scope
|
||||
.vars
|
||||
.get(x)
|
||||
.map(|v| v.clone())
|
||||
.unwrap_or_else(|| Value::nothing().tagged(tag))),
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -23,11 +23,20 @@ enum TableMode {
|
||||
|
||||
impl TableView {
|
||||
fn merge_descriptors(values: &[Tagged<Value>]) -> Vec<String> {
|
||||
let mut ret = vec![];
|
||||
let mut ret: Vec<String> = vec![];
|
||||
let value_column = "<value>".to_string();
|
||||
for value in values {
|
||||
for desc in value.data_descriptors() {
|
||||
if !ret.contains(&desc) {
|
||||
ret.push(desc);
|
||||
let descs = value.data_descriptors();
|
||||
|
||||
if descs.len() == 0 {
|
||||
if !ret.contains(&value_column) {
|
||||
ret.push("<value>".to_string());
|
||||
}
|
||||
} else {
|
||||
for desc in value.data_descriptors() {
|
||||
if !ret.contains(&desc) {
|
||||
ret.push(desc);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -42,29 +51,65 @@ impl TableView {
|
||||
let mut headers = TableView::merge_descriptors(values);
|
||||
|
||||
if headers.len() == 0 {
|
||||
headers.push("<unknown>".to_string());
|
||||
headers.push("<value>".to_string());
|
||||
}
|
||||
|
||||
let mut entries = vec![];
|
||||
|
||||
for (idx, value) in values.iter().enumerate() {
|
||||
let mut row: Vec<(String, &'static str)> = match value {
|
||||
Tagged {
|
||||
item: Value::Row(..),
|
||||
..
|
||||
} => headers
|
||||
.iter()
|
||||
.enumerate()
|
||||
.map(|(i, d)| {
|
||||
let data = value.get_data(d);
|
||||
return (
|
||||
data.borrow().format_leaf(Some(&headers[i])),
|
||||
data.borrow().style_leaf(),
|
||||
);
|
||||
})
|
||||
.collect(),
|
||||
x => vec![(x.format_leaf(None), x.style_leaf())],
|
||||
};
|
||||
// let mut row: Vec<(String, &'static str)> = match value {
|
||||
// Tagged {
|
||||
// item: Value::Row(..),
|
||||
// ..
|
||||
// } => headers
|
||||
// .iter()
|
||||
// .enumerate()
|
||||
// .map(|(i, d)| {
|
||||
// let data = value.get_data(d);
|
||||
// return (
|
||||
// data.borrow().format_leaf(Some(&headers[i])),
|
||||
// data.borrow().style_leaf(),
|
||||
// );
|
||||
// })
|
||||
// .collect(),
|
||||
// x => vec![(x.format_leaf(None), x.style_leaf())],
|
||||
// };
|
||||
|
||||
let mut row: Vec<(String, &'static str)> = headers
|
||||
.iter()
|
||||
.enumerate()
|
||||
.map(|(i, d)| {
|
||||
if d == "<value>" {
|
||||
match value {
|
||||
Tagged {
|
||||
item: Value::Row(..),
|
||||
..
|
||||
} => (
|
||||
Value::nothing().format_leaf(None),
|
||||
Value::nothing().style_leaf(),
|
||||
),
|
||||
_ => (value.format_leaf(None), value.style_leaf()),
|
||||
}
|
||||
} else {
|
||||
match value {
|
||||
Tagged {
|
||||
item: Value::Row(..),
|
||||
..
|
||||
} => {
|
||||
let data = value.get_data(d);
|
||||
(
|
||||
data.borrow().format_leaf(Some(&headers[i])),
|
||||
data.borrow().style_leaf(),
|
||||
)
|
||||
}
|
||||
_ => (
|
||||
Value::nothing().format_leaf(None),
|
||||
Value::nothing().style_leaf(),
|
||||
),
|
||||
}
|
||||
}
|
||||
})
|
||||
.collect();
|
||||
|
||||
if values.len() > 1 {
|
||||
// Indices are black, bold, right-aligned:
|
||||
|
@ -73,9 +73,7 @@ pub fn interactive_fuzzy_search(lines: &Vec<&str>, max_results: usize) -> Select
|
||||
searchinput.pop();
|
||||
selected = 0;
|
||||
}
|
||||
_ => {
|
||||
// println!("OTHER InputEvent: {:?}", k);
|
||||
}
|
||||
_ => {}
|
||||
},
|
||||
_ => {}
|
||||
}
|
||||
|
12
src/lib.rs
@ -1,5 +1,9 @@
#![recursion_limit = "1024"]

#[cfg(test)]
#[macro_use]
extern crate indexmap;

#[macro_use]
mod prelude;

@ -26,12 +30,16 @@ pub use crate::env::host::BasicHost;
pub use crate::parser::hir::SyntaxShape;
pub use crate::parser::parse::token_tree_builder::TokenTreeBuilder;
pub use crate::plugin::{serve_plugin, Plugin};
pub use crate::utils::{AbsoluteFile, AbsolutePath, RelativePath};
pub use crate::traits::{DebugFormatter, FormatDebug, ToDebug};
pub use crate::utils::{did_you_mean, AbsoluteFile, AbsolutePath, RelativePath};
pub use cli::cli;
pub use data::base::{Primitive, Value};
pub use data::config::{config_path, APP_INFO};
pub use data::dict::{Dictionary, TaggedDictBuilder};
pub use data::meta::{Span, Spanned, SpannedItem, Tag, Tagged, TaggedItem};
pub use data::meta::{
tag_for_tagged_list, HasFallibleSpan, HasSpan, Span, Spanned, SpannedItem, Tag, Tagged,
TaggedItem,
};
pub use errors::{CoerceInto, ShellError};
pub use num_traits::cast::ToPrimitive;
pub use parser::parse::text::Text;
18
src/main.rs
@ -3,9 +3,6 @@ use log::LevelFilter;
use std::error::Error;

fn main() -> Result<(), Box<dyn Error>> {
#[cfg(feature1)]
println!("feature1 is enabled");

let matches = App::new("nushell")
.version(clap::crate_version!())
.arg(

@ -22,6 +19,12 @@ fn main() -> Result<(), Box<dyn Error>> {
.multiple(true)
.takes_value(true),
)
.arg(
Arg::with_name("debug")
.long("debug")
.multiple(true)
.takes_value(true),
)
.get_matches();

let loglevel = match matches.value_of("loglevel") {

@ -51,6 +54,15 @@ fn main() -> Result<(), Box<dyn Error>> {
}
}

match matches.values_of("debug") {
None => {}
Some(values) => {
for item in values {
builder.filter_module(&format!("nu::{}", item), LevelFilter::Debug);
}
}
}

builder.try_init()?;

futures::executor::block_on(nu::cli())?;
@ -14,7 +14,6 @@ pub(crate) use parse::files::Files;
|
||||
pub(crate) use parse::flag::{Flag, FlagKind};
|
||||
pub(crate) use parse::operator::Operator;
|
||||
pub(crate) use parse::parser::{nom_input, pipeline};
|
||||
pub(crate) use parse::pipeline::{Pipeline, PipelineElement};
|
||||
pub(crate) use parse::text::Text;
|
||||
pub(crate) use parse::token_tree::{DelimitedNode, Delimiter, TokenNode};
|
||||
pub(crate) use parse::tokens::{RawNumber, RawToken};
|
||||
|
@ -61,7 +61,7 @@ impl<'de> ConfigDeserializer<'de> {
|
||||
pub fn top(&mut self) -> &DeserializerItem {
|
||||
let value = self.stack.last();
|
||||
trace!("inspecting top value :: {:?}", value);
|
||||
value.expect("Can't get top elemant of an empty stack")
|
||||
value.expect("Can't get top element of an empty stack")
|
||||
}
|
||||
|
||||
pub fn pop(&mut self) -> DeserializerItem {
|
||||
@ -486,8 +486,8 @@ mod tests {
|
||||
// is unspecified and change is likely.
|
||||
// This test makes sure that such change is detected
|
||||
// by this test failing, and not things silently breaking.
|
||||
// Specifically, we rely on this behaviour further above
|
||||
// in the file to special case Tagged<Value> parsing.
|
||||
// Specifically, we rely on this behavior further above
|
||||
// in the file for the Tagged<Value> special case parsing.
|
||||
let tuple = type_name::<()>();
|
||||
let tagged_tuple = type_name::<Tagged<()>>();
|
||||
let tagged_value = type_name::<Tagged<Value>>();
|
||||
|
@ -24,7 +24,6 @@ pub(crate) use self::external_command::ExternalCommand;
|
||||
pub(crate) use self::named::NamedArguments;
|
||||
pub(crate) use self::path::Path;
|
||||
pub(crate) use self::syntax_shape::ExpandContext;
|
||||
pub(crate) use self::tokens_iterator::debug::debug_tokens;
|
||||
pub(crate) use self::tokens_iterator::TokensIterator;
|
||||
|
||||
pub use self::syntax_shape::SyntaxShape;
|
||||
@ -50,8 +49,8 @@ impl Call {
|
||||
}
|
||||
}
|
||||
|
||||
impl ToDebug for Call {
|
||||
fn fmt_debug(&self, f: &mut fmt::Formatter, source: &str) -> fmt::Result {
|
||||
impl FormatDebug for Call {
|
||||
fn fmt_debug(&self, f: &mut DebugFormatter, source: &str) -> fmt::Result {
|
||||
write!(f, "({}", self.head.debug(source))?;
|
||||
|
||||
if let Some(positional) = &self.positional {
|
||||
@ -227,8 +226,8 @@ impl Expression {
|
||||
RawExpression::Literal(Literal::Bare).spanned(span)
|
||||
}
|
||||
|
||||
pub(crate) fn pattern(span: impl Into<Span>) -> Expression {
|
||||
RawExpression::Literal(Literal::GlobPattern).spanned(span.into())
|
||||
pub(crate) fn pattern(inner: impl Into<String>, outer: impl Into<Span>) -> Expression {
|
||||
RawExpression::Literal(Literal::GlobPattern(inner.into())).spanned(outer.into())
|
||||
}
|
||||
|
||||
pub(crate) fn variable(inner: impl Into<Span>, outer: impl Into<Span>) -> Expression {
|
||||
@ -242,10 +241,14 @@ impl Expression {
|
||||
pub(crate) fn it_variable(inner: impl Into<Span>, outer: impl Into<Span>) -> Expression {
|
||||
RawExpression::Variable(Variable::It(inner.into())).spanned(outer)
|
||||
}
|
||||
|
||||
pub(crate) fn tagged_type_name(&self) -> Tagged<&'static str> {
|
||||
self.item.type_name().tagged(self.span)
|
||||
}
|
||||
}
|
||||
|
||||
impl ToDebug for Expression {
|
||||
fn fmt_debug(&self, f: &mut fmt::Formatter, source: &str) -> fmt::Result {
|
||||
impl FormatDebug for Spanned<RawExpression> {
|
||||
fn fmt_debug(&self, f: &mut DebugFormatter, source: &str) -> fmt::Result {
|
||||
match &self.item {
|
||||
RawExpression::Literal(l) => l.spanned(self.span).fmt_debug(f, source),
|
||||
RawExpression::FilePath(p) => write!(f, "{}", p.display()),
|
||||
@ -256,7 +259,7 @@ impl ToDebug for Expression {
|
||||
RawExpression::Variable(Variable::Other(s)) => write!(f, "${}", s.slice(source)),
|
||||
RawExpression::Binary(b) => write!(f, "{}", b.debug(source)),
|
||||
RawExpression::ExternalCommand(c) => write!(f, "^{}", c.name().slice(source)),
|
||||
RawExpression::Block(exprs) => {
|
||||
RawExpression::Block(exprs) => f.say_block("block", |f| {
|
||||
write!(f, "{{ ")?;
|
||||
|
||||
for expr in exprs {
|
||||
@ -264,8 +267,8 @@ impl ToDebug for Expression {
|
||||
}
|
||||
|
||||
write!(f, "}}")
|
||||
}
|
||||
RawExpression::List(exprs) => {
|
||||
}),
|
||||
RawExpression::List(exprs) => f.say_block("list", |f| {
|
||||
write!(f, "[ ")?;
|
||||
|
||||
for expr in exprs {
|
||||
@ -273,7 +276,7 @@ impl ToDebug for Expression {
|
||||
}
|
||||
|
||||
write!(f, "]")
|
||||
}
|
||||
}),
|
||||
RawExpression::Path(p) => write!(f, "{}", p.debug(source)),
|
||||
RawExpression::Boolean(true) => write!(f, "$yes"),
|
||||
RawExpression::Boolean(false) => write!(f, "$no"),
|
||||
@ -297,7 +300,7 @@ pub enum Literal {
|
||||
Number(Number),
|
||||
Size(Number, Unit),
|
||||
String(Span),
|
||||
GlobPattern,
|
||||
GlobPattern(String),
|
||||
Bare,
|
||||
}
|
||||
|
||||
@ -315,20 +318,20 @@ impl std::fmt::Display for Tagged<&Literal> {
|
||||
Literal::Number(number) => write!(f, "{}", number),
|
||||
Literal::Size(number, unit) => write!(f, "{}{}", number, unit.as_str()),
|
||||
Literal::String(_) => write!(f, "String{{ {}..{} }}", span.start(), span.end()),
|
||||
Literal::GlobPattern => write!(f, "Glob{{ {}..{} }}", span.start(), span.end()),
|
||||
Literal::GlobPattern(_) => write!(f, "Glob{{ {}..{} }}", span.start(), span.end()),
|
||||
Literal::Bare => write!(f, "Bare{{ {}..{} }}", span.start(), span.end()),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl ToDebug for Spanned<&Literal> {
|
||||
fn fmt_debug(&self, f: &mut fmt::Formatter, source: &str) -> fmt::Result {
|
||||
impl FormatDebug for Spanned<&Literal> {
|
||||
fn fmt_debug(&self, f: &mut DebugFormatter, source: &str) -> fmt::Result {
|
||||
match self.item {
|
||||
Literal::Number(number) => write!(f, "{:?}", number),
|
||||
Literal::Size(number, unit) => write!(f, "{:?}{:?}", *number, unit),
|
||||
Literal::String(tag) => write!(f, "{}", tag.slice(source)),
|
||||
Literal::GlobPattern => write!(f, "{}", self.span.slice(source)),
|
||||
Literal::Bare => write!(f, "{}", self.span.slice(source)),
|
||||
Literal::Number(..) => f.say_str("number", self.span.slice(source)),
|
||||
Literal::Size(..) => f.say_str("size", self.span.slice(source)),
|
||||
Literal::String(..) => f.say_str("string", self.span.slice(source)),
|
||||
Literal::GlobPattern(..) => f.say_str("glob", self.span.slice(source)),
|
||||
Literal::Bare => f.say_str("word", self.span.slice(source)),
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -340,7 +343,7 @@ impl Literal {
|
||||
Literal::Size(..) => "size",
|
||||
Literal::String(..) => "string",
|
||||
Literal::Bare => "string",
|
||||
Literal::GlobPattern => "pattern",
|
||||
Literal::GlobPattern(_) => "pattern",
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -359,3 +362,9 @@ impl std::fmt::Display for Variable {
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl FormatDebug for Spanned<Variable> {
|
||||
fn fmt_debug(&self, f: &mut DebugFormatter, source: &str) -> fmt::Result {
|
||||
write!(f, "{}", self.span.slice(source))
|
||||
}
|
||||
}
|
||||
|
@ -6,7 +6,7 @@ use crate::parser::hir::syntax_shape::*;
|
||||
use crate::parser::hir::TokensIterator;
|
||||
use crate::parser::parse::token_tree_builder::{CurriedToken, TokenTreeBuilder as b};
|
||||
use crate::parser::TokenNode;
|
||||
use crate::{Span, SpannedItem, Tag, Tagged, Text};
|
||||
use crate::{HasSpan, Span, SpannedItem, Tag, Text};
|
||||
use pretty_assertions::assert_eq;
|
||||
use std::fmt::Debug;
|
||||
|
||||
@ -63,7 +63,9 @@ fn test_parse_command() {
|
||||
vec![b::bare("ls"), b::sp(), b::pattern("*.txt")],
|
||||
|tokens| {
|
||||
let bare = tokens[0].expect_bare();
|
||||
let pat = tokens[2].span();
|
||||
let pat = tokens[2].expect_pattern();
|
||||
|
||||
eprintln!("{:?} {:?} {:?}", bare, pat, bare.until(pat));
|
||||
|
||||
ClassifiedCommand::Internal(InternalCommand::new(
|
||||
"ls".to_string(),
|
||||
@ -73,9 +75,10 @@ fn test_parse_command() {
|
||||
},
|
||||
hir::Call {
|
||||
head: Box::new(hir::RawExpression::Command(bare).spanned(bare)),
|
||||
positional: Some(vec![hir::Expression::pattern(pat)]),
|
||||
positional: Some(vec![hir::Expression::pattern("*.txt", pat)]),
|
||||
named: None,
|
||||
},
|
||||
}
|
||||
.spanned(bare.until(pat)),
|
||||
))
|
||||
// hir::Expression::path(
|
||||
// hir::Expression::variable(inner_var, outer_var),
|
||||
@ -84,53 +87,31 @@ fn test_parse_command() {
|
||||
// )
|
||||
},
|
||||
);
|
||||
|
||||
parse_tokens(
|
||||
VariablePathShape,
|
||||
vec![
|
||||
b::var("cpu"),
|
||||
b::op("."),
|
||||
b::bare("amount"),
|
||||
b::op("."),
|
||||
b::string("max ghz"),
|
||||
],
|
||||
|tokens| {
|
||||
let (outer_var, inner_var) = tokens[0].expect_var();
|
||||
let amount = tokens[2].expect_bare();
|
||||
let (outer_max_ghz, _) = tokens[4].expect_string();
|
||||
|
||||
hir::Expression::path(
|
||||
hir::Expression::variable(inner_var, outer_var),
|
||||
vec!["amount".spanned(amount), "max ghz".spanned(outer_max_ghz)],
|
||||
outer_var.until(outer_max_ghz),
|
||||
)
|
||||
},
|
||||
);
|
||||
}
|
||||
|
||||
fn parse_tokens<T: Eq + Debug>(
|
||||
fn parse_tokens<T: Eq + HasSpan + Clone + Debug + 'static>(
|
||||
shape: impl ExpandSyntax<Output = T>,
|
||||
tokens: Vec<CurriedToken>,
|
||||
expected: impl FnOnce(Tagged<&[TokenNode]>) -> T,
|
||||
expected: impl FnOnce(&[TokenNode]) -> T,
|
||||
) {
|
||||
let tokens = b::token_list(tokens);
|
||||
let (tokens, source) = b::build(tokens);
|
||||
|
||||
ExpandContext::with_empty(&Text::from(source), |context| {
|
||||
let tokens = tokens.expect_list();
|
||||
let mut iterator = TokensIterator::all(tokens.item, *context.span());
|
||||
let mut iterator = TokensIterator::all(tokens.item, tokens.span);
|
||||
|
||||
let expr = expand_syntax(&shape, &mut iterator, &context);
|
||||
|
||||
let expr = match expr {
|
||||
Ok(expr) => expr,
|
||||
Err(err) => {
|
||||
crate::cli::print_err(err, &BasicHost, context.source().clone());
|
||||
crate::cli::print_err(err.into(), &BasicHost, context.source().clone());
|
||||
panic!("Parse failed");
|
||||
}
|
||||
};
|
||||
|
||||
assert_eq!(expr, expected(tokens));
|
||||
assert_eq!(expr, expected(tokens.item));
|
||||
})
|
||||
}
|
||||
|
||||
|
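The `parse_tokens` helper in the hunk above is a small table-driven harness: build an input, run a syntax shape over it, and compare the result against an expectation computed from the same tokens. The following is a generic sketch of that harness shape; `Shape`, `WordCount`, and this `parse_tokens` signature are simplified hypothetical stand-ins, not the nushell test code itself.

```rust
use std::fmt::Debug;

// A stand-in for ExpandSyntax: anything that can turn an input into an output.
trait Shape {
    type Output: Eq + Debug;
    fn expand(&self, input: &str) -> Result<Self::Output, String>;
}

struct WordCount;

impl Shape for WordCount {
    type Output = usize;
    fn expand(&self, input: &str) -> Result<usize, String> {
        Ok(input.split_whitespace().count())
    }
}

// Run the shape, panic on parse failure, and compare against the expectation
// computed from the same input, mirroring the assert_eq! in the hunk above.
fn parse_tokens<S: Shape>(shape: S, input: &str, expected: impl FnOnce(&str) -> S::Output) {
    let actual = match shape.expand(input) {
        Ok(value) => value,
        Err(err) => panic!("Parse failed: {}", err),
    };
    assert_eq!(actual, expected(input));
}

fn main() {
    parse_tokens(WordCount, "ls *.txt", |input| input.split_whitespace().count());
    println!("ok");
}
```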
@@ -22,8 +22,8 @@ impl fmt::Display for Binary {
}
}

impl ToDebug for Binary {
fn fmt_debug(&self, f: &mut fmt::Formatter, source: &str) -> fmt::Result {
impl FormatDebug for Binary {
fn fmt_debug(&self, f: &mut DebugFormatter, source: &str) -> fmt::Result {
write!(f, "{}", self.left.debug(source))?;
write!(f, " {} ", self.op.debug(source))?;
write!(f, "{}", self.right.debug(source))?;
@@ -1,35 +1,55 @@
use crate::errors::ShellError;
use crate::errors::ParseError;
#[cfg(not(coloring_in_tokens))]
use crate::parser::hir::syntax_shape::FlatShape;
use crate::parser::{
hir::syntax_shape::{
color_syntax, expand_atom, AtomicToken, ColorSyntax, ExpandContext, ExpansionRule,
MaybeSpaceShape,
color_syntax, expand_atom, expand_expr, expand_syntax, AtomicToken, ColorSyntax,
ExpandContext, ExpandExpression, ExpandSyntax, ExpansionRule, MaybeSpaceShape,
},
TokenNode, TokensIterator,
hir::Expression,
TokensIterator,
};
use crate::{Span, Spanned, Text};

pub fn expand_external_tokens(
token_nodes: &mut TokensIterator<'_>,
source: &Text,
) -> Result<Vec<Spanned<String>>, ShellError> {
let mut out: Vec<Spanned<String>> = vec![];

loop {
if let Some(span) = expand_next_expression(token_nodes)? {
out.push(span.spanned_string(source));
} else {
break;
}
}

Ok(out)
}
use crate::{DebugFormatter, FormatDebug, Span, Spanned, SpannedItem};
use std::fmt;

#[derive(Debug, Copy, Clone)]
pub struct ExternalTokensShape;

impl FormatDebug for Spanned<Vec<Spanned<String>>> {
fn fmt_debug(&self, f: &mut DebugFormatter, source: &str) -> fmt::Result {
FormatDebug::fmt_debug(&self.item, f, source)
}
}

impl ExpandSyntax for ExternalTokensShape {
type Output = Spanned<Vec<Spanned<String>>>;

fn name(&self) -> &'static str {
"external command"
}

fn expand_syntax<'a, 'b>(
&self,
token_nodes: &'b mut TokensIterator<'a>,
context: &ExpandContext,
) -> Result<Self::Output, ParseError> {
let mut out: Vec<Spanned<String>> = vec![];

let start = token_nodes.span_at_cursor();

loop {
match expand_syntax(&ExternalExpressionShape, token_nodes, context) {
Err(_) | Ok(None) => break,
Ok(Some(span)) => out.push(span.spanned_string(context.source())),
}
}

let end = token_nodes.span_at_cursor();

Ok(out.spanned(start.until(end)))
}
}

#[cfg(not(coloring_in_tokens))]
impl ColorSyntax for ExternalTokensShape {
type Info = ();
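The new `ExternalTokensShape` above follows the shape pattern used throughout this refactor: a named shape that consumes tokens from a cursor and returns either an output or a parse error, with a loop that keeps expanding sub-shapes until one fails. Here is a minimal sketch of that pattern in isolation; `ParseError`, `Cursor`, `ExpandSyntax`, and `WordShape` are simplified hypothetical stand-ins, not the actual nushell types.

```rust
#[derive(Debug)]
struct ParseError {
    expected: &'static str,
    actual: String,
}

struct Cursor<'a> {
    words: &'a [&'a str],
    pos: usize,
}

// A named syntax shape: it either produces an output or reports a mismatch.
trait ExpandSyntax {
    type Output;
    fn name(&self) -> &'static str;
    fn expand_syntax(&self, cursor: &mut Cursor<'_>) -> Result<Self::Output, ParseError>;
}

struct WordShape;

impl ExpandSyntax for WordShape {
    type Output = String;

    fn name(&self) -> &'static str {
        "word"
    }

    fn expand_syntax(&self, cursor: &mut Cursor<'_>) -> Result<String, ParseError> {
        match cursor.words.get(cursor.pos) {
            Some(word) => {
                cursor.pos += 1; // only advance the cursor on success
                Ok((*word).to_string())
            }
            None => Err(ParseError {
                expected: self.name(),
                actual: "end of input".to_string(),
            }),
        }
    }
}

fn main() {
    let words: &[&str] = &["ls", "*.txt"];
    let mut cursor = Cursor { words, pos: 0 };

    // Keep expanding until the shape fails, mirroring the loop in
    // ExternalTokensShape::expand_syntax above.
    let mut out = vec![];
    while let Ok(word) = WordShape.expand_syntax(&mut cursor) {
        out.push(word);
    }
    println!("{:?}", out); // ["ls", "*.txt"]
}
```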
@ -61,6 +81,10 @@ impl ColorSyntax for ExternalTokensShape {
|
||||
type Info = ();
|
||||
type Input = ();
|
||||
|
||||
fn name(&self) -> &'static str {
|
||||
"ExternalTokensShape"
|
||||
}
|
||||
|
||||
fn color_syntax<'a, 'b>(
|
||||
&self,
|
||||
_input: &(),
|
||||
@ -81,70 +105,235 @@ impl ColorSyntax for ExternalTokensShape {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn expand_next_expression(
|
||||
token_nodes: &mut TokensIterator<'_>,
|
||||
) -> Result<Option<Span>, ShellError> {
|
||||
let first = token_nodes.next_non_ws();
|
||||
#[derive(Debug, Copy, Clone)]
|
||||
pub struct ExternalExpressionShape;
|
||||
|
||||
let first = match first {
|
||||
None => return Ok(None),
|
||||
Some(v) => v,
|
||||
};
|
||||
impl ExpandSyntax for ExternalExpressionShape {
|
||||
type Output = Option<Span>;
|
||||
|
||||
let first = triage_external_head(first)?;
|
||||
let mut last = first;
|
||||
|
||||
loop {
|
||||
let continuation = triage_continuation(token_nodes)?;
|
||||
|
||||
if let Some(continuation) = continuation {
|
||||
last = continuation;
|
||||
} else {
|
||||
break;
|
||||
}
|
||||
fn name(&self) -> &'static str {
|
||||
"external expression"
|
||||
}
|
||||
|
||||
Ok(Some(first.until(last)))
|
||||
}
|
||||
fn expand_syntax<'a, 'b>(
|
||||
&self,
|
||||
token_nodes: &'b mut TokensIterator<'a>,
|
||||
context: &ExpandContext,
|
||||
) -> Result<Self::Output, ParseError> {
|
||||
expand_syntax(&MaybeSpaceShape, token_nodes, context)?;
|
||||
|
||||
fn triage_external_head(node: &TokenNode) -> Result<Span, ShellError> {
|
||||
Ok(match node {
|
||||
TokenNode::Token(token) => token.span,
|
||||
TokenNode::Call(_call) => unimplemented!("TODO: OMG"),
|
||||
TokenNode::Nodes(_nodes) => unimplemented!("TODO: OMG"),
|
||||
TokenNode::Delimited(_delimited) => unimplemented!("TODO: OMG"),
|
||||
TokenNode::Pipeline(_pipeline) => unimplemented!("TODO: OMG"),
|
||||
TokenNode::Flag(flag) => flag.span,
|
||||
TokenNode::Whitespace(_whitespace) => {
|
||||
unreachable!("This function should be called after next_non_ws()")
|
||||
let first = expand_atom(
|
||||
token_nodes,
|
||||
"external command",
|
||||
context,
|
||||
ExpansionRule::new().allow_external_command(),
|
||||
)?
|
||||
.span;
|
||||
|
||||
let mut last = first;
|
||||
|
||||
loop {
|
||||
let continuation = expand_expr(&ExternalContinuationShape, token_nodes, context);
|
||||
|
||||
if let Ok(continuation) = continuation {
|
||||
last = continuation.span;
|
||||
} else {
|
||||
break;
|
||||
}
|
||||
}
|
||||
TokenNode::Error(_error) => unimplemented!("TODO: OMG"),
|
||||
})
|
||||
|
||||
Ok(Some(first.until(last)))
|
||||
}
|
||||
}
|
||||
|
||||
fn triage_continuation<'a, 'b>(
|
||||
nodes: &'a mut TokensIterator<'b>,
|
||||
) -> Result<Option<Span>, ShellError> {
|
||||
let mut peeked = nodes.peek_any();
|
||||
#[derive(Debug, Copy, Clone)]
|
||||
struct ExternalExpression;
|
||||
|
||||
let node = match peeked.node {
|
||||
None => return Ok(None),
|
||||
Some(node) => node,
|
||||
};
|
||||
impl ExpandSyntax for ExternalExpression {
|
||||
type Output = Option<Span>;
|
||||
|
||||
match &node {
|
||||
node if node.is_whitespace() => return Ok(None),
|
||||
TokenNode::Token(..) | TokenNode::Flag(..) => {}
|
||||
TokenNode::Call(..) => unimplemented!("call"),
|
||||
TokenNode::Nodes(..) => unimplemented!("nodes"),
|
||||
TokenNode::Delimited(..) => unimplemented!("delimited"),
|
||||
TokenNode::Pipeline(..) => unimplemented!("pipeline"),
|
||||
TokenNode::Whitespace(..) => unimplemented!("whitespace"),
|
||||
TokenNode::Error(..) => unimplemented!("error"),
|
||||
fn name(&self) -> &'static str {
|
||||
"external expression"
|
||||
}
|
||||
|
||||
peeked.commit();
|
||||
Ok(Some(node.span()))
|
||||
fn expand_syntax<'a, 'b>(
|
||||
&self,
|
||||
token_nodes: &'b mut TokensIterator<'a>,
|
||||
context: &ExpandContext,
|
||||
) -> Result<Self::Output, ParseError> {
|
||||
expand_syntax(&MaybeSpaceShape, token_nodes, context)?;
|
||||
|
||||
let first = expand_syntax(&ExternalHeadShape, token_nodes, context)?.span;
|
||||
let mut last = first;
|
||||
|
||||
loop {
|
||||
let continuation = expand_syntax(&ExternalContinuationShape, token_nodes, context);
|
||||
|
||||
if let Ok(continuation) = continuation {
|
||||
last = continuation.span;
|
||||
} else {
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
Ok(Some(first.until(last)))
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Copy, Clone)]
|
||||
struct ExternalHeadShape;
|
||||
|
||||
impl ExpandExpression for ExternalHeadShape {
|
||||
fn name(&self) -> &'static str {
|
||||
"external argument"
|
||||
}
|
||||
|
||||
fn expand_expr<'a, 'b>(
|
||||
&self,
|
||||
token_nodes: &'b mut TokensIterator<'a>,
|
||||
context: &ExpandContext,
|
||||
) -> Result<Expression, ParseError> {
|
||||
match expand_atom(
|
||||
token_nodes,
|
||||
"external argument",
|
||||
context,
|
||||
ExpansionRule::new()
|
||||
.allow_external_word()
|
||||
.treat_size_as_word(),
|
||||
)? {
|
||||
atom => match &atom {
|
||||
Spanned { item, span } => Ok(match item {
|
||||
AtomicToken::Eof { .. } => unreachable!("ExpansionRule doesn't allow EOF"),
|
||||
AtomicToken::Error { .. } => unreachable!("ExpansionRule doesn't allow Error"),
|
||||
AtomicToken::Size { .. } => unreachable!("ExpansionRule treats size as word"),
|
||||
AtomicToken::Whitespace { .. } => {
|
||||
unreachable!("ExpansionRule doesn't allow Whitespace")
|
||||
}
|
||||
AtomicToken::ShorthandFlag { .. }
|
||||
| AtomicToken::LonghandFlag { .. }
|
||||
| AtomicToken::SquareDelimited { .. }
|
||||
| AtomicToken::ParenDelimited { .. }
|
||||
| AtomicToken::BraceDelimited { .. }
|
||||
| AtomicToken::Pipeline { .. } => {
|
||||
return Err(ParseError::mismatch(
|
||||
"external command name",
|
||||
atom.tagged_type_name(),
|
||||
))
|
||||
}
|
||||
AtomicToken::ExternalCommand { command } => {
|
||||
Expression::external_command(*command, *span)
|
||||
}
|
||||
AtomicToken::Number { number } => {
|
||||
Expression::number(number.to_number(context.source()), *span)
|
||||
}
|
||||
AtomicToken::String { body } => Expression::string(*body, *span),
|
||||
AtomicToken::ItVariable { name } => Expression::it_variable(*name, *span),
|
||||
AtomicToken::Variable { name } => Expression::variable(*name, *span),
|
||||
AtomicToken::ExternalWord { .. }
|
||||
| AtomicToken::GlobPattern { .. }
|
||||
| AtomicToken::FilePath { .. }
|
||||
| AtomicToken::Word { .. }
|
||||
| AtomicToken::Dot { .. }
|
||||
| AtomicToken::Operator { .. } => Expression::external_command(*span, *span),
|
||||
}),
|
||||
},
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Copy, Clone)]
|
||||
struct ExternalContinuationShape;
|
||||
|
||||
impl ExpandExpression for ExternalContinuationShape {
|
||||
fn name(&self) -> &'static str {
|
||||
"external argument"
|
||||
}
|
||||
|
||||
fn expand_expr<'a, 'b>(
|
||||
&self,
|
||||
token_nodes: &'b mut TokensIterator<'a>,
|
||||
context: &ExpandContext,
|
||||
) -> Result<Expression, ParseError> {
|
||||
match expand_atom(
|
||||
token_nodes,
|
||||
"external argument",
|
||||
context,
|
||||
ExpansionRule::new()
|
||||
.allow_external_word()
|
||||
.treat_size_as_word(),
|
||||
)? {
|
||||
atom => match &atom {
|
||||
Spanned { item, span } => Ok(match item {
|
||||
AtomicToken::Eof { .. } => unreachable!("ExpansionRule doesn't allow EOF"),
|
||||
AtomicToken::Error { .. } => unreachable!("ExpansionRule doesn't allow Error"),
|
||||
AtomicToken::Number { number } => {
|
||||
Expression::number(number.to_number(context.source()), *span)
|
||||
}
|
||||
AtomicToken::Size { .. } => unreachable!("ExpansionRule treats size as word"),
|
||||
AtomicToken::ExternalCommand { .. } => {
|
||||
unreachable!("ExpansionRule doesn't allow ExternalCommand")
|
||||
}
|
||||
AtomicToken::Whitespace { .. } => {
|
||||
unreachable!("ExpansionRule doesn't allow Whitespace")
|
||||
}
|
||||
AtomicToken::String { body } => Expression::string(*body, *span),
|
||||
AtomicToken::ItVariable { name } => Expression::it_variable(*name, *span),
|
||||
AtomicToken::Variable { name } => Expression::variable(*name, *span),
|
||||
AtomicToken::ExternalWord { .. }
|
||||
| AtomicToken::GlobPattern { .. }
|
||||
| AtomicToken::FilePath { .. }
|
||||
| AtomicToken::Word { .. }
|
||||
| AtomicToken::ShorthandFlag { .. }
|
||||
| AtomicToken::LonghandFlag { .. }
|
||||
| AtomicToken::Dot { .. }
|
||||
| AtomicToken::Operator { .. } => Expression::bare(*span),
|
||||
AtomicToken::SquareDelimited { .. }
|
||||
| AtomicToken::ParenDelimited { .. }
|
||||
| AtomicToken::BraceDelimited { .. }
|
||||
| AtomicToken::Pipeline { .. } => {
|
||||
return Err(ParseError::mismatch(
|
||||
"external argument",
|
||||
atom.tagged_type_name(),
|
||||
))
|
||||
}
|
||||
}),
|
||||
},
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(coloring_in_tokens)]
|
||||
impl ColorSyntax for ExternalExpression {
|
||||
type Info = ExternalExpressionResult;
|
||||
type Input = ();
|
||||
|
||||
fn name(&self) -> &'static str {
|
||||
"ExternalExpression"
|
||||
}
|
||||
|
||||
fn color_syntax<'a, 'b>(
|
||||
&self,
|
||||
_input: &(),
|
||||
token_nodes: &'b mut TokensIterator<'a>,
|
||||
context: &ExpandContext,
|
||||
) -> ExternalExpressionResult {
|
||||
let atom = match expand_atom(
|
||||
token_nodes,
|
||||
"external word",
|
||||
context,
|
||||
ExpansionRule::permissive(),
|
||||
) {
|
||||
Err(_) => unreachable!("TODO: separate infallible expand_atom"),
|
||||
Ok(Spanned {
|
||||
item: AtomicToken::Eof { .. },
|
||||
..
|
||||
}) => return ExternalExpressionResult::Eof,
|
||||
Ok(atom) => atom,
|
||||
};
|
||||
|
||||
token_nodes.mutate_shapes(|shapes| atom.color_tokens(shapes));
|
||||
return ExternalExpressionResult::Processed;
|
||||
}
|
||||
}
|
||||
|
||||
#[must_use]
|
||||
@ -153,9 +342,6 @@ enum ExternalExpressionResult {
|
||||
Processed,
|
||||
}
|
||||
|
||||
#[derive(Debug, Copy, Clone)]
|
||||
struct ExternalExpression;
|
||||
|
||||
#[cfg(not(coloring_in_tokens))]
|
||||
impl ColorSyntax for ExternalExpression {
|
||||
type Info = ExternalExpressionResult;
|
||||
@ -186,33 +372,3 @@ impl ColorSyntax for ExternalExpression {
|
||||
return ExternalExpressionResult::Processed;
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(coloring_in_tokens)]
|
||||
impl ColorSyntax for ExternalExpression {
|
||||
type Info = ExternalExpressionResult;
|
||||
type Input = ();
|
||||
|
||||
fn color_syntax<'a, 'b>(
|
||||
&self,
|
||||
_input: &(),
|
||||
token_nodes: &'b mut TokensIterator<'a>,
|
||||
context: &ExpandContext,
|
||||
) -> ExternalExpressionResult {
|
||||
let atom = match expand_atom(
|
||||
token_nodes,
|
||||
"external word",
|
||||
context,
|
||||
ExpansionRule::permissive(),
|
||||
) {
|
||||
Err(_) => unreachable!("TODO: separate infallible expand_atom"),
|
||||
Ok(Spanned {
|
||||
item: AtomicToken::Eof { .. },
|
||||
..
|
||||
}) => return ExternalExpressionResult::Eof,
|
||||
Ok(atom) => atom,
|
||||
};
|
||||
|
||||
atom.color_tokens(token_nodes.mut_shapes());
|
||||
return ExternalExpressionResult::Processed;
|
||||
}
|
||||
}
|
||||
|
@@ -12,8 +12,8 @@ pub struct ExternalCommand {
pub(crate) name: Span,
}

impl ToDebug for ExternalCommand {
fn fmt_debug(&self, f: &mut fmt::Formatter, source: &str) -> fmt::Result {
impl FormatDebug for ExternalCommand {
fn fmt_debug(&self, f: &mut DebugFormatter, source: &str) -> fmt::Result {
write!(f, "{}", self.name.slice(source))?;

Ok(())
@@ -21,8 +21,8 @@ pub struct NamedArguments {
pub(crate) named: IndexMap<String, NamedValue>,
}

impl ToDebug for NamedArguments {
fn fmt_debug(&self, f: &mut fmt::Formatter, source: &str) -> fmt::Result {
impl FormatDebug for NamedArguments {
fn fmt_debug(&self, f: &mut DebugFormatter, source: &str) -> fmt::Result {
for (name, value) in &self.named {
match value {
NamedValue::AbsentSwitch => continue,
@@ -44,8 +44,8 @@ impl Path {
}
}

impl ToDebug for Path {
fn fmt_debug(&self, f: &mut fmt::Formatter, source: &str) -> fmt::Result {
impl FormatDebug for Path {
fn fmt_debug(&self, f: &mut DebugFormatter, source: &str) -> fmt::Result {
write!(f, "{}", self.head.debug(source))?;

for part in &self.tail {
File diff suppressed because it is too large
@ -6,7 +6,8 @@ use crate::parser::{
|
||||
hir::syntax_shape::{
|
||||
color_fallible_syntax, color_syntax_with, continue_expression, expand_expr, expand_syntax,
|
||||
DelimitedShape, ExpandContext, ExpandExpression, ExpressionContinuationShape,
|
||||
ExpressionListShape, FallibleColorSyntax, MemberShape, PathTailShape, VariablePathShape,
|
||||
ExpressionListShape, FallibleColorSyntax, MemberShape, ParseError, PathTailShape,
|
||||
VariablePathShape,
|
||||
},
|
||||
hir::tokens_iterator::TokensIterator,
|
||||
parse::token_tree::Delimiter,
|
||||
@ -42,7 +43,7 @@ impl FallibleColorSyntax for AnyBlockShape {
|
||||
match block {
|
||||
// If so, color it as a block
|
||||
Some((children, spans)) => {
|
||||
let mut token_nodes = TokensIterator::new(children.item, context.span, false);
|
||||
let mut token_nodes = TokensIterator::new(children.item, children.span, false);
|
||||
color_syntax_with(
|
||||
&DelimitedShape,
|
||||
&(Delimiter::Brace, spans.0, spans.1),
|
||||
@ -66,6 +67,10 @@ impl FallibleColorSyntax for AnyBlockShape {
|
||||
type Info = ();
|
||||
type Input = ();
|
||||
|
||||
fn name(&self) -> &'static str {
|
||||
"AnyBlockShape"
|
||||
}
|
||||
|
||||
fn color_syntax<'a, 'b>(
|
||||
&self,
|
||||
_input: &(),
|
||||
@ -85,13 +90,14 @@ impl FallibleColorSyntax for AnyBlockShape {
|
||||
match block {
|
||||
// If so, color it as a block
|
||||
Some((children, spans)) => {
|
||||
let mut token_nodes = TokensIterator::new(children.item, context.span, false);
|
||||
color_syntax_with(
|
||||
&DelimitedShape,
|
||||
&(Delimiter::Brace, spans.0, spans.1),
|
||||
&mut token_nodes,
|
||||
context,
|
||||
);
|
||||
token_nodes.child(children, |token_nodes| {
|
||||
color_syntax_with(
|
||||
&DelimitedShape,
|
||||
&(Delimiter::Brace, spans.0, spans.1),
|
||||
token_nodes,
|
||||
context,
|
||||
);
|
||||
});
|
||||
|
||||
return Ok(());
|
||||
}
|
||||
@ -104,11 +110,15 @@ impl FallibleColorSyntax for AnyBlockShape {
|
||||
}
|
||||
|
||||
impl ExpandExpression for AnyBlockShape {
|
||||
fn name(&self) -> &'static str {
|
||||
"any block"
|
||||
}
|
||||
|
||||
fn expand_expr<'a, 'b>(
|
||||
&self,
|
||||
token_nodes: &mut TokensIterator<'_>,
|
||||
context: &ExpandContext,
|
||||
) -> Result<hir::Expression, ShellError> {
|
||||
) -> Result<hir::Expression, ParseError> {
|
||||
let block = token_nodes.peek_non_ws().not_eof("block")?;
|
||||
|
||||
// is it just a block?
|
||||
@ -116,11 +126,11 @@ impl ExpandExpression for AnyBlockShape {
|
||||
|
||||
match block {
|
||||
Some((block, _tags)) => {
|
||||
let mut iterator = TokensIterator::new(&block.item, context.span, false);
|
||||
let mut iterator = TokensIterator::new(&block.item, block.span, false);
|
||||
|
||||
let exprs = expand_syntax(&ExpressionListShape, &mut iterator, context)?;
|
||||
|
||||
return Ok(hir::RawExpression::Block(exprs).spanned(block.span));
|
||||
return Ok(hir::RawExpression::Block(exprs.item).spanned(block.span));
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
@ -169,6 +179,10 @@ impl FallibleColorSyntax for ShorthandBlock {
|
||||
type Info = ();
|
||||
type Input = ();
|
||||
|
||||
fn name(&self) -> &'static str {
|
||||
"ShorthandBlock"
|
||||
}
|
||||
|
||||
fn color_syntax<'a, 'b>(
|
||||
&self,
|
||||
_input: &(),
|
||||
@ -195,14 +209,18 @@ impl FallibleColorSyntax for ShorthandBlock {
|
||||
}
|
||||
|
||||
impl ExpandExpression for ShorthandBlock {
|
||||
fn name(&self) -> &'static str {
|
||||
"shorthand block"
|
||||
}
|
||||
|
||||
fn expand_expr<'a, 'b>(
|
||||
&self,
|
||||
token_nodes: &'b mut TokensIterator<'a>,
|
||||
context: &ExpandContext,
|
||||
) -> Result<hir::Expression, ShellError> {
|
||||
) -> Result<hir::Expression, ParseError> {
|
||||
let path = expand_expr(&ShorthandPath, token_nodes, context)?;
|
||||
let start = path.span;
|
||||
let expr = continue_expression(path, token_nodes, context)?;
|
||||
let expr = continue_expression(path, token_nodes, context);
|
||||
let end = expr.span;
|
||||
let block = hir::RawExpression::Block(vec![expr]).spanned(start.until(end));
|
||||
|
||||
@ -264,6 +282,10 @@ impl FallibleColorSyntax for ShorthandPath {
|
||||
type Info = ();
|
||||
type Input = ();
|
||||
|
||||
fn name(&self) -> &'static str {
|
||||
"ShorthandPath"
|
||||
}
|
||||
|
||||
fn color_syntax<'a, 'b>(
|
||||
&self,
|
||||
_input: &(),
|
||||
@ -304,11 +326,15 @@ impl FallibleColorSyntax for ShorthandPath {
|
||||
}
|
||||
|
||||
impl ExpandExpression for ShorthandPath {
|
||||
fn name(&self) -> &'static str {
|
||||
"shorthand path"
|
||||
}
|
||||
|
||||
fn expand_expr<'a, 'b>(
|
||||
&self,
|
||||
token_nodes: &'b mut TokensIterator<'a>,
|
||||
context: &ExpandContext,
|
||||
) -> Result<hir::Expression, ShellError> {
|
||||
) -> Result<hir::Expression, ParseError> {
|
||||
// if it's a variable path, that's the head part
|
||||
let path = expand_expr(&VariablePathShape, token_nodes, context);
|
||||
|
||||
@ -326,7 +352,7 @@ impl ExpandExpression for ShorthandPath {
|
||||
|
||||
match tail {
|
||||
Err(_) => return Ok(head),
|
||||
Ok((tail, _)) => {
|
||||
Ok(Spanned { item: tail, .. }) => {
|
||||
// For each member that `PathTailShape` expanded, join it onto the existing expression
|
||||
// to form a new path
|
||||
for member in tail {
|
||||
@ -433,11 +459,15 @@ impl FallibleColorSyntax for ShorthandHeadShape {
|
||||
}
|
||||
|
||||
impl ExpandExpression for ShorthandHeadShape {
|
||||
fn name(&self) -> &'static str {
|
||||
"shorthand head"
|
||||
}
|
||||
|
||||
fn expand_expr<'a, 'b>(
|
||||
&self,
|
||||
token_nodes: &'b mut TokensIterator<'a>,
|
||||
context: &ExpandContext,
|
||||
) -> Result<hir::Expression, ShellError> {
|
||||
) -> Result<hir::Expression, ParseError> {
|
||||
// A shorthand path must not be at EOF
|
||||
let peeked = token_nodes.peek_non_ws().not_eof("shorthand path")?;
|
||||
|
||||
@ -482,7 +512,7 @@ impl ExpandExpression for ShorthandHeadShape {
|
||||
|
||||
// Any other token is not a valid bare head
|
||||
other => {
|
||||
return Err(ShellError::type_error(
|
||||
return Err(ParseError::mismatch(
|
||||
"shorthand path",
|
||||
other.tagged_type_name(),
|
||||
))
|
||||
|
@ -12,7 +12,7 @@ use crate::parser::hir::syntax_shape::{
|
||||
color_delimited_square, color_fallible_syntax, color_fallible_syntax_with, expand_atom,
|
||||
expand_delimited_square, expand_expr, expand_syntax, AtomicToken, BareShape, ColorableDotShape,
|
||||
DotShape, ExpandContext, ExpandExpression, ExpandSyntax, ExpansionRule, ExpressionContinuation,
|
||||
ExpressionContinuationShape, FallibleColorSyntax, FlatShape,
|
||||
ExpressionContinuationShape, FallibleColorSyntax, FlatShape, ParseError,
|
||||
};
|
||||
use crate::parser::{
|
||||
hir,
|
||||
@ -25,15 +25,19 @@ use std::path::PathBuf;
|
||||
pub struct AnyExpressionShape;
|
||||
|
||||
impl ExpandExpression for AnyExpressionShape {
|
||||
fn name(&self) -> &'static str {
|
||||
"any expression"
|
||||
}
|
||||
|
||||
fn expand_expr<'a, 'b>(
|
||||
&self,
|
||||
token_nodes: &mut TokensIterator<'_>,
|
||||
context: &ExpandContext,
|
||||
) -> Result<hir::Expression, ShellError> {
|
||||
) -> Result<hir::Expression, ParseError> {
|
||||
// Look for an expression at the cursor
|
||||
let head = expand_expr(&AnyExpressionStartShape, token_nodes, context)?;
|
||||
|
||||
continue_expression(head, token_nodes, context)
|
||||
Ok(continue_expression(head, token_nodes, context))
|
||||
}
|
||||
}
|
||||
|
||||
@ -69,6 +73,10 @@ impl FallibleColorSyntax for AnyExpressionShape {
|
||||
type Info = ();
|
||||
type Input = ();
|
||||
|
||||
fn name(&self) -> &'static str {
|
||||
"AnyExpressionShape"
|
||||
}
|
||||
|
||||
fn color_syntax<'a, 'b>(
|
||||
&self,
|
||||
_input: &(),
|
||||
@ -94,14 +102,14 @@ pub(crate) fn continue_expression(
|
||||
mut head: hir::Expression,
|
||||
token_nodes: &mut TokensIterator<'_>,
|
||||
context: &ExpandContext,
|
||||
) -> Result<hir::Expression, ShellError> {
|
||||
) -> hir::Expression {
|
||||
loop {
|
||||
// Check to see whether there's any continuation after the head expression
|
||||
let continuation = expand_syntax(&ExpressionContinuationShape, token_nodes, context);
|
||||
|
||||
match continuation {
|
||||
// If there's no continuation, return the head
|
||||
Err(_) => return Ok(head),
|
||||
Err(_) => return head,
|
||||
// Otherwise, form a new expression by combining the head with the continuation
|
||||
Ok(continuation) => match continuation {
|
||||
// If the continuation is a `.member`, form a path with the new member
|
||||
@ -170,11 +178,15 @@ pub(crate) fn continue_coloring_expression(
|
||||
pub struct AnyExpressionStartShape;
|
||||
|
||||
impl ExpandExpression for AnyExpressionStartShape {
|
||||
fn name(&self) -> &'static str {
|
||||
"any expression start"
|
||||
}
|
||||
|
||||
fn expand_expr<'a, 'b>(
|
||||
&self,
|
||||
token_nodes: &mut TokensIterator<'_>,
|
||||
context: &ExpandContext,
|
||||
) -> Result<hir::Expression, ShellError> {
|
||||
) -> Result<hir::Expression, ParseError> {
|
||||
let atom = expand_atom(token_nodes, "expression", context, ExpansionRule::new())?;
|
||||
|
||||
match atom.item {
|
||||
@ -267,6 +279,10 @@ impl FallibleColorSyntax for AnyExpressionStartShape {
|
||||
type Info = ();
|
||||
type Input = ();
|
||||
|
||||
fn name(&self) -> &'static str {
|
||||
"AnyExpressionStartShape"
|
||||
}
|
||||
|
||||
fn color_syntax<'a, 'b>(
|
||||
&self,
|
||||
_input: &(),
|
||||
@ -315,7 +331,7 @@ impl FallibleColorSyntax for AnyExpressionStartShape {
|
||||
token_nodes.color_shape(FlatShape::Word.spanned(atom.span));
|
||||
}
|
||||
|
||||
_ => atom.color_tokens(token_nodes.mut_shapes()),
|
||||
_ => token_nodes.mutate_shapes(|shapes| atom.color_tokens(shapes)),
|
||||
}
|
||||
|
||||
Ok(())
|
||||
@ -387,13 +403,17 @@ impl FallibleColorSyntax for BareTailShape {
|
||||
type Info = ();
|
||||
type Input = ();
|
||||
|
||||
fn name(&self) -> &'static str {
|
||||
"BareTailShape"
|
||||
}
|
||||
|
||||
fn color_syntax<'a, 'b>(
|
||||
&self,
|
||||
_input: &(),
|
||||
token_nodes: &'b mut TokensIterator<'a>,
|
||||
context: &ExpandContext,
|
||||
) -> Result<(), ShellError> {
|
||||
let len = token_nodes.shapes().len();
|
||||
let len = token_nodes.state().shapes().len();
|
||||
|
||||
loop {
|
||||
let word =
|
||||
@ -422,7 +442,7 @@ impl FallibleColorSyntax for BareTailShape {
|
||||
}
|
||||
}
|
||||
|
||||
if token_nodes.shapes().len() > len {
|
||||
if token_nodes.state().shapes().len() > len {
|
||||
Ok(())
|
||||
} else {
|
||||
Err(ShellError::syntax_error(
|
||||
@ -433,13 +453,17 @@ impl FallibleColorSyntax for BareTailShape {
|
||||
}
|
||||
|
||||
impl ExpandSyntax for BareTailShape {
|
||||
fn name(&self) -> &'static str {
|
||||
"word continuation"
|
||||
}
|
||||
|
||||
type Output = Option<Span>;
|
||||
|
||||
fn expand_syntax<'a, 'b>(
|
||||
&self,
|
||||
token_nodes: &'b mut TokensIterator<'a>,
|
||||
context: &ExpandContext,
|
||||
) -> Result<Option<Span>, ShellError> {
|
||||
) -> Result<Option<Span>, ParseError> {
|
||||
let mut end: Option<Span> = None;
|
||||
|
||||
loop {
|
||||
|
@@ -90,40 +90,40 @@ impl<'tokens> SpannedAtomicToken<'tokens> {
&self,
context: &ExpandContext,
expected: &'static str,
) -> Result<hir::Expression, ShellError> {
) -> Result<hir::Expression, ParseError> {
Ok(match &self.item {
AtomicToken::Eof { .. } => {
return Err(ShellError::type_error(
return Err(ParseError::mismatch(
expected,
"eof atomic token".tagged(self.span),
))
}
AtomicToken::Error { .. } => {
return Err(ShellError::type_error(
return Err(ParseError::mismatch(
expected,
"eof atomic token".tagged(self.span),
))
}
AtomicToken::Operator { .. } => {
return Err(ShellError::type_error(
expected,
"operator".tagged(self.span),
))
return Err(ParseError::mismatch(expected, "operator".tagged(self.span)))
}
AtomicToken::ShorthandFlag { .. } => {
return Err(ShellError::type_error(
return Err(ParseError::mismatch(
expected,
"shorthand flag".tagged(self.span),
))
}
AtomicToken::LonghandFlag { .. } => {
return Err(ShellError::type_error(expected, "flag".tagged(self.span)))
return Err(ParseError::mismatch(expected, "flag".tagged(self.span)))
}
AtomicToken::Whitespace { .. } => {
return Err(ShellError::unimplemented("whitespace in AtomicToken"))
return Err(ParseError::mismatch(
expected,
"whitespace".tagged(self.span),
))
}
AtomicToken::Dot { .. } => {
return Err(ShellError::type_error(expected, "dot".tagged(self.span)))
return Err(ParseError::mismatch(expected, "dot".tagged(self.span)))
}
AtomicToken::Number { number } => {
Expression::number(number.to_number(context.source), self.span)

@@ -142,7 +142,10 @@ impl<'tokens> SpannedAtomicToken<'tokens> {
Expression::external_command(*command, self.span)
}
AtomicToken::ExternalWord { text } => Expression::string(*text, self.span),
AtomicToken::GlobPattern { pattern } => Expression::pattern(*pattern),
AtomicToken::GlobPattern { pattern } => Expression::pattern(
expand_file_path(pattern.slice(context.source), context).to_string_lossy(),
self.span,
),
AtomicToken::Word { text } => Expression::string(*text, *text),
AtomicToken::SquareDelimited { .. } => unimplemented!("into_hir"),
AtomicToken::ParenDelimited { .. } => unimplemented!("into_hir"),
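The constant pattern in this hunk is replacing `ShellError::type_error(expected, actual)` with `ParseError::mismatch(expected, actual)`: a parser-level error that pairs what was expected with a tagged description of what was actually found. Below is a small sketch of such an error type; `Span`, `Tagged`, and this `ParseError` are simplified hypothetical stand-ins for illustration, not the real nushell definitions.

```rust
use std::fmt;

#[derive(Debug, Clone, Copy)]
struct Span {
    start: usize,
    end: usize,
}

// A value plus the location it was found at.
#[derive(Debug)]
struct Tagged<T> {
    item: T,
    span: Span,
}

#[derive(Debug)]
enum ParseError {
    Mismatch {
        expected: &'static str,
        actual: Tagged<String>,
    },
}

impl ParseError {
    fn mismatch(expected: &'static str, actual: Tagged<String>) -> ParseError {
        ParseError::Mismatch { expected, actual }
    }
}

impl fmt::Display for ParseError {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            ParseError::Mismatch { expected, actual } => write!(
                f,
                "expected {}, found {} at {}..{}",
                expected, actual.item, actual.span.start, actual.span.end
            ),
        }
    }
}

fn main() {
    let err = ParseError::mismatch(
        "external command name",
        Tagged { item: "operator".to_string(), span: Span { start: 3, end: 4 } },
    );
    println!("{}", err); // expected external command name, found operator at 3..4
}
```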
@ -378,7 +381,7 @@ pub fn expand_atom<'me, 'content>(
|
||||
expected: &'static str,
|
||||
context: &ExpandContext,
|
||||
rule: ExpansionRule,
|
||||
) -> Result<SpannedAtomicToken<'content>, ShellError> {
|
||||
) -> Result<SpannedAtomicToken<'content>, ParseError> {
|
||||
if token_nodes.at_end() {
|
||||
match rule.allow_eof {
|
||||
true => {
|
||||
@ -387,7 +390,7 @@ pub fn expand_atom<'me, 'content>(
|
||||
}
|
||||
.spanned(Span::unknown()))
|
||||
}
|
||||
false => return Err(ShellError::unexpected_eof("anything", Tag::unknown())),
|
||||
false => return Err(ParseError::unexpected_eof("anything", Span::unknown())),
|
||||
}
|
||||
}
|
||||
|
||||
@ -512,12 +515,13 @@ pub fn expand_atom<'me, 'content>(
|
||||
|
||||
// if whitespace is disallowed, return an error
|
||||
WhitespaceHandling::RejectWhitespace => {
|
||||
return Err(ShellError::syntax_error("Unexpected whitespace".tagged(
|
||||
Tag {
|
||||
return Err(ParseError::mismatch(
|
||||
expected,
|
||||
"whitespace".tagged(Tag {
|
||||
span: *span,
|
||||
anchor: None,
|
||||
},
|
||||
)))
|
||||
}),
|
||||
))
|
||||
}
|
||||
},
|
||||
|
||||
@ -541,7 +545,7 @@ pub fn expand_atom<'me, 'content>(
|
||||
RawToken::Operator(_) if !rule.allow_operator => return Err(err.error()),
|
||||
// rule.allow_external_command
|
||||
RawToken::ExternalCommand(_) if !rule.allow_external_command => {
|
||||
return Err(ShellError::type_error(
|
||||
return Err(ParseError::mismatch(
|
||||
expected,
|
||||
token.type_name().tagged(Tag {
|
||||
span: token_span,
|
||||
@ -551,10 +555,13 @@ pub fn expand_atom<'me, 'content>(
|
||||
}
|
||||
// rule.allow_external_word
|
||||
RawToken::ExternalWord if !rule.allow_external_word => {
|
||||
return Err(ShellError::invalid_external_word(Tag {
|
||||
span: token_span,
|
||||
anchor: None,
|
||||
}))
|
||||
return Err(ParseError::mismatch(
|
||||
expected,
|
||||
"external word".tagged(Tag {
|
||||
span: token_span,
|
||||
anchor: None,
|
||||
}),
|
||||
))
|
||||
}
|
||||
|
||||
RawToken::Number(number) => AtomicToken::Number { number }.spanned(token_span),
|
||||
|
@ -8,12 +8,15 @@ pub fn expand_delimited_square(
|
||||
children: &Vec<TokenNode>,
|
||||
span: Span,
|
||||
context: &ExpandContext,
|
||||
) -> Result<hir::Expression, ShellError> {
|
||||
) -> Result<hir::Expression, ParseError> {
|
||||
let mut tokens = TokensIterator::new(&children, span, false);
|
||||
|
||||
let list = expand_syntax(&ExpressionListShape, &mut tokens, context);
|
||||
|
||||
Ok(hir::Expression::list(list?, Tag { span, anchor: None }))
|
||||
Ok(hir::Expression::list(
|
||||
list?.item,
|
||||
Tag { span, anchor: None },
|
||||
))
|
||||
}
|
||||
|
||||
#[cfg(not(coloring_in_tokens))]
|
||||
@ -66,6 +69,11 @@ impl ColorSyntax for DelimitedShape {
|
||||
impl ColorSyntax for DelimitedShape {
|
||||
type Info = ();
|
||||
type Input = (Delimiter, Span, Span);
|
||||
|
||||
fn name(&self) -> &'static str {
|
||||
"DelimitedShape"
|
||||
}
|
||||
|
||||
fn color_syntax<'a, 'b>(
|
||||
&self,
|
||||
(delimiter, open, close): &(Delimiter, Span, Span),
|
||||
|
@ -1,6 +1,7 @@
|
||||
use crate::parser::hir::syntax_shape::expression::atom::{expand_atom, AtomicToken, ExpansionRule};
|
||||
use crate::parser::hir::syntax_shape::{
|
||||
expression::expand_file_path, ExpandContext, ExpandExpression, FallibleColorSyntax, FlatShape,
|
||||
ParseError,
|
||||
};
|
||||
use crate::parser::{hir, hir::TokensIterator};
|
||||
use crate::prelude::*;
|
||||
@ -52,6 +53,10 @@ impl FallibleColorSyntax for FilePathShape {
|
||||
type Info = ();
|
||||
type Input = ();
|
||||
|
||||
fn name(&self) -> &'static str {
|
||||
"FilePathShape"
|
||||
}
|
||||
|
||||
fn color_syntax<'a, 'b>(
|
||||
&self,
|
||||
_input: &(),
|
||||
@ -78,7 +83,7 @@ impl FallibleColorSyntax for FilePathShape {
|
||||
token_nodes.color_shape(FlatShape::Path.spanned(atom.span));
|
||||
}
|
||||
|
||||
_ => atom.color_tokens(token_nodes.mut_shapes()),
|
||||
_ => token_nodes.mutate_shapes(|shapes| atom.color_tokens(shapes)),
|
||||
}
|
||||
|
||||
Ok(())
|
||||
@ -86,11 +91,15 @@ impl FallibleColorSyntax for FilePathShape {
|
||||
}
|
||||
|
||||
impl ExpandExpression for FilePathShape {
|
||||
fn name(&self) -> &'static str {
|
||||
"file path"
|
||||
}
|
||||
|
||||
fn expand_expr<'a, 'b>(
|
||||
&self,
|
||||
token_nodes: &mut TokensIterator<'_>,
|
||||
context: &ExpandContext,
|
||||
) -> Result<hir::Expression, ShellError> {
|
||||
) -> Result<hir::Expression, ParseError> {
|
||||
let atom = expand_atom(token_nodes, "file path", context, ExpansionRule::new())?;
|
||||
|
||||
match atom.item {
|
||||
|
@ -1,4 +1,4 @@
|
||||
use crate::errors::ShellError;
|
||||
use crate::errors::ParseError;
|
||||
#[cfg(not(coloring_in_tokens))]
|
||||
use crate::parser::hir::syntax_shape::FlatShape;
|
||||
use crate::parser::{
|
||||
@ -10,24 +10,36 @@ use crate::parser::{
|
||||
},
|
||||
hir::TokensIterator,
|
||||
};
|
||||
#[cfg(not(coloring_in_tokens))]
|
||||
use crate::Spanned;
|
||||
use crate::{DebugFormatter, FormatDebug, Spanned, SpannedItem};
|
||||
use std::fmt;
|
||||
|
||||
#[derive(Debug, Copy, Clone)]
|
||||
pub struct ExpressionListShape;
|
||||
|
||||
impl FormatDebug for Spanned<Vec<hir::Expression>> {
|
||||
fn fmt_debug(&self, f: &mut DebugFormatter, source: &str) -> fmt::Result {
|
||||
FormatDebug::fmt_debug(&self.item, f, source)
|
||||
}
|
||||
}
|
||||
|
||||
impl ExpandSyntax for ExpressionListShape {
|
||||
type Output = Vec<hir::Expression>;
|
||||
type Output = Spanned<Vec<hir::Expression>>;
|
||||
|
||||
fn name(&self) -> &'static str {
|
||||
"expression list"
|
||||
}
|
||||
|
||||
fn expand_syntax<'a, 'b>(
|
||||
&self,
|
||||
token_nodes: &mut TokensIterator<'_>,
|
||||
context: &ExpandContext,
|
||||
) -> Result<Vec<hir::Expression>, ShellError> {
|
||||
) -> Result<Spanned<Vec<hir::Expression>>, ParseError> {
|
||||
let mut exprs = vec![];
|
||||
|
||||
let start = token_nodes.span_at_cursor();
|
||||
|
||||
if token_nodes.at_end_possible_ws() {
|
||||
return Ok(exprs);
|
||||
return Ok(exprs.spanned(start));
|
||||
}
|
||||
|
||||
let expr = expand_expr(&maybe_spaced(AnyExpressionShape), token_nodes, context)?;
|
||||
@ -36,7 +48,8 @@ impl ExpandSyntax for ExpressionListShape {
|
||||
|
||||
loop {
|
||||
if token_nodes.at_end_possible_ws() {
|
||||
return Ok(exprs);
|
||||
let end = token_nodes.span_at_cursor();
|
||||
return Ok(exprs.spanned(start.until(end)));
|
||||
}
|
||||
|
||||
let expr = expand_expr(&spaced(AnyExpressionShape), token_nodes, context)?;
|
||||
@ -121,6 +134,10 @@ impl ColorSyntax for ExpressionListShape {
|
||||
type Info = ();
|
||||
type Input = ();
|
||||
|
||||
fn name(&self) -> &'static str {
|
||||
"ExpressionListShape"
|
||||
}
|
||||
|
||||
/// The intent of this method is to fully color an expression list shape infallibly.
|
||||
/// This means that if we can't expand a token into an expression, we fall back to
|
||||
/// a simpler coloring strategy.
|
||||
@ -148,12 +165,12 @@ impl ColorSyntax for ExpressionListShape {
|
||||
}
|
||||
|
||||
if backoff {
|
||||
let len = token_nodes.shapes().len();
|
||||
let len = token_nodes.state().shapes().len();
|
||||
|
||||
// If we previously encountered a parsing error, use backoff coloring mode
|
||||
color_syntax(&SimplestExpression, token_nodes, context);
|
||||
|
||||
if len == token_nodes.shapes().len() && !token_nodes.at_end() {
|
||||
if len == token_nodes.state().shapes().len() && !token_nodes.at_end() {
|
||||
// This should never happen, but if it does, a panic is better than an infinite loop
|
||||
panic!("Unexpected tokens left that couldn't be colored even with SimplestExpression")
|
||||
}
|
||||
@ -222,6 +239,10 @@ impl ColorSyntax for BackoffColoringMode {
|
||||
type Info = ();
|
||||
type Input = ();
|
||||
|
||||
fn name(&self) -> &'static str {
|
||||
"BackoffColoringMode"
|
||||
}
|
||||
|
||||
fn color_syntax<'a, 'b>(
|
||||
&self,
|
||||
_input: &Self::Input,
|
||||
@ -233,12 +254,12 @@ impl ColorSyntax for BackoffColoringMode {
|
||||
break;
|
||||
}
|
||||
|
||||
let len = token_nodes.shapes().len();
|
||||
let len = token_nodes.state().shapes().len();
|
||||
color_syntax(&SimplestExpression, token_nodes, context);
|
||||
|
||||
if len == token_nodes.shapes().len() && !token_nodes.at_end() {
|
||||
if len == token_nodes.state().shapes().len() && !token_nodes.at_end() {
|
||||
// This shouldn't happen, but if it does, a panic is better than an infinite loop
|
||||
panic!("SimplestExpression failed to consume any tokens, but it's not at the end. This is unexpected\n== token nodes==\n{:#?}\n\n== shapes ==\n{:#?}", token_nodes, token_nodes.shapes());
|
||||
panic!("SimplestExpression failed to consume any tokens, but it's not at the end. This is unexpected\n== token nodes==\n{:#?}\n\n== shapes ==\n{:#?}", token_nodes, token_nodes.state().shapes());
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -281,6 +302,10 @@ impl ColorSyntax for SimplestExpression {
|
||||
type Info = ();
|
||||
type Input = ();
|
||||
|
||||
fn name(&self) -> &'static str {
|
||||
"SimplestExpression"
|
||||
}
|
||||
|
||||
fn color_syntax<'a, 'b>(
|
||||
&self,
|
||||
_input: &(),
|
||||
@ -296,7 +321,7 @@ impl ColorSyntax for SimplestExpression {
|
||||
|
||||
match atom {
|
||||
Err(_) => {}
|
||||
Ok(atom) => atom.color_tokens(token_nodes.mut_shapes()),
|
||||
Ok(atom) => token_nodes.mutate_shapes(|shapes| atom.color_tokens(shapes)),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -1,7 +1,8 @@
|
||||
use crate::parser::hir::syntax_shape::{
|
||||
expand_atom, parse_single_node, ExpandContext, ExpandExpression, ExpansionRule,
|
||||
FallibleColorSyntax, FlatShape,
|
||||
FallibleColorSyntax, FlatShape, ParseError, TestSyntax,
|
||||
};
|
||||
use crate::parser::hir::tokens_iterator::Peeked;
|
||||
use crate::parser::{
|
||||
hir,
|
||||
hir::{RawNumber, TokensIterator},
|
||||
@ -13,11 +14,15 @@ use crate::prelude::*;
|
||||
pub struct NumberShape;
|
||||
|
||||
impl ExpandExpression for NumberShape {
|
||||
fn name(&self) -> &'static str {
|
||||
"number"
|
||||
}
|
||||
|
||||
fn expand_expr<'a, 'b>(
|
||||
&self,
|
||||
token_nodes: &mut TokensIterator<'_>,
|
||||
context: &ExpandContext,
|
||||
) -> Result<hir::Expression, ShellError> {
|
||||
) -> Result<hir::Expression, ParseError> {
|
||||
parse_single_node(token_nodes, "Number", |token, token_span, err| {
|
||||
Ok(match token {
|
||||
RawToken::GlobPattern | RawToken::Operator(..) => return Err(err.error()),
|
||||
@ -28,10 +33,13 @@ impl ExpandExpression for NumberShape {
|
||||
hir::Expression::external_command(tag, token_span)
|
||||
}
|
||||
RawToken::ExternalWord => {
|
||||
return Err(ShellError::invalid_external_word(Tag {
|
||||
span: token_span,
|
||||
anchor: None,
|
||||
}))
|
||||
return Err(ParseError::mismatch(
|
||||
"number",
|
||||
"syntax error".tagged(Tag {
|
||||
span: token_span,
|
||||
anchor: None,
|
||||
}),
|
||||
))
|
||||
}
|
||||
RawToken::Variable(tag) => hir::Expression::variable(tag, token_span),
|
||||
RawToken::Number(number) => {
|
||||
@ -79,6 +87,10 @@ impl FallibleColorSyntax for NumberShape {
|
||||
type Info = ();
|
||||
type Input = ();
|
||||
|
||||
fn name(&self) -> &'static str {
|
||||
"NumberShape"
|
||||
}
|
||||
|
||||
fn color_syntax<'a, 'b>(
|
||||
&self,
|
||||
_input: &(),
|
||||
@ -97,7 +109,7 @@ impl FallibleColorSyntax for NumberShape {
|
||||
Spanned { item: Ok(atom), .. } => atom,
|
||||
};
|
||||
|
||||
atom.color_tokens(token_nodes.mut_shapes());
|
||||
token_nodes.mutate_shapes(|shapes| atom.color_tokens(shapes));
|
||||
|
||||
Ok(())
|
||||
}
|
||||
@ -107,16 +119,19 @@ impl FallibleColorSyntax for NumberShape {
|
||||
pub struct IntShape;
|
||||
|
||||
impl ExpandExpression for IntShape {
|
||||
fn name(&self) -> &'static str {
|
||||
"integer"
|
||||
}
|
||||
|
||||
fn expand_expr<'a, 'b>(
|
||||
&self,
|
||||
token_nodes: &mut TokensIterator<'_>,
|
||||
context: &ExpandContext,
|
||||
) -> Result<hir::Expression, ShellError> {
|
||||
) -> Result<hir::Expression, ParseError> {
|
||||
parse_single_node(token_nodes, "Integer", |token, token_span, err| {
|
||||
Ok(match token {
|
||||
RawToken::GlobPattern | RawToken::Operator(..) => return Err(err.error()),
|
||||
RawToken::ExternalWord => {
|
||||
return Err(ShellError::invalid_external_word(token_span))
|
||||
RawToken::GlobPattern | RawToken::Operator(..) | RawToken::ExternalWord => {
|
||||
return Err(err.error())
|
||||
}
|
||||
RawToken::Variable(span) if span.slice(context.source) == "it" => {
|
||||
hir::Expression::it_variable(span, token_span)
|
||||
@ -171,6 +186,10 @@ impl FallibleColorSyntax for IntShape {
|
||||
type Info = ();
|
||||
type Input = ();
|
||||
|
||||
fn name(&self) -> &'static str {
|
||||
"IntShape"
|
||||
}
|
||||
|
||||
fn color_syntax<'a, 'b>(
|
||||
&self,
|
||||
_input: &(),
|
||||
@ -189,8 +208,23 @@ impl FallibleColorSyntax for IntShape {
|
||||
Spanned { item: Ok(atom), .. } => atom,
|
||||
};
|
||||
|
||||
atom.color_tokens(token_nodes.mut_shapes());
|
||||
token_nodes.mutate_shapes(|shapes| atom.color_tokens(shapes));
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
impl TestSyntax for NumberShape {
|
||||
fn test<'a, 'b>(
|
||||
&self,
|
||||
token_nodes: &'b mut TokensIterator<'a>,
|
||||
_context: &ExpandContext,
|
||||
) -> Option<Peeked<'a, 'b>> {
|
||||
let peeked = token_nodes.peek_any();
|
||||
|
||||
match peeked.node {
|
||||
Some(token) if token.is_number() => Some(peeked),
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -1,7 +1,6 @@
|
||||
use crate::parser::hir::syntax_shape::{
|
||||
expand_atom, expand_bare, expand_syntax, expression::expand_file_path, parse_single_node,
|
||||
AtomicToken, ExpandContext, ExpandExpression, ExpandSyntax, ExpansionRule, FallibleColorSyntax,
|
||||
FlatShape,
|
||||
expand_atom, expand_bare, expression::expand_file_path, AtomicToken, ExpandContext,
|
||||
ExpandExpression, ExpandSyntax, ExpansionRule, FallibleColorSyntax, FlatShape, ParseError,
|
||||
};
|
||||
use crate::parser::{hir, hir::TokensIterator, Operator, RawToken, TokenNode};
|
||||
use crate::prelude::*;
|
||||
@ -41,6 +40,10 @@ impl FallibleColorSyntax for PatternShape {
|
||||
type Info = ();
|
||||
type Input = ();
|
||||
|
||||
fn name(&self) -> &'static str {
|
||||
"PatternShape"
|
||||
}
|
||||
|
||||
fn color_syntax<'a, 'b>(
|
||||
&self,
|
||||
_input: &(),
|
||||
@ -63,48 +66,26 @@ impl FallibleColorSyntax for PatternShape {
|
||||
}
|
||||
|
||||
impl ExpandExpression for PatternShape {
|
||||
fn name(&self) -> &'static str {
|
||||
"glob pattern"
|
||||
}
|
||||
|
||||
fn expand_expr<'a, 'b>(
|
||||
&self,
|
||||
token_nodes: &mut TokensIterator<'_>,
|
||||
context: &ExpandContext,
|
||||
) -> Result<hir::Expression, ShellError> {
|
||||
let pattern = expand_syntax(&BarePatternShape, token_nodes, context);
|
||||
) -> Result<hir::Expression, ParseError> {
|
||||
let atom = expand_atom(token_nodes, "pattern", context, ExpansionRule::new())?;
|
||||
|
||||
match pattern {
|
||||
Ok(tag) => {
|
||||
return Ok(hir::Expression::pattern(tag));
|
||||
match atom.item {
|
||||
AtomicToken::Word { text: body }
|
||||
| AtomicToken::String { body }
|
||||
| AtomicToken::GlobPattern { pattern: body } => {
|
||||
let path = expand_file_path(body.slice(context.source), context);
|
||||
return Ok(hir::Expression::pattern(path.to_string_lossy(), atom.span));
|
||||
}
|
||||
Err(_) => {}
|
||||
_ => return atom.into_hir(context, "pattern"),
|
||||
}
|
||||
|
||||
parse_single_node(token_nodes, "Pattern", |token, token_tag, _| {
|
||||
Ok(match token {
|
||||
RawToken::GlobPattern => {
|
||||
return Err(ShellError::unreachable(
|
||||
"glob pattern after glob already returned",
|
||||
))
|
||||
}
|
||||
RawToken::Operator(..) => {
|
||||
return Err(ShellError::unreachable("dot after glob already returned"))
|
||||
}
|
||||
RawToken::Bare => {
|
||||
return Err(ShellError::unreachable("bare after glob already returned"))
|
||||
}
|
||||
|
||||
RawToken::Variable(tag) if tag.slice(context.source) == "it" => {
|
||||
hir::Expression::it_variable(tag, token_tag)
|
||||
}
|
||||
RawToken::Variable(tag) => hir::Expression::variable(tag, token_tag),
|
||||
RawToken::ExternalCommand(tag) => hir::Expression::external_command(tag, token_tag),
|
||||
RawToken::ExternalWord => return Err(ShellError::invalid_external_word(token_tag)),
|
||||
RawToken::Number(_) => hir::Expression::bare(token_tag),
|
||||
|
||||
RawToken::String(tag) => hir::Expression::file_path(
|
||||
expand_file_path(tag.slice(context.source), context),
|
||||
token_tag,
|
||||
),
|
||||
})
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
@ -114,11 +95,15 @@ pub struct BarePatternShape;
|
||||
impl ExpandSyntax for BarePatternShape {
|
||||
type Output = Span;
|
||||
|
||||
fn name(&self) -> &'static str {
|
||||
"bare pattern"
|
||||
}
|
||||
|
||||
fn expand_syntax<'a, 'b>(
|
||||
&self,
|
||||
token_nodes: &'b mut TokensIterator<'a>,
|
||||
context: &ExpandContext,
|
||||
) -> Result<Span, ShellError> {
|
||||
) -> Result<Span, ParseError> {
|
||||
expand_bare(token_nodes, context, |token| match token {
|
||||
TokenNode::Token(Spanned {
|
||||
item: RawToken::Bare,
|
||||
|
@ -1,9 +1,9 @@
|
||||
use crate::parser::hir::syntax_shape::{
|
||||
expand_atom, expand_variable, parse_single_node, AtomicToken, ExpandContext, ExpandExpression,
|
||||
ExpansionRule, FallibleColorSyntax, FlatShape, TestSyntax,
|
||||
ExpansionRule, FallibleColorSyntax, FlatShape, ParseError, TestSyntax,
|
||||
};
|
||||
use crate::parser::hir::tokens_iterator::Peeked;
|
||||
use crate::parser::{hir, hir::TokensIterator, RawToken, TokenNode};
|
||||
use crate::parser::{hir, hir::TokensIterator, RawToken};
|
||||
use crate::prelude::*;
|
||||
|
||||
#[derive(Debug, Copy, Clone)]
|
||||
@ -45,6 +45,10 @@ impl FallibleColorSyntax for StringShape {
|
||||
type Info = ();
|
||||
type Input = FlatShape;
|
||||
|
||||
fn name(&self) -> &'static str {
|
||||
"StringShape"
|
||||
}
|
||||
|
||||
fn color_syntax<'a, 'b>(
|
||||
&self,
|
||||
input: &FlatShape,
|
||||
@ -63,7 +67,7 @@ impl FallibleColorSyntax for StringShape {
|
||||
item: AtomicToken::String { .. },
|
||||
span,
|
||||
} => token_nodes.color_shape((*input).spanned(span)),
|
||||
other => other.color_tokens(token_nodes.mut_shapes()),
|
||||
atom => token_nodes.mutate_shapes(|shapes| atom.color_tokens(shapes)),
|
||||
}
|
||||
|
||||
Ok(())
|
||||
@ -71,32 +75,24 @@ impl FallibleColorSyntax for StringShape {
|
||||
}
|
||||
|
||||
impl ExpandExpression for StringShape {
|
||||
fn name(&self) -> &'static str {
|
||||
"string"
|
||||
}
|
||||
|
||||
fn expand_expr<'a, 'b>(
|
||||
&self,
|
||||
token_nodes: &mut TokensIterator<'_>,
|
||||
context: &ExpandContext,
|
||||
) -> Result<hir::Expression, ShellError> {
|
||||
parse_single_node(token_nodes, "String", |token, token_span, _| {
|
||||
) -> Result<hir::Expression, ParseError> {
|
||||
parse_single_node(token_nodes, "String", |token, token_span, err| {
|
||||
Ok(match token {
|
||||
RawToken::GlobPattern => {
|
||||
return Err(ShellError::type_error(
|
||||
"String",
|
||||
"glob pattern".tagged(token_span),
|
||||
))
|
||||
}
|
||||
RawToken::Operator(..) => {
|
||||
return Err(ShellError::type_error(
|
||||
"String",
|
||||
"operator".tagged(token_span),
|
||||
))
|
||||
RawToken::GlobPattern | RawToken::Operator(..) | RawToken::ExternalWord => {
|
||||
return Err(err.error())
|
||||
}
|
||||
RawToken::Variable(span) => expand_variable(span, token_span, &context.source),
|
||||
RawToken::ExternalCommand(span) => {
|
||||
hir::Expression::external_command(span, token_span)
|
||||
}
|
||||
RawToken::ExternalWord => {
|
||||
return Err(ShellError::invalid_external_word(token_span))
|
||||
}
|
||||
RawToken::Number(_) => hir::Expression::bare(token_span),
|
||||
RawToken::Bare => hir::Expression::bare(token_span),
|
||||
RawToken::String(span) => hir::Expression::string(span, token_span),
|
||||
@ -114,11 +110,7 @@ impl TestSyntax for StringShape {
|
||||
let peeked = token_nodes.peek_any();
|
||||
|
||||
match peeked.node {
|
||||
Some(TokenNode::Token(token)) => match token.item {
|
||||
RawToken::String(_) => Some(peeked),
|
||||
_ => None,
|
||||
},
|
||||
|
||||
Some(token) if token.is_string() => Some(peeked),
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
|
@@ -1,5 +1,5 @@
use crate::data::meta::Span;
use crate::parser::hir::syntax_shape::{ExpandContext, ExpandSyntax};
use crate::parser::hir::syntax_shape::{ExpandContext, ExpandSyntax, ParseError};
use crate::parser::parse::tokens::RawNumber;
use crate::parser::parse::unit::Unit;
use crate::parser::{hir::TokensIterator, RawToken, TokenNode};

@@ -9,18 +9,34 @@ use nom::bytes::complete::tag;
use nom::character::complete::digit1;
use nom::combinator::{all_consuming, opt, value};
use nom::IResult;
use std::fmt;

#[derive(Debug, Copy, Clone)]
pub struct UnitShape;

impl FormatDebug for Spanned<(Spanned<RawNumber>, Spanned<Unit>)> {
fn fmt_debug(&self, f: &mut DebugFormatter, source: &str) -> fmt::Result {
let dict = indexmap::indexmap! {
"number" => format!("{}", self.item.0.item.debug(source)),
"unit" => format!("{}", self.item.1.debug(source)),
};

f.say_dict("unit", dict)
}
}

impl ExpandSyntax for UnitShape {
type Output = Spanned<(Spanned<RawNumber>, Spanned<Unit>)>;

fn name(&self) -> &'static str {
"unit"
}

fn expand_syntax<'a, 'b>(
&self,
token_nodes: &'b mut TokensIterator<'a>,
context: &ExpandContext,
) -> Result<Spanned<(Spanned<RawNumber>, Spanned<Unit>)>, ShellError> {
) -> Result<Spanned<(Spanned<RawNumber>, Spanned<Unit>)>, ParseError> {
let peeked = token_nodes.peek_any().not_eof("unit")?;

let span = match peeked.node {

@@ -34,12 +50,7 @@ impl ExpandSyntax for UnitShape {
let unit = unit_size(span.slice(context.source), *span);

let (_, (number, unit)) = match unit {
Err(_) => {
return Err(ShellError::type_error(
"unit",
"word".tagged(Tag::unknown()),
))
}
Err(_) => return Err(ParseError::mismatch("unit", "word".tagged(Tag::unknown()))),
Ok((number, unit)) => (number, unit),
};

@@ -78,9 +89,9 @@ fn unit_size(input: &str, bare_span: Span) -> IResult<&str, (Spanned<RawNumber>,
value(Unit::B, alt((tag("B"), tag("b")))),
value(Unit::KB, alt((tag("KB"), tag("kb"), tag("Kb")))),
value(Unit::MB, alt((tag("MB"), tag("mb"), tag("Mb")))),
value(Unit::MB, alt((tag("GB"), tag("gb"), tag("Gb")))),
value(Unit::MB, alt((tag("TB"), tag("tb"), tag("Tb")))),
value(Unit::MB, alt((tag("PB"), tag("pb"), tag("Pb")))),
value(Unit::GB, alt((tag("GB"), tag("gb"), tag("Gb")))),
value(Unit::TB, alt((tag("TB"), tag("tb"), tag("Tb")))),
value(Unit::PB, alt((tag("PB"), tag("pb"), tag("Pb")))),
)))(input)?;

let start_span = number.span.end();
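The `unit_size` hunk above is a plain bug fix: the `GB`, `TB`, and `PB` suffixes had all been mapped to `Unit::MB`, and the replacement lines map each suffix to its own variant. Here is a small self-contained sketch of the same suffix-to-unit mapping, using a plain match instead of the nom combinators; this `Unit` enum and `parse_unit` helper are simplified stand-ins for illustration.

```rust
#[derive(Debug, PartialEq)]
enum Unit {
    B,
    KB,
    MB,
    GB,
    TB,
    PB,
}

fn parse_unit(suffix: &str) -> Option<Unit> {
    match suffix.to_ascii_lowercase().as_str() {
        "b" => Some(Unit::B),
        "kb" => Some(Unit::KB),
        "mb" => Some(Unit::MB),
        // The fix: each large suffix gets its own variant instead of falling back to MB.
        "gb" => Some(Unit::GB),
        "tb" => Some(Unit::TB),
        "pb" => Some(Unit::PB),
        _ => None,
    }
}

fn main() {
    assert_eq!(parse_unit("Gb"), Some(Unit::GB));
    assert_eq!(parse_unit("TB"), Some(Unit::TB));
    assert_eq!(parse_unit("pb"), Some(Unit::PB));
    println!("unit suffixes parse correctly");
}
```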
@ -1,21 +1,28 @@
|
||||
use crate::parser::hir::syntax_shape::{
|
||||
color_fallible_syntax, color_fallible_syntax_with, expand_atom, expand_expr, expand_syntax,
|
||||
parse_single_node, AnyExpressionShape, AtomicToken, BareShape, ExpandContext, ExpandExpression,
|
||||
ExpandSyntax, ExpansionRule, FallibleColorSyntax, FlatShape, Peeked, SkipSyntax, StringShape,
|
||||
TestSyntax, WhitespaceShape,
|
||||
ExpandSyntax, ExpansionRule, FallibleColorSyntax, FlatShape, ParseError, Peeked, SkipSyntax,
|
||||
StringShape, TestSyntax, WhitespaceShape,
|
||||
};
|
||||
use crate::parser::{hir, hir::Expression, hir::TokensIterator, Operator, RawToken};
|
||||
use crate::prelude::*;
|
||||
use derive_new::new;
|
||||
use getset::Getters;
|
||||
use std::fmt;
|
||||
|
||||
#[derive(Debug, Copy, Clone)]
|
||||
pub struct VariablePathShape;
|
||||
|
||||
impl ExpandExpression for VariablePathShape {
|
||||
fn name(&self) -> &'static str {
|
||||
"variable path"
|
||||
}
|
||||
|
||||
fn expand_expr<'a, 'b>(
|
||||
&self,
|
||||
token_nodes: &mut TokensIterator<'_>,
|
||||
context: &ExpandContext,
|
||||
) -> Result<hir::Expression, ShellError> {
|
||||
) -> Result<hir::Expression, ParseError> {
|
||||
// 1. let the head be the first token, expecting a variable
|
||||
// 2. let the tail be an empty list of members
|
||||
// 2. while the next token (excluding ws) is a dot:
|
||||
@ -90,6 +97,10 @@ impl FallibleColorSyntax for VariablePathShape {
|
||||
type Info = ();
|
||||
type Input = ();
|
||||
|
||||
fn name(&self) -> &'static str {
|
||||
"VariablePathShape"
|
||||
}
|
||||
|
||||
fn color_syntax<'a, 'b>(
|
||||
&self,
|
||||
_input: &(),
|
||||
@ -166,6 +177,10 @@ impl FallibleColorSyntax for PathTailShape {
|
||||
type Info = ();
|
||||
type Input = ();
|
||||
|
||||
fn name(&self) -> &'static str {
|
||||
"PathTailShape"
|
||||
}
|
||||
|
||||
fn color_syntax<'a, 'b>(
|
||||
&self,
|
||||
_input: &(),
|
||||
@ -192,12 +207,17 @@ impl FallibleColorSyntax for PathTailShape {
|
||||
}
|
||||
|
||||
impl ExpandSyntax for PathTailShape {
|
||||
type Output = (Vec<Spanned<String>>, Span);
|
||||
type Output = Spanned<Vec<Spanned<String>>>;
|
||||
|
||||
fn name(&self) -> &'static str {
|
||||
"path continuation"
|
||||
}
|
||||
|
||||
fn expand_syntax<'a, 'b>(
|
||||
&self,
|
||||
token_nodes: &'b mut TokensIterator<'a>,
|
||||
context: &ExpandContext,
|
||||
) -> Result<Self::Output, ShellError> {
|
||||
) -> Result<Self::Output, ParseError> {
|
||||
let mut end: Option<Span> = None;
|
||||
let mut tail = vec![];
|
||||
|
||||
@ -215,7 +235,7 @@ impl ExpandSyntax for PathTailShape {
|
||||
|
||||
match end {
|
||||
None => {
|
||||
return Err(ShellError::type_error("path tail", {
|
||||
return Err(ParseError::mismatch("path tail", {
|
||||
let typed_span = token_nodes.typed_span_at_cursor();
|
||||
|
||||
Tagged {
|
||||
@ -225,17 +245,41 @@ impl ExpandSyntax for PathTailShape {
|
||||
}))
|
||||
}
|
||||
|
||||
Some(end) => Ok((tail, end)),
|
||||
Some(end) => Ok(tail.spanned(end)),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
#[derive(Debug, Clone)]
|
||||
pub enum ExpressionContinuation {
|
||||
DotSuffix(Span, Spanned<String>),
|
||||
InfixSuffix(Spanned<Operator>, Expression),
|
||||
}
|
||||
|
||||
impl FormatDebug for ExpressionContinuation {
|
||||
fn fmt_debug(&self, f: &mut DebugFormatter, source: &str) -> fmt::Result {
|
||||
match self {
|
||||
ExpressionContinuation::DotSuffix(dot, rest) => {
|
||||
f.say_str("dot suffix", dot.until(rest.span).slice(source))
|
||||
}
|
||||
ExpressionContinuation::InfixSuffix(operator, expr) => {
|
||||
f.say_str("infix suffix", operator.span.until(expr.span).slice(source))
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl HasSpan for ExpressionContinuation {
|
||||
fn span(&self) -> Span {
|
||||
match self {
|
||||
ExpressionContinuation::DotSuffix(dot, column) => dot.until(column.span),
|
||||
ExpressionContinuation::InfixSuffix(operator, expression) => {
|
||||
operator.span.until(expression.span)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// An expression continuation
|
||||
#[derive(Debug, Copy, Clone)]
|
||||
pub struct ExpressionContinuationShape;
|
||||
@ -243,11 +287,15 @@ pub struct ExpressionContinuationShape;
|
||||
impl ExpandSyntax for ExpressionContinuationShape {
|
||||
type Output = ExpressionContinuation;
|
||||
|
||||
fn name(&self) -> &'static str {
|
||||
"expression continuation"
|
||||
}
|
||||
|
||||
fn expand_syntax<'a, 'b>(
|
||||
&self,
|
||||
token_nodes: &mut TokensIterator<'_>,
|
||||
context: &ExpandContext,
|
||||
) -> Result<ExpressionContinuation, ShellError> {
|
||||
) -> Result<ExpressionContinuation, ParseError> {
|
||||
// Try to expand a `.`
|
||||
let dot = expand_syntax(&DotShape, token_nodes, context);
|
||||
|
||||
@ -262,7 +310,7 @@ impl ExpandSyntax for ExpressionContinuationShape {
|
||||
|
||||
// Otherwise, we expect an infix operator and an expression next
|
||||
Err(_) => {
|
||||
let (_, op, _) = expand_syntax(&InfixShape, token_nodes, context)?;
|
||||
let (_, op, _) = expand_syntax(&InfixShape, token_nodes, context)?.item;
|
||||
let next = expand_expr(&AnyExpressionShape, token_nodes, context)?;
|
||||
|
||||
Ok(ExpressionContinuation::InfixSuffix(op, next))
|
||||
@ -334,6 +382,10 @@ impl FallibleColorSyntax for ExpressionContinuationShape {
|
||||
type Info = ContinuationInfo;
|
||||
type Input = ();
|
||||
|
||||
fn name(&self) -> &'static str {
|
||||
"ExpressionContinuationShape"
|
||||
}
|
||||
|
||||
fn color_syntax<'a, 'b>(
|
||||
&self,
|
||||
_input: &(),
|
||||
@ -378,12 +430,16 @@ impl FallibleColorSyntax for ExpressionContinuationShape {
|
||||
pub struct VariableShape;
|
||||
|
||||
impl ExpandExpression for VariableShape {
|
||||
fn name(&self) -> &'static str {
|
||||
"variable"
|
||||
}
|
||||
|
||||
fn expand_expr<'a, 'b>(
|
||||
&self,
|
||||
token_nodes: &mut TokensIterator<'_>,
|
||||
context: &ExpandContext,
|
||||
) -> Result<hir::Expression, ShellError> {
|
||||
parse_single_node(token_nodes, "variable", |token, token_tag, _| {
|
||||
) -> Result<hir::Expression, ParseError> {
|
||||
parse_single_node(token_nodes, "variable", |token, token_tag, err| {
|
||||
Ok(match token {
|
||||
RawToken::Variable(tag) => {
|
||||
if tag.slice(context.source) == "it" {
|
||||
@ -392,12 +448,7 @@ impl ExpandExpression for VariableShape {
|
||||
hir::Expression::variable(tag, token_tag)
|
||||
}
|
||||
}
|
||||
_ => {
|
||||
return Err(ShellError::type_error(
|
||||
"variable",
|
||||
token.type_name().tagged(token_tag),
|
||||
))
|
||||
}
|
||||
_ => return Err(err.error()),
|
||||
})
|
||||
})
|
||||
}
|
||||
@ -423,7 +474,7 @@ impl FallibleColorSyntax for VariableShape {
|
||||
);
|
||||
|
||||
let atom = match atom {
|
||||
Err(err) => return Err(err),
|
||||
Err(err) => return Err(err.into()),
|
||||
Ok(atom) => atom,
|
||||
};
|
||||
|
||||
@ -446,6 +497,10 @@ impl FallibleColorSyntax for VariableShape {
|
||||
type Info = ();
|
||||
type Input = ();
|
||||
|
||||
fn name(&self) -> &'static str {
|
||||
"VariableShape"
|
||||
}
|
||||
|
||||
fn color_syntax<'a, 'b>(
|
||||
&self,
|
||||
_input: &(),
|
||||
@ -460,7 +515,7 @@ impl FallibleColorSyntax for VariableShape {
|
||||
);
|
||||
|
||||
let atom = match atom {
|
||||
Err(err) => return Err(err),
|
||||
Err(err) => return Err(err.into()),
|
||||
Ok(atom) => atom,
|
||||
};
|
||||
|
||||
@ -473,7 +528,7 @@ impl FallibleColorSyntax for VariableShape {
|
||||
token_nodes.color_shape(FlatShape::ItVariable.spanned(atom.span));
|
||||
Ok(())
|
||||
}
|
||||
_ => Err(ShellError::type_error("variable", atom.tagged_type_name())),
|
||||
_ => Err(ParseError::mismatch("variable", atom.tagged_type_name()).into()),
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -484,6 +539,24 @@ pub enum Member {
|
||||
Bare(Span),
|
||||
}
|
||||
|
||||
impl FormatDebug for Member {
|
||||
fn fmt_debug(&self, f: &mut DebugFormatter, source: &str) -> fmt::Result {
|
||||
match self {
|
||||
Member::String(outer, _) => write!(f, "member ({})", outer.slice(source)),
|
||||
Member::Bare(bare) => write!(f, "member ({})", bare.slice(source)),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl HasSpan for Member {
|
||||
fn span(&self) -> Span {
|
||||
match self {
|
||||
Member::String(outer, ..) => *outer,
|
||||
Member::Bare(name) => *name,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Member {
|
||||
pub(crate) fn to_expr(&self) -> hir::Expression {
|
||||
match self {
|
||||
@ -522,7 +595,7 @@ enum ColumnPathState {
|
||||
LeadingDot(Span),
|
||||
Dot(Span, Vec<Member>, Span),
|
||||
Member(Span, Vec<Member>),
|
||||
Error(ShellError),
|
||||
Error(ParseError),
|
||||
}
|
||||
|
||||
impl ColumnPathState {
|
||||
@ -530,10 +603,10 @@ impl ColumnPathState {
|
||||
match self {
|
||||
ColumnPathState::Initial => ColumnPathState::LeadingDot(dot),
|
||||
ColumnPathState::LeadingDot(_) => {
|
||||
ColumnPathState::Error(ShellError::type_error("column", "dot".tagged(dot)))
|
||||
ColumnPathState::Error(ParseError::mismatch("column", "dot".tagged(dot)))
|
||||
}
|
||||
ColumnPathState::Dot(..) => {
|
||||
ColumnPathState::Error(ShellError::type_error("column", "dot".tagged(dot)))
|
||||
ColumnPathState::Error(ParseError::mismatch("column", "dot".tagged(dot)))
|
||||
}
|
||||
ColumnPathState::Member(tag, members) => ColumnPathState::Dot(tag, members, dot),
|
||||
ColumnPathState::Error(err) => ColumnPathState::Error(err),
|
||||
@ -554,20 +627,20 @@ impl ColumnPathState {
|
||||
})
|
||||
}
|
||||
ColumnPathState::Member(..) => {
|
||||
ColumnPathState::Error(ShellError::type_error("column", member.tagged_type_name()))
|
||||
ColumnPathState::Error(ParseError::mismatch("column", member.tagged_type_name()))
|
||||
}
|
||||
ColumnPathState::Error(err) => ColumnPathState::Error(err),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn into_path(self, next: Peeked) -> Result<Tagged<Vec<Member>>, ShellError> {
|
||||
pub fn into_path(self, next: Peeked) -> Result<Tagged<Vec<Member>>, ParseError> {
|
||||
match self {
|
||||
ColumnPathState::Initial => Err(next.type_error("column path")),
|
||||
ColumnPathState::LeadingDot(dot) => {
|
||||
Err(ShellError::type_error("column", "dot".tagged(dot)))
|
||||
Err(ParseError::mismatch("column", "dot".tagged(dot)))
|
||||
}
|
||||
ColumnPathState::Dot(_tag, _members, dot) => {
|
||||
Err(ShellError::type_error("column", "dot".tagged(dot)))
|
||||
Err(ParseError::mismatch("column", "dot".tagged(dot)))
|
||||
}
|
||||
ColumnPathState::Member(tag, tags) => Ok(tags.tagged(tag)),
|
||||
ColumnPathState::Error(err) => Err(err),
|
||||
@ -578,7 +651,7 @@ impl ColumnPathState {
|
||||
pub fn expand_column_path<'a, 'b>(
|
||||
token_nodes: &'b mut TokensIterator<'a>,
|
||||
context: &ExpandContext,
|
||||
) -> Result<Tagged<Vec<Member>>, ShellError> {
|
||||
) -> Result<Tagged<Vec<Member>>, ParseError> {
|
||||
let mut state = ColumnPathState::Initial;
|
||||
|
||||
loop {
|
||||
@ -658,6 +731,10 @@ impl FallibleColorSyntax for ColumnPathShape {
|
||||
type Info = ();
|
||||
type Input = ();
|
||||
|
||||
fn name(&self) -> &'static str {
|
||||
"ColumnPathShape"
|
||||
}
|
||||
|
||||
fn color_syntax<'a, 'b>(
|
||||
&self,
|
||||
_input: &(),
|
||||
@ -700,15 +777,43 @@ impl FallibleColorSyntax for ColumnPathShape {
|
||||
}
|
||||
}
|
||||
|
||||
impl FormatDebug for Tagged<Vec<Member>> {
|
||||
fn fmt_debug(&self, f: &mut DebugFormatter, source: &str) -> fmt::Result {
|
||||
self.item.fmt_debug(f, source)
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Getters, new)]
|
||||
pub struct ColumnPath {
|
||||
#[get = "pub"]
|
||||
path: Tagged<Vec<Member>>,
|
||||
}
|
||||
|
||||
impl HasSpan for ColumnPath {
|
||||
fn span(&self) -> Span {
|
||||
self.path.tag.span
|
||||
}
|
||||
}
|
||||
|
||||
impl FormatDebug for ColumnPath {
|
||||
fn fmt_debug(&self, f: &mut DebugFormatter, source: &str) -> fmt::Result {
|
||||
f.say("column path", self.path.item.debug(source))
|
||||
}
|
||||
}
|
||||
|
||||
impl ExpandSyntax for ColumnPathShape {
|
||||
type Output = Tagged<Vec<Member>>;
|
||||
type Output = ColumnPath;
|
||||
|
||||
fn name(&self) -> &'static str {
|
||||
"column path"
|
||||
}
|
||||
|
||||
fn expand_syntax<'a, 'b>(
|
||||
&self,
|
||||
token_nodes: &'b mut TokensIterator<'a>,
|
||||
context: &ExpandContext,
|
||||
) -> Result<Self::Output, ShellError> {
|
||||
expand_column_path(token_nodes, context)
|
||||
) -> Result<Self::Output, ParseError> {
|
||||
Ok(ColumnPath::new(expand_column_path(token_nodes, context)?))
|
||||
}
|
||||
}
|
||||
|
||||
@ -758,6 +863,10 @@ impl FallibleColorSyntax for MemberShape {
|
||||
type Info = ();
|
||||
type Input = ();
|
||||
|
||||
fn name(&self) -> &'static str {
|
||||
"MemberShape"
|
||||
}
|
||||
|
||||
fn color_syntax<'a, 'b>(
|
||||
&self,
|
||||
_input: &(),
|
||||
@ -782,22 +891,36 @@ impl FallibleColorSyntax for MemberShape {
|
||||
impl ExpandSyntax for MemberShape {
|
||||
type Output = Member;
|
||||
|
||||
fn name(&self) -> &'static str {
|
||||
"column"
|
||||
}
|
||||
|
||||
fn expand_syntax<'a, 'b>(
|
||||
&self,
|
||||
token_nodes: &mut TokensIterator<'_>,
|
||||
context: &ExpandContext,
|
||||
) -> Result<Member, ShellError> {
|
||||
) -> Result<Member, ParseError> {
|
||||
let bare = BareShape.test(token_nodes, context);
|
||||
if let Some(peeked) = bare {
|
||||
let node = peeked.not_eof("column")?.commit();
|
||||
return Ok(Member::Bare(node.span()));
|
||||
}
|
||||
|
||||
/* KATZ */
|
||||
/* let number = NumberShape.test(token_nodes, context);
|
||||
|
||||
if let Some(peeked) = number {
|
||||
let node = peeked.not_eof("column")?.commit();
|
||||
let (n, span) = node.as_number().unwrap();
|
||||
|
||||
return Ok(Member::Number(n, span))
|
||||
}*/
|
||||
|
||||
let string = StringShape.test(token_nodes, context);
|
||||
|
||||
if let Some(peeked) = string {
|
||||
let node = peeked.not_eof("column")?.commit();
|
||||
let (outer, inner) = node.expect_string();
|
||||
let (outer, inner) = node.as_string().unwrap();
|
||||
|
||||
return Ok(Member::String(outer, inner));
|
||||
}
|
||||
@ -843,6 +966,10 @@ impl FallibleColorSyntax for ColorableDotShape {
|
||||
type Info = ();
|
||||
type Input = FlatShape;
|
||||
|
||||
fn name(&self) -> &'static str {
|
||||
"ColorableDotShape"
|
||||
}
|
||||
|
||||
fn color_syntax<'a, 'b>(
|
||||
&self,
|
||||
input: &FlatShape,
|
||||
@ -878,16 +1005,20 @@ impl SkipSyntax for DotShape {
|
||||
impl ExpandSyntax for DotShape {
|
||||
type Output = Span;
|
||||
|
||||
fn name(&self) -> &'static str {
|
||||
"dot"
|
||||
}
|
||||
|
||||
fn expand_syntax<'a, 'b>(
|
||||
&self,
|
||||
token_nodes: &'b mut TokensIterator<'a>,
|
||||
_context: &ExpandContext,
|
||||
) -> Result<Self::Output, ShellError> {
|
||||
) -> Result<Self::Output, ParseError> {
|
||||
parse_single_node(token_nodes, "dot", |token, token_span, _| {
|
||||
Ok(match token {
|
||||
RawToken::Operator(Operator::Dot) => token_span,
|
||||
_ => {
|
||||
return Err(ShellError::type_error(
|
||||
return Err(ParseError::mismatch(
|
||||
"dot",
|
||||
token.type_name().tagged(token_span),
|
||||
))
|
||||
@ -922,7 +1053,7 @@ impl FallibleColorSyntax for InfixShape {
|
||||
parse_single_node(
|
||||
checkpoint.iterator,
|
||||
"infix operator",
|
||||
|token, token_span, _| {
|
||||
|token, token_span, err| {
|
||||
match token {
|
||||
// If it's an operator (and not `.`), it's a match
|
||||
RawToken::Operator(operator) if operator != Operator::Dot => {
|
||||
@ -931,10 +1062,7 @@ impl FallibleColorSyntax for InfixShape {
|
||||
}
|
||||
|
||||
// Otherwise, it's not a match
|
||||
_ => Err(ShellError::type_error(
|
||||
"infix operator",
|
||||
token.type_name().tagged(token_span),
|
||||
)),
|
||||
_ => Err(err.error()),
|
||||
}
|
||||
},
|
||||
)?;
|
||||
@ -953,6 +1081,10 @@ impl FallibleColorSyntax for InfixShape {
|
||||
type Info = ();
|
||||
type Input = ();
|
||||
|
||||
fn name(&self) -> &'static str {
|
||||
"InfixShape"
|
||||
}
|
||||
|
||||
fn color_syntax<'a, 'b>(
|
||||
&self,
|
||||
_input: &(),
|
||||
@ -971,13 +1103,10 @@ impl FallibleColorSyntax for InfixShape {
|
||||
|token, token_span, _| {
|
||||
match token {
|
||||
// If it's an operator (and not `.`), it's a match
|
||||
RawToken::Operator(operator) if operator != Operator::Dot => {
|
||||
// token_nodes.color_shape(FlatShape::Operator.spanned(token_span));
|
||||
Ok(token_span)
|
||||
}
|
||||
RawToken::Operator(operator) if operator != Operator::Dot => Ok(token_span),
|
||||
|
||||
// Otherwise, it's not a match
|
||||
_ => Err(ShellError::type_error(
|
||||
_ => Err(ParseError::mismatch(
|
||||
"infix operator",
|
||||
token.type_name().tagged(token_span),
|
||||
)),
|
||||
@ -997,46 +1126,72 @@ impl FallibleColorSyntax for InfixShape {
|
||||
}
|
||||
}
|
||||
|
||||
impl FormatDebug for Spanned<(Span, Spanned<Operator>, Span)> {
|
||||
fn fmt_debug(&self, f: &mut DebugFormatter, source: &str) -> fmt::Result {
|
||||
f.say_str("operator", self.item.1.span.slice(source))
|
||||
}
|
||||
}
|
||||
|
||||
impl ExpandSyntax for InfixShape {
|
||||
type Output = (Span, Spanned<Operator>, Span);
|
||||
type Output = Spanned<(Span, Spanned<Operator>, Span)>;
|
||||
|
||||
fn name(&self) -> &'static str {
|
||||
"infix operator"
|
||||
}
|
||||
|
||||
fn expand_syntax<'a, 'b>(
|
||||
&self,
|
||||
token_nodes: &'b mut TokensIterator<'a>,
|
||||
context: &ExpandContext,
|
||||
) -> Result<Self::Output, ShellError> {
|
||||
let checkpoint = token_nodes.checkpoint();
|
||||
) -> Result<Self::Output, ParseError> {
|
||||
let mut checkpoint = token_nodes.checkpoint();
|
||||
|
||||
// An infix operator must be prefixed by whitespace
|
||||
let start = expand_syntax(&WhitespaceShape, checkpoint.iterator, context)?;
|
||||
|
||||
// Parse the next TokenNode after the whitespace
|
||||
let operator = parse_single_node(
|
||||
checkpoint.iterator,
|
||||
"infix operator",
|
||||
|token, token_span, _| {
|
||||
Ok(match token {
|
||||
// If it's an operator (and not `.`), it's a match
|
||||
RawToken::Operator(operator) if operator != Operator::Dot => {
|
||||
operator.spanned(token_span)
|
||||
}
|
||||
|
||||
// Otherwise, it's not a match
|
||||
_ => {
|
||||
return Err(ShellError::type_error(
|
||||
"infix operator",
|
||||
token.type_name().tagged(token_span),
|
||||
))
|
||||
}
|
||||
})
|
||||
},
|
||||
)?;
|
||||
let operator = expand_syntax(&InfixInnerShape, &mut checkpoint.iterator, context)?;
|
||||
|
||||
// An infix operator must be followed by whitespace
|
||||
let end = expand_syntax(&WhitespaceShape, checkpoint.iterator, context)?;
|
||||
|
||||
checkpoint.commit();
|
||||
|
||||
Ok((start, operator, end))
|
||||
Ok((start, operator, end).spanned(start.until(end)))
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Copy, Clone)]
|
||||
pub struct InfixInnerShape;
|
||||
|
||||
impl FormatDebug for Spanned<Operator> {
|
||||
fn fmt_debug(&self, f: &mut DebugFormatter, source: &str) -> fmt::Result {
|
||||
f.say_str("operator", self.span.slice(source))
|
||||
}
|
||||
}
|
||||
|
||||
impl ExpandSyntax for InfixInnerShape {
|
||||
type Output = Spanned<Operator>;
|
||||
|
||||
fn name(&self) -> &'static str {
|
||||
"infix inner"
|
||||
}
|
||||
|
||||
fn expand_syntax<'a, 'b>(
|
||||
&self,
|
||||
token_nodes: &'b mut TokensIterator<'a>,
|
||||
_context: &ExpandContext,
|
||||
) -> Result<Self::Output, ParseError> {
|
||||
parse_single_node(token_nodes, "infix operator", |token, token_span, err| {
|
||||
Ok(match token {
|
||||
// If it's an operator (and not `.`), it's a match
|
||||
RawToken::Operator(operator) if operator != Operator::Dot => {
|
||||
operator.spanned(token_span)
|
||||
}
|
||||
|
||||
// Otherwise, it's not a match
|
||||
_ => return Err(err.error()),
|
||||
})
|
||||
})
|
||||
}
|
||||
}
|
||||
|
@ -1,5 +1,5 @@
|
||||
use crate::parser::{Delimiter, Flag, FlagKind, Operator, RawNumber, RawToken, TokenNode};
|
||||
use crate::{Span, Spanned, SpannedItem, Text};
|
||||
use crate::{HasSpan, Span, Spanned, SpannedItem, Text};
|
||||
|
||||
#[derive(Debug, Copy, Clone)]
|
||||
pub enum FlatShape {
|
||||
|
@ -1,23 +1,51 @@
|
||||
pub(crate) mod debug;
|
||||
|
||||
use self::debug::{ColorTracer, ExpandTracer};
|
||||
use crate::errors::ShellError;
|
||||
#[cfg(coloring_in_tokens)]
|
||||
use crate::parser::hir::syntax_shape::FlatShape;
|
||||
use crate::parser::hir::Expression;
|
||||
use crate::parser::TokenNode;
|
||||
use crate::prelude::*;
|
||||
use crate::{Span, Spanned, SpannedItem};
|
||||
#[allow(unused)]
|
||||
use getset::Getters;
|
||||
use getset::{Getters, MutGetters};
|
||||
|
||||
#[derive(Getters, Debug)]
|
||||
cfg_if::cfg_if! {
|
||||
if #[cfg(coloring_in_tokens)] {
|
||||
#[derive(Getters, Debug)]
|
||||
pub struct TokensIteratorState<'content> {
|
||||
tokens: &'content [TokenNode],
|
||||
span: Span,
|
||||
skip_ws: bool,
|
||||
index: usize,
|
||||
seen: indexmap::IndexSet<usize>,
|
||||
#[get = "pub"]
|
||||
shapes: Vec<Spanned<FlatShape>>,
|
||||
}
|
||||
} else {
|
||||
#[derive(Getters, Debug)]
|
||||
pub struct TokensIteratorState<'content> {
|
||||
tokens: &'content [TokenNode],
|
||||
span: Span,
|
||||
skip_ws: bool,
|
||||
index: usize,
|
||||
seen: indexmap::IndexSet<usize>,
|
||||
}
|
||||
}
|
||||
}
|
||||
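The `cfg_if::cfg_if!` block above picks one of two `TokensIteratorState` definitions depending on whether the custom `coloring_in_tokens` cfg is set. A minimal sketch of the same conditional-struct pattern, assuming the `cfg_if` crate is a dependency and that the flag is passed to rustc as a custom `--cfg`:

```rust
// Sketch only; `State` here is a placeholder, not the real TokensIteratorState.
cfg_if::cfg_if! {
    if #[cfg(coloring_in_tokens)] {
        // When the flag is on, the state also records colored shapes.
        pub struct State {
            pub index: usize,
            pub shapes: Vec<&'static str>,
        }
    } else {
        // Without the flag, the shape-tracking field does not exist at all.
        pub struct State {
            pub index: usize,
        }
    }
}

fn main() {
    // Construction has to be cfg-gated too, since the field set differs.
    #[cfg(coloring_in_tokens)]
    let state = State { index: 0, shapes: Vec::new() };
    #[cfg(not(coloring_in_tokens))]
    let state = State { index: 0 };

    println!("index = {}", state.index);
}
```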
|
||||
#[derive(Getters, MutGetters, Debug)]
|
||||
pub struct TokensIterator<'content> {
|
||||
tokens: &'content [TokenNode],
|
||||
span: Span,
|
||||
skip_ws: bool,
|
||||
index: usize,
|
||||
seen: indexmap::IndexSet<usize>,
|
||||
#[cfg(coloring_in_tokens)]
|
||||
#[get = "pub"]
|
||||
shapes: Vec<Spanned<FlatShape>>,
|
||||
#[get_mut = "pub"]
|
||||
state: TokensIteratorState<'content>,
|
||||
#[get = "pub"]
|
||||
#[get_mut = "pub"]
|
||||
color_tracer: ColorTracer,
|
||||
#[get = "pub"]
|
||||
#[get_mut = "pub"]
|
||||
expand_tracer: ExpandTracer,
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
@ -39,10 +67,12 @@ impl<'content, 'me> Checkpoint<'content, 'me> {
|
||||
impl<'content, 'me> std::ops::Drop for Checkpoint<'content, 'me> {
|
||||
fn drop(&mut self) {
|
||||
if !self.committed {
|
||||
self.iterator.index = self.index;
|
||||
self.iterator.seen = self.seen.clone();
|
||||
let state = &mut self.iterator.state;
|
||||
|
||||
state.index = self.index;
|
||||
state.seen = self.seen.clone();
|
||||
#[cfg(coloring_in_tokens)]
|
||||
self.iterator.shapes.truncate(self.shape_start);
|
||||
state.shapes.truncate(self.shape_start);
|
||||
}
|
||||
}
|
||||
}
|
||||
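The `Drop` impl above is what makes a `Checkpoint` roll the iterator back whenever it is dropped without being committed. A stripped-down sketch of that rollback-on-drop idea, using hypothetical types instead of the real `TokensIterator` state:

```rust
// Illustrative only: a cursor plus a checkpoint that restores it on drop.
struct Cursor {
    index: usize,
}

struct Checkpoint<'a> {
    cursor: &'a mut Cursor,
    saved_index: usize,
    committed: bool,
}

impl<'a> Checkpoint<'a> {
    fn new(cursor: &'a mut Cursor) -> Checkpoint<'a> {
        let saved_index = cursor.index;
        Checkpoint { cursor, saved_index, committed: false }
    }

    // Speculative progress goes through the checkpoint.
    fn advance_to(&mut self, index: usize) {
        self.cursor.index = index;
    }

    fn commit(mut self) {
        // Keep the progress; Drop sees committed == true and skips the rollback.
        self.committed = true;
    }
}

impl<'a> Drop for Checkpoint<'a> {
    fn drop(&mut self) {
        // If the caller never committed, restore the saved position.
        if !self.committed {
            self.cursor.index = self.saved_index;
        }
    }
}

fn main() {
    let mut cursor = Cursor { index: 0 };

    {
        let mut cp = Checkpoint::new(&mut cursor);
        cp.advance_to(5); // speculative progress
        // no commit: dropping the checkpoint rolls the cursor back
    }
    assert_eq!(cursor.index, 0);

    let mut cp = Checkpoint::new(&mut cursor);
    cp.advance_to(5);
    cp.commit(); // keep the progress
    assert_eq!(cursor.index, 5);
}
```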
@ -69,12 +99,9 @@ impl<'content, 'me> Peeked<'content, 'me> {
|
||||
Some(node)
|
||||
}
|
||||
|
||||
pub fn not_eof(
|
||||
self,
|
||||
expected: impl Into<String>,
|
||||
) -> Result<PeekedNode<'content, 'me>, ShellError> {
|
||||
pub fn not_eof(self, expected: &'static str) -> Result<PeekedNode<'content, 'me>, ParseError> {
|
||||
match self.node {
|
||||
None => Err(ShellError::unexpected_eof(
|
||||
None => Err(ParseError::unexpected_eof(
|
||||
expected,
|
||||
self.iterator.eof_span(),
|
||||
)),
|
||||
@ -87,7 +114,7 @@ impl<'content, 'me> Peeked<'content, 'me> {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn type_error(&self, expected: impl Into<String>) -> ShellError {
|
||||
pub fn type_error(&self, expected: &'static str) -> ParseError {
|
||||
peek_error(&self.node, self.iterator.eof_span(), expected)
|
||||
}
|
||||
}
|
||||
@ -115,19 +142,15 @@ impl<'content, 'me> PeekedNode<'content, 'me> {
|
||||
|
||||
pub fn rollback(self) {}
|
||||
|
||||
pub fn type_error(&self, expected: impl Into<String>) -> ShellError {
|
||||
pub fn type_error(&self, expected: &'static str) -> ParseError {
|
||||
peek_error(&Some(self.node), self.iterator.eof_span(), expected)
|
||||
}
|
||||
}
|
||||
|
||||
pub fn peek_error(
|
||||
node: &Option<&TokenNode>,
|
||||
eof_span: Span,
|
||||
expected: impl Into<String>,
|
||||
) -> ShellError {
|
||||
pub fn peek_error(node: &Option<&TokenNode>, eof_span: Span, expected: &'static str) -> ParseError {
|
||||
match node {
|
||||
None => ShellError::unexpected_eof(expected, eof_span),
|
||||
Some(node) => ShellError::type_error(expected, node.tagged_type_name()),
|
||||
None => ParseError::unexpected_eof(expected, eof_span),
|
||||
Some(node) => ParseError::mismatch(expected, node.tagged_type_name()),
|
||||
}
|
||||
}
|
||||
|
||||
@ -138,13 +161,17 @@ impl<'content> TokensIterator<'content> {
|
||||
skip_ws: bool,
|
||||
) -> TokensIterator<'content> {
|
||||
TokensIterator {
|
||||
tokens: items,
|
||||
span,
|
||||
skip_ws,
|
||||
index: 0,
|
||||
seen: indexmap::IndexSet::new(),
|
||||
#[cfg(coloring_in_tokens)]
|
||||
shapes: vec![],
|
||||
state: TokensIteratorState {
|
||||
tokens: items,
|
||||
span,
|
||||
skip_ws,
|
||||
index: 0,
|
||||
seen: indexmap::IndexSet::new(),
|
||||
#[cfg(coloring_in_tokens)]
|
||||
shapes: vec![],
|
||||
},
|
||||
color_tracer: ColorTracer::new(),
|
||||
expand_tracer: ExpandTracer::new(),
|
||||
}
|
||||
}
|
||||
|
||||
@ -153,7 +180,7 @@ impl<'content> TokensIterator<'content> {
|
||||
}
|
||||
|
||||
pub fn len(&self) -> usize {
|
||||
self.tokens.len()
|
||||
self.state.tokens.len()
|
||||
}
|
||||
|
||||
pub fn spanned<T>(
|
||||
@ -171,35 +198,214 @@ impl<'content> TokensIterator<'content> {
|
||||
|
||||
#[cfg(coloring_in_tokens)]
|
||||
pub fn color_shape(&mut self, shape: Spanned<FlatShape>) {
|
||||
self.shapes.push(shape);
|
||||
self.with_color_tracer(|_, tracer| tracer.add_shape(shape));
|
||||
self.state.shapes.push(shape);
|
||||
}
|
||||
|
||||
#[cfg(coloring_in_tokens)]
|
||||
pub fn mut_shapes(&mut self) -> &mut Vec<Spanned<FlatShape>> {
|
||||
&mut self.shapes
|
||||
pub fn mutate_shapes(&mut self, block: impl FnOnce(&mut Vec<Spanned<FlatShape>>)) {
|
||||
let new_shapes: Vec<Spanned<FlatShape>> = {
|
||||
let shapes = &mut self.state.shapes;
|
||||
let len = shapes.len();
|
||||
block(shapes);
|
||||
(len..(shapes.len())).map(|i| shapes[i]).collect()
|
||||
};
|
||||
|
||||
self.with_color_tracer(|_, tracer| {
|
||||
for shape in new_shapes {
|
||||
tracer.add_shape(shape)
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
#[cfg(coloring_in_tokens)]
|
||||
pub fn child<T>(
|
||||
&mut self,
|
||||
tokens: Spanned<&'content [TokenNode]>,
|
||||
block: impl FnOnce(&mut TokensIterator) -> T,
|
||||
pub fn silently_mutate_shapes(&mut self, block: impl FnOnce(&mut Vec<Spanned<FlatShape>>)) {
|
||||
let shapes = &mut self.state.shapes;
|
||||
block(shapes);
|
||||
}
|
||||
|
||||
#[cfg(coloring_in_tokens)]
|
||||
pub fn sort_shapes(&mut self) {
|
||||
// This is pretty dubious, but it works. We should look into a better algorithm that doesn't end up requiring
|
||||
// this solution.
|
||||
|
||||
self.state
|
||||
.shapes
|
||||
.sort_by(|a, b| a.span.start().cmp(&b.span.start()));
|
||||
}
|
||||
|
||||
#[cfg(coloring_in_tokens)]
|
||||
pub fn child<'me, T>(
|
||||
&'me mut self,
|
||||
tokens: Spanned<&'me [TokenNode]>,
|
||||
block: impl FnOnce(&mut TokensIterator<'me>) -> T,
|
||||
) -> T {
|
||||
let mut shapes = vec![];
|
||||
std::mem::swap(&mut shapes, &mut self.shapes);
|
||||
std::mem::swap(&mut shapes, &mut self.state.shapes);
|
||||
|
||||
let mut color_tracer = ColorTracer::new();
|
||||
std::mem::swap(&mut color_tracer, &mut self.color_tracer);
|
||||
|
||||
let mut expand_tracer = ExpandTracer::new();
|
||||
std::mem::swap(&mut expand_tracer, &mut self.expand_tracer);
|
||||
|
||||
let mut iterator = TokensIterator {
|
||||
tokens: tokens.item,
|
||||
span: tokens.span,
|
||||
skip_ws: false,
|
||||
index: 0,
|
||||
seen: indexmap::IndexSet::new(),
|
||||
shapes,
|
||||
state: TokensIteratorState {
|
||||
tokens: tokens.item,
|
||||
span: tokens.span,
|
||||
skip_ws: false,
|
||||
index: 0,
|
||||
seen: indexmap::IndexSet::new(),
|
||||
shapes,
|
||||
},
|
||||
color_tracer,
|
||||
expand_tracer,
|
||||
};
|
||||
|
||||
let result = block(&mut iterator);
|
||||
|
||||
std::mem::swap(&mut iterator.shapes, &mut self.shapes);
|
||||
std::mem::swap(&mut iterator.state.shapes, &mut self.state.shapes);
|
||||
std::mem::swap(&mut iterator.color_tracer, &mut self.color_tracer);
|
||||
std::mem::swap(&mut iterator.expand_tracer, &mut self.expand_tracer);
|
||||
|
||||
result
|
||||
}
|
||||
|
||||
#[cfg(not(coloring_in_tokens))]
|
||||
pub fn child<'me, T>(
|
||||
&'me mut self,
|
||||
tokens: Spanned<&'me [TokenNode]>,
|
||||
block: impl FnOnce(&mut TokensIterator<'me>) -> T,
|
||||
) -> T {
|
||||
let mut color_tracer = ColorTracer::new();
|
||||
std::mem::swap(&mut color_tracer, &mut self.color_tracer);
|
||||
|
||||
let mut expand_tracer = ExpandTracer::new();
|
||||
std::mem::swap(&mut expand_tracer, &mut self.expand_tracer);
|
||||
|
||||
let mut iterator = TokensIterator {
|
||||
state: TokensIteratorState {
|
||||
tokens: tokens.item,
|
||||
span: tokens.span,
|
||||
skip_ws: false,
|
||||
index: 0,
|
||||
seen: indexmap::IndexSet::new(),
|
||||
},
|
||||
color_tracer,
|
||||
expand_tracer,
|
||||
};
|
||||
|
||||
let result = block(&mut iterator);
|
||||
|
||||
std::mem::swap(&mut iterator.color_tracer, &mut self.color_tracer);
|
||||
std::mem::swap(&mut iterator.expand_tracer, &mut self.expand_tracer);
|
||||
|
||||
result
|
||||
}
|
||||
|
||||
pub fn with_color_tracer(
|
||||
&mut self,
|
||||
block: impl FnOnce(&mut TokensIteratorState, &mut ColorTracer),
|
||||
) {
|
||||
let state = &mut self.state;
|
||||
let color_tracer = &mut self.color_tracer;
|
||||
|
||||
block(state, color_tracer)
|
||||
}
|
||||
|
||||
pub fn with_expand_tracer(
|
||||
&mut self,
|
||||
block: impl FnOnce(&mut TokensIteratorState, &mut ExpandTracer),
|
||||
) {
|
||||
let state = &mut self.state;
|
||||
let tracer = &mut self.expand_tracer;
|
||||
|
||||
block(state, tracer)
|
||||
}
|
||||
|
||||
#[cfg(coloring_in_tokens)]
|
||||
pub fn color_frame<T>(
|
||||
&mut self,
|
||||
desc: &'static str,
|
||||
block: impl FnOnce(&mut TokensIterator) -> T,
|
||||
) -> T {
|
||||
self.with_color_tracer(|_, tracer| tracer.start(desc));
|
||||
|
||||
let result = block(self);
|
||||
|
||||
self.with_color_tracer(|_, tracer| {
|
||||
tracer.success();
|
||||
});
|
||||
|
||||
result
|
||||
}
|
||||
|
||||
pub fn expand_frame<T>(
|
||||
&mut self,
|
||||
desc: &'static str,
|
||||
block: impl FnOnce(&mut TokensIterator) -> Result<T, ParseError>,
|
||||
) -> Result<T, ParseError>
|
||||
where
|
||||
T: std::fmt::Debug + FormatDebug + Clone + HasFallibleSpan + 'static,
|
||||
{
|
||||
self.with_expand_tracer(|_, tracer| tracer.start(desc));
|
||||
|
||||
let result = block(self);
|
||||
|
||||
self.with_expand_tracer(|_, tracer| match &result {
|
||||
Ok(result) => {
|
||||
tracer.add_result(Box::new(result.clone()));
|
||||
tracer.success();
|
||||
}
|
||||
|
||||
Err(err) => tracer.failed(err),
|
||||
});
|
||||
|
||||
result
|
||||
}
|
||||
|
||||
pub fn expand_expr_frame(
|
||||
&mut self,
|
||||
desc: &'static str,
|
||||
block: impl FnOnce(&mut TokensIterator) -> Result<Expression, ParseError>,
|
||||
) -> Result<Expression, ParseError> {
|
||||
self.with_expand_tracer(|_, tracer| tracer.start(desc));
|
||||
|
||||
let result = block(self);
|
||||
|
||||
self.with_expand_tracer(|_, tracer| match &result {
|
||||
Ok(expr) => {
|
||||
tracer.add_expr(expr.clone());
|
||||
tracer.success()
|
||||
}
|
||||
|
||||
Err(err) => tracer.failed(err),
|
||||
});
|
||||
|
||||
result
|
||||
}
|
||||
|
||||
pub fn color_fallible_frame<T>(
|
||||
&mut self,
|
||||
desc: &'static str,
|
||||
block: impl FnOnce(&mut TokensIterator) -> Result<T, ShellError>,
|
||||
) -> Result<T, ShellError> {
|
||||
self.with_color_tracer(|_, tracer| tracer.start(desc));
|
||||
|
||||
if self.at_end() {
|
||||
self.with_color_tracer(|_, tracer| tracer.eof_frame());
|
||||
return Err(ShellError::unexpected_eof("coloring", Tag::unknown()));
|
||||
}
|
||||
|
||||
let result = block(self);
|
||||
|
||||
self.with_color_tracer(|_, tracer| match &result {
|
||||
Ok(_) => {
|
||||
tracer.success();
|
||||
}
|
||||
|
||||
Err(err) => tracer.failed(err),
|
||||
});
|
||||
|
||||
result
|
||||
}
|
||||
@ -207,10 +413,12 @@ impl<'content> TokensIterator<'content> {
|
||||
/// Use a checkpoint when you need to peek more than one token ahead, but can't be sure
|
||||
/// that you'll succeed.
|
||||
pub fn checkpoint<'me>(&'me mut self) -> Checkpoint<'content, 'me> {
|
||||
let index = self.index;
|
||||
let state = &mut self.state;
|
||||
|
||||
let index = state.index;
|
||||
#[cfg(coloring_in_tokens)]
|
||||
let shape_start = self.shapes.len();
|
||||
let seen = self.seen.clone();
|
||||
let shape_start = state.shapes.len();
|
||||
let seen = state.seen.clone();
|
||||
|
||||
Checkpoint {
|
||||
iterator: self,
|
||||
@ -228,10 +436,12 @@ impl<'content> TokensIterator<'content> {
|
||||
&'me mut self,
|
||||
block: impl FnOnce(&mut TokensIterator<'content>) -> Result<T, ShellError>,
|
||||
) -> Result<T, ShellError> {
|
||||
let index = self.index;
|
||||
let state = &mut self.state;
|
||||
|
||||
let index = state.index;
|
||||
#[cfg(coloring_in_tokens)]
|
||||
let shape_start = self.shapes.len();
|
||||
let seen = self.seen.clone();
|
||||
let shape_start = state.shapes.len();
|
||||
let seen = state.seen.clone();
|
||||
|
||||
let checkpoint = Checkpoint {
|
||||
iterator: self,
|
||||
@ -255,11 +465,11 @@ impl<'content> TokensIterator<'content> {
|
||||
&'me mut self,
|
||||
block: impl FnOnce(&mut TokensIterator<'content>) -> Result<T, ShellError>,
|
||||
) -> (Result<T, ShellError>, Vec<Spanned<FlatShape>>) {
|
||||
let index = self.index;
|
||||
let index = self.state.index;
|
||||
let mut shapes = vec![];
|
||||
|
||||
let seen = self.seen.clone();
|
||||
std::mem::swap(&mut self.shapes, &mut shapes);
|
||||
let seen = self.state.seen.clone();
|
||||
std::mem::swap(&mut self.state.shapes, &mut shapes);
|
||||
|
||||
let checkpoint = Checkpoint {
|
||||
iterator: self,
|
||||
@ -274,7 +484,7 @@ impl<'content> TokensIterator<'content> {
|
||||
let value = match value {
|
||||
Err(err) => {
|
||||
drop(checkpoint);
|
||||
std::mem::swap(&mut self.shapes, &mut shapes);
|
||||
std::mem::swap(&mut self.state.shapes, &mut shapes);
|
||||
return (Err(err), vec![]);
|
||||
}
|
||||
|
||||
@ -282,12 +492,12 @@ impl<'content> TokensIterator<'content> {
|
||||
};
|
||||
|
||||
checkpoint.commit();
|
||||
std::mem::swap(&mut self.shapes, &mut shapes);
|
||||
std::mem::swap(&mut self.state.shapes, &mut shapes);
|
||||
return (Ok(value), shapes);
|
||||
}
|
||||
|
||||
fn eof_span(&self) -> Span {
|
||||
Span::new(self.span.end(), self.span.end())
|
||||
Span::new(self.state.span.end(), self.state.span.end())
|
||||
}
|
||||
|
||||
pub fn typed_span_at_cursor(&mut self) -> Spanned<&'static str> {
|
||||
@ -309,11 +519,11 @@ impl<'content> TokensIterator<'content> {
|
||||
}
|
||||
|
||||
pub fn remove(&mut self, position: usize) {
|
||||
self.seen.insert(position);
|
||||
self.state.seen.insert(position);
|
||||
}
|
||||
|
||||
pub fn at_end(&self) -> bool {
|
||||
peek(self, self.skip_ws).is_none()
|
||||
peek(self, self.state.skip_ws).is_none()
|
||||
}
|
||||
|
||||
pub fn at_end_possible_ws(&self) -> bool {
|
||||
@ -321,13 +531,15 @@ impl<'content> TokensIterator<'content> {
|
||||
}
|
||||
|
||||
pub fn advance(&mut self) {
|
||||
self.seen.insert(self.index);
|
||||
self.index += 1;
|
||||
self.state.seen.insert(self.state.index);
|
||||
self.state.index += 1;
|
||||
}
|
||||
|
||||
pub fn extract<T>(&mut self, f: impl Fn(&TokenNode) -> Option<T>) -> Option<(usize, T)> {
|
||||
for (i, item) in self.tokens.iter().enumerate() {
|
||||
if self.seen.contains(&i) {
|
||||
let state = &mut self.state;
|
||||
|
||||
for (i, item) in state.tokens.iter().enumerate() {
|
||||
if state.seen.contains(&i) {
|
||||
continue;
|
||||
}
|
||||
|
||||
@ -336,7 +548,7 @@ impl<'content> TokensIterator<'content> {
|
||||
continue;
|
||||
}
|
||||
Some(value) => {
|
||||
self.seen.insert(i);
|
||||
state.seen.insert(i);
|
||||
return Some((i, value));
|
||||
}
|
||||
}
|
||||
@ -346,30 +558,29 @@ impl<'content> TokensIterator<'content> {
|
||||
}
|
||||
|
||||
pub fn move_to(&mut self, pos: usize) {
|
||||
self.index = pos;
|
||||
self.state.index = pos;
|
||||
}
|
||||
|
||||
pub fn restart(&mut self) {
|
||||
self.index = 0;
|
||||
self.state.index = 0;
|
||||
}
|
||||
|
||||
pub fn clone(&self) -> TokensIterator<'content> {
|
||||
TokensIterator {
|
||||
tokens: self.tokens,
|
||||
span: self.span,
|
||||
index: self.index,
|
||||
seen: self.seen.clone(),
|
||||
skip_ws: self.skip_ws,
|
||||
#[cfg(coloring_in_tokens)]
|
||||
shapes: self.shapes.clone(),
|
||||
}
|
||||
}
|
||||
|
||||
// Get the next token, not including whitespace
|
||||
pub fn next_non_ws(&mut self) -> Option<&TokenNode> {
|
||||
let mut peeked = start_next(self, true);
|
||||
peeked.commit()
|
||||
}
|
||||
// pub fn clone(&self) -> TokensIterator<'content> {
|
||||
// let state = &self.state;
|
||||
// TokensIterator {
|
||||
// state: TokensIteratorState {
|
||||
// tokens: state.tokens,
|
||||
// span: state.span,
|
||||
// index: state.index,
|
||||
// seen: state.seen.clone(),
|
||||
// skip_ws: state.skip_ws,
|
||||
// #[cfg(coloring_in_tokens)]
|
||||
// shapes: state.shapes.clone(),
|
||||
// },
|
||||
// color_tracer: self.color_tracer.clone(),
|
||||
// expand_tracer: self.expand_tracer.clone(),
|
||||
// }
|
||||
// }
|
||||
|
||||
// Peek the next token, not including whitespace
|
||||
pub fn peek_non_ws<'me>(&'me mut self) -> Peeked<'content, 'me> {
|
||||
@ -384,10 +595,11 @@ impl<'content> TokensIterator<'content> {
|
||||
// Peek the next token, including whitespace, but not EOF
|
||||
pub fn peek_any_token<'me, T>(
|
||||
&'me mut self,
|
||||
block: impl FnOnce(&'content TokenNode) -> Result<T, ShellError>,
|
||||
) -> Result<T, ShellError> {
|
||||
expected: &'static str,
|
||||
block: impl FnOnce(&'content TokenNode) -> Result<T, ParseError>,
|
||||
) -> Result<T, ParseError> {
|
||||
let peeked = start_next(self, false);
|
||||
let peeked = peeked.not_eof("invariant");
|
||||
let peeked = peeked.not_eof(expected);
|
||||
|
||||
match peeked {
|
||||
Err(err) => return Err(err),
|
||||
@ -403,10 +615,10 @@ impl<'content> TokensIterator<'content> {
|
||||
|
||||
fn commit(&mut self, from: usize, to: usize) {
|
||||
for index in from..to {
|
||||
self.seen.insert(index);
|
||||
self.state.seen.insert(index);
|
||||
}
|
||||
|
||||
self.index = to;
|
||||
self.state.index = to;
|
||||
}
|
||||
|
||||
pub fn pos(&self, skip_ws: bool) -> Option<usize> {
|
||||
@ -414,17 +626,19 @@ impl<'content> TokensIterator<'content> {
|
||||
}
|
||||
|
||||
pub fn debug_remaining(&self) -> Vec<TokenNode> {
|
||||
let mut tokens = self.clone();
|
||||
tokens.restart();
|
||||
tokens.cloned().collect()
|
||||
// TODO: TODO: TODO: Clean up
|
||||
vec![]
|
||||
// let mut tokens = self.clone();
|
||||
// tokens.restart();
|
||||
// tokens.cloned().collect()
|
||||
}
|
||||
}
|
||||
|
||||
impl<'content> Iterator for TokensIterator<'content> {
|
||||
type Item = &'content TokenNode;
|
||||
|
||||
fn next(&mut self) -> Option<&'content TokenNode> {
|
||||
next(self, self.skip_ws)
|
||||
fn next(&mut self) -> Option<Self::Item> {
|
||||
next(self, self.state.skip_ws)
|
||||
}
|
||||
}
|
||||
|
||||
@ -432,23 +646,25 @@ fn peek<'content, 'me>(
|
||||
iterator: &'me TokensIterator<'content>,
|
||||
skip_ws: bool,
|
||||
) -> Option<&'me TokenNode> {
|
||||
let mut to = iterator.index;
|
||||
let state = iterator.state();
|
||||
|
||||
let mut to = state.index;
|
||||
|
||||
loop {
|
||||
if to >= iterator.tokens.len() {
|
||||
if to >= state.tokens.len() {
|
||||
return None;
|
||||
}
|
||||
|
||||
if iterator.seen.contains(&to) {
|
||||
if state.seen.contains(&to) {
|
||||
to += 1;
|
||||
continue;
|
||||
}
|
||||
|
||||
if to >= iterator.tokens.len() {
|
||||
if to >= state.tokens.len() {
|
||||
return None;
|
||||
}
|
||||
|
||||
let node = &iterator.tokens[to];
|
||||
let node = &state.tokens[to];
|
||||
|
||||
match node {
|
||||
TokenNode::Whitespace(_) if skip_ws => {
|
||||
@ -465,23 +681,25 @@ fn peek_pos<'content, 'me>(
|
||||
iterator: &'me TokensIterator<'content>,
|
||||
skip_ws: bool,
|
||||
) -> Option<usize> {
|
||||
let mut to = iterator.index;
|
||||
let state = iterator.state();
|
||||
|
||||
let mut to = state.index;
|
||||
|
||||
loop {
|
||||
if to >= iterator.tokens.len() {
|
||||
if to >= state.tokens.len() {
|
||||
return None;
|
||||
}
|
||||
|
||||
if iterator.seen.contains(&to) {
|
||||
if state.seen.contains(&to) {
|
||||
to += 1;
|
||||
continue;
|
||||
}
|
||||
|
||||
if to >= iterator.tokens.len() {
|
||||
if to >= state.tokens.len() {
|
||||
return None;
|
||||
}
|
||||
|
||||
let node = &iterator.tokens[to];
|
||||
let node = &state.tokens[to];
|
||||
|
||||
match node {
|
||||
TokenNode::Whitespace(_) if skip_ws => {
|
||||
@ -496,11 +714,13 @@ fn start_next<'content, 'me>(
|
||||
iterator: &'me mut TokensIterator<'content>,
|
||||
skip_ws: bool,
|
||||
) -> Peeked<'content, 'me> {
|
||||
let from = iterator.index;
|
||||
let mut to = iterator.index;
|
||||
let state = iterator.state();
|
||||
|
||||
let from = state.index;
|
||||
let mut to = state.index;
|
||||
|
||||
loop {
|
||||
if to >= iterator.tokens.len() {
|
||||
if to >= state.tokens.len() {
|
||||
return Peeked {
|
||||
node: None,
|
||||
iterator,
|
||||
@ -509,12 +729,12 @@ fn start_next<'content, 'me>(
|
||||
};
|
||||
}
|
||||
|
||||
if iterator.seen.contains(&to) {
|
||||
if state.seen.contains(&to) {
|
||||
to += 1;
|
||||
continue;
|
||||
}
|
||||
|
||||
if to >= iterator.tokens.len() {
|
||||
if to >= state.tokens.len() {
|
||||
return Peeked {
|
||||
node: None,
|
||||
iterator,
|
||||
@ -523,7 +743,7 @@ fn start_next<'content, 'me>(
|
||||
};
|
||||
}
|
||||
|
||||
let node = &iterator.tokens[to];
|
||||
let node = &state.tokens[to];
|
||||
|
||||
match node {
|
||||
TokenNode::Whitespace(_) if skip_ws => {
|
||||
@ -547,20 +767,20 @@ fn next<'me, 'content>(
|
||||
skip_ws: bool,
|
||||
) -> Option<&'content TokenNode> {
|
||||
loop {
|
||||
if iterator.index >= iterator.tokens.len() {
|
||||
if iterator.state().index >= iterator.state().tokens.len() {
|
||||
return None;
|
||||
}
|
||||
|
||||
if iterator.seen.contains(&iterator.index) {
|
||||
if iterator.state().seen.contains(&iterator.state().index) {
|
||||
iterator.advance();
|
||||
continue;
|
||||
}
|
||||
|
||||
if iterator.index >= iterator.tokens.len() {
|
||||
if iterator.state().index >= iterator.state().tokens.len() {
|
||||
return None;
|
||||
}
|
||||
|
||||
match &iterator.tokens[iterator.index] {
|
||||
match &iterator.state().tokens[iterator.state().index] {
|
||||
TokenNode::Whitespace(_) if skip_ws => {
|
||||
iterator.advance();
|
||||
}
|
||||
|
@ -1,4 +1,12 @@
|
||||
use crate::parser::hir::tokens_iterator::TokensIterator;
|
||||
#![allow(unused)]
|
||||
|
||||
pub(crate) mod color_trace;
|
||||
pub(crate) mod expand_trace;
|
||||
|
||||
pub(crate) use self::color_trace::*;
|
||||
pub(crate) use self::expand_trace::*;
|
||||
|
||||
use crate::parser::hir::tokens_iterator::TokensIteratorState;
|
||||
use crate::traits::ToDebug;
|
||||
|
||||
#[derive(Debug)]
|
||||
@ -8,15 +16,15 @@ pub(crate) enum DebugIteratorToken {
|
||||
Cursor,
|
||||
}
|
||||
|
||||
pub(crate) fn debug_tokens(iterator: &TokensIterator, source: &str) -> Vec<DebugIteratorToken> {
|
||||
pub(crate) fn debug_tokens(state: &TokensIteratorState, source: &str) -> Vec<DebugIteratorToken> {
|
||||
let mut out = vec![];
|
||||
|
||||
for (i, token) in iterator.tokens.iter().enumerate() {
|
||||
if iterator.index == i {
|
||||
for (i, token) in state.tokens.iter().enumerate() {
|
||||
if state.index == i {
|
||||
out.push(DebugIteratorToken::Cursor);
|
||||
}
|
||||
|
||||
if iterator.seen.contains(&i) {
|
||||
if state.seen.contains(&i) {
|
||||
out.push(DebugIteratorToken::Seen(format!("{}", token.debug(source))));
|
||||
} else {
|
||||
out.push(DebugIteratorToken::Unseen(format!(
|
||||
|
351 src/parser/hir/tokens_iterator/debug/color_trace.rs Normal file
@ -0,0 +1,351 @@
|
||||
use crate::errors::ShellError;
|
||||
use crate::parser::hir::syntax_shape::FlatShape;
|
||||
use crate::prelude::*;
|
||||
use ansi_term::Color;
|
||||
use log::trace;
|
||||
use ptree::*;
|
||||
use std::borrow::Cow;
|
||||
use std::io;
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub enum FrameChild {
|
||||
#[allow(unused)]
|
||||
Shape(Spanned<FlatShape>),
|
||||
Frame(ColorFrame),
|
||||
}
|
||||
|
||||
impl FrameChild {
|
||||
fn colored_leaf_description(&self, text: &Text, f: &mut impl io::Write) -> io::Result<()> {
|
||||
match self {
|
||||
FrameChild::Shape(shape) => write!(
|
||||
f,
|
||||
"{} {:?}",
|
||||
Color::White
|
||||
.bold()
|
||||
.on(Color::Green)
|
||||
.paint(format!("{:?}", shape.item)),
|
||||
shape.span.slice(text)
|
||||
),
|
||||
|
||||
FrameChild::Frame(frame) => frame.colored_leaf_description(f),
|
||||
}
|
||||
}
|
||||
|
||||
fn into_tree_child(self, text: &Text) -> TreeChild {
|
||||
match self {
|
||||
FrameChild::Shape(shape) => TreeChild::Shape(shape, text.clone()),
|
||||
FrameChild::Frame(frame) => TreeChild::Frame(frame, text.clone()),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct ColorFrame {
|
||||
description: &'static str,
|
||||
children: Vec<FrameChild>,
|
||||
error: Option<ShellError>,
|
||||
}
|
||||
|
||||
impl ColorFrame {
|
||||
fn colored_leaf_description(&self, f: &mut impl io::Write) -> io::Result<()> {
|
||||
if self.has_only_error_descendents() {
|
||||
if self.children.len() == 0 {
|
||||
write!(
|
||||
f,
|
||||
"{}",
|
||||
Color::White.bold().on(Color::Red).paint(self.description)
|
||||
)
|
||||
} else {
|
||||
write!(f, "{}", Color::Red.normal().paint(self.description))
|
||||
}
|
||||
} else if self.has_descendent_shapes() {
|
||||
write!(f, "{}", Color::Green.normal().paint(self.description))
|
||||
} else {
|
||||
write!(f, "{}", Color::Yellow.bold().paint(self.description))
|
||||
}
|
||||
}
|
||||
|
||||
fn colored_description(&self, text: &Text, f: &mut impl io::Write) -> io::Result<()> {
|
||||
if self.children.len() == 1 {
|
||||
let child = &self.children[0];
|
||||
|
||||
self.colored_leaf_description(f)?;
|
||||
write!(f, " -> ")?;
|
||||
child.colored_leaf_description(text, f)
|
||||
} else {
|
||||
self.colored_leaf_description(f)
|
||||
}
|
||||
}
|
||||
|
||||
fn children_for_formatting(&self, text: &Text) -> Vec<TreeChild> {
|
||||
if self.children.len() == 1 {
|
||||
let child = &self.children[0];
|
||||
|
||||
match child {
|
||||
FrameChild::Shape(_) => vec![],
|
||||
FrameChild::Frame(frame) => frame.tree_children(text),
|
||||
}
|
||||
} else {
|
||||
self.tree_children(text)
|
||||
}
|
||||
}
|
||||
|
||||
fn tree_children(&self, text: &Text) -> Vec<TreeChild> {
|
||||
self.children
|
||||
.clone()
|
||||
.into_iter()
|
||||
.map(|c| c.into_tree_child(text))
|
||||
.collect()
|
||||
}
|
||||
|
||||
#[allow(unused)]
|
||||
fn add_shape(&mut self, shape: Spanned<FlatShape>) {
|
||||
self.children.push(FrameChild::Shape(shape))
|
||||
}
|
||||
|
||||
fn has_child_shapes(&self) -> bool {
|
||||
self.any_child_shape(|_| true)
|
||||
}
|
||||
|
||||
fn any_child_shape(&self, predicate: impl Fn(Spanned<FlatShape>) -> bool) -> bool {
|
||||
for item in &self.children {
|
||||
match item {
|
||||
FrameChild::Shape(shape) => {
|
||||
if predicate(*shape) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
|
||||
false
|
||||
}
|
||||
|
||||
fn any_child_frame(&self, predicate: impl Fn(&ColorFrame) -> bool) -> bool {
|
||||
for item in &self.children {
|
||||
match item {
|
||||
FrameChild::Frame(frame) => {
|
||||
if predicate(frame) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
|
||||
false
|
||||
}
|
||||
|
||||
fn has_descendent_shapes(&self) -> bool {
|
||||
if self.has_child_shapes() {
|
||||
true
|
||||
} else {
|
||||
self.any_child_frame(|frame| frame.has_descendent_shapes())
|
||||
}
|
||||
}
|
||||
|
||||
fn has_only_error_descendents(&self) -> bool {
|
||||
if self.children.len() == 0 {
|
||||
// if this frame has no children at all, it has only error descendents if this frame
|
||||
// is an error
|
||||
self.error.is_some()
|
||||
} else {
|
||||
// otherwise, it has only error descendents if all of its children terminate in an
|
||||
// error (transitively)
|
||||
|
||||
let mut seen_error = false;
|
||||
|
||||
for child in &self.children {
|
||||
match child {
|
||||
// if this frame has at least one child shape, this frame has non-error descendents
|
||||
FrameChild::Shape(_) => return false,
|
||||
FrameChild::Frame(frame) => {
|
||||
// if the child frame itself ends only in errors, remember that we saw an error
|
||||
if frame.has_only_error_descendents() {
|
||||
seen_error = true;
|
||||
} else {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
seen_error
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub enum TreeChild {
|
||||
Shape(Spanned<FlatShape>, Text),
|
||||
Frame(ColorFrame, Text),
|
||||
}
|
||||
|
||||
impl TreeChild {
|
||||
fn colored_leaf_description(&self, f: &mut impl io::Write) -> io::Result<()> {
|
||||
match self {
|
||||
TreeChild::Shape(shape, text) => write!(
|
||||
f,
|
||||
"{} {:?}",
|
||||
Color::White
|
||||
.bold()
|
||||
.on(Color::Green)
|
||||
.paint(format!("{:?}", shape.item)),
|
||||
shape.span.slice(text)
|
||||
),
|
||||
|
||||
TreeChild::Frame(frame, _) => frame.colored_leaf_description(f),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl TreeItem for TreeChild {
|
||||
type Child = TreeChild;
|
||||
|
||||
fn write_self<W: io::Write>(&self, f: &mut W, _style: &Style) -> io::Result<()> {
|
||||
match self {
|
||||
shape @ TreeChild::Shape(..) => shape.colored_leaf_description(f),
|
||||
|
||||
TreeChild::Frame(frame, text) => frame.colored_description(text, f),
|
||||
}
|
||||
}
|
||||
|
||||
fn children(&self) -> Cow<[Self::Child]> {
|
||||
match self {
|
||||
TreeChild::Shape(..) => Cow::Borrowed(&[]),
|
||||
TreeChild::Frame(frame, text) => Cow::Owned(frame.children_for_formatting(text)),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct ColorTracer {
|
||||
frame_stack: Vec<ColorFrame>,
|
||||
}
|
||||
|
||||
impl ColorTracer {
|
||||
pub fn print(self, source: Text) -> PrintTracer {
|
||||
PrintTracer {
|
||||
tracer: self,
|
||||
source,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn new() -> ColorTracer {
|
||||
let root = ColorFrame {
|
||||
description: "Trace",
|
||||
children: vec![],
|
||||
error: None,
|
||||
};
|
||||
|
||||
ColorTracer {
|
||||
frame_stack: vec![root],
|
||||
}
|
||||
}
|
||||
|
||||
fn current_frame(&mut self) -> &mut ColorFrame {
|
||||
let frames = &mut self.frame_stack;
|
||||
let last = frames.len() - 1;
|
||||
&mut frames[last]
|
||||
}
|
||||
|
||||
fn pop_frame(&mut self) -> ColorFrame {
|
||||
trace!(target: "nu::color_syntax", "Popping {:#?}", self);
|
||||
|
||||
let result = self.frame_stack.pop().expect("Can't pop root tracer frame");
|
||||
|
||||
if self.frame_stack.len() == 0 {
|
||||
panic!("Can't pop root tracer frame {:#?}", self);
|
||||
}
|
||||
|
||||
self.debug();
|
||||
|
||||
result
|
||||
}
|
||||
|
||||
pub fn start(&mut self, description: &'static str) {
|
||||
let frame = ColorFrame {
|
||||
description,
|
||||
children: vec![],
|
||||
error: None,
|
||||
};
|
||||
|
||||
self.frame_stack.push(frame);
|
||||
self.debug();
|
||||
}
|
||||
|
||||
pub fn eof_frame(&mut self) {
|
||||
let current = self.pop_frame();
|
||||
self.current_frame()
|
||||
.children
|
||||
.push(FrameChild::Frame(current));
|
||||
}
|
||||
|
||||
#[allow(unused)]
|
||||
pub fn finish(&mut self) {
|
||||
loop {
|
||||
if self.frame_stack.len() == 1 {
|
||||
break;
|
||||
}
|
||||
|
||||
let frame = self.pop_frame();
|
||||
self.current_frame().children.push(FrameChild::Frame(frame));
|
||||
}
|
||||
}
|
||||
|
||||
#[allow(unused)]
|
||||
pub fn add_shape(&mut self, shape: Spanned<FlatShape>) {
|
||||
self.current_frame().add_shape(shape);
|
||||
}
|
||||
|
||||
pub fn success(&mut self) {
|
||||
let current = self.pop_frame();
|
||||
self.current_frame()
|
||||
.children
|
||||
.push(FrameChild::Frame(current));
|
||||
}
|
||||
|
||||
pub fn failed(&mut self, error: &ShellError) {
|
||||
let mut current = self.pop_frame();
|
||||
current.error = Some(error.clone());
|
||||
self.current_frame()
|
||||
.children
|
||||
.push(FrameChild::Frame(current));
|
||||
}
|
||||
|
||||
fn debug(&self) {
|
||||
trace!(target: "nu::color_syntax",
|
||||
"frames = {:?}",
|
||||
self.frame_stack
|
||||
.iter()
|
||||
.map(|f| f.description)
|
||||
.collect::<Vec<_>>()
|
||||
);
|
||||
|
||||
trace!(target: "nu::color_syntax", "{:#?}", self);
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct PrintTracer {
|
||||
tracer: ColorTracer,
|
||||
source: Text,
|
||||
}
|
||||
|
||||
impl TreeItem for PrintTracer {
|
||||
type Child = TreeChild;
|
||||
|
||||
fn write_self<W: io::Write>(&self, f: &mut W, style: &Style) -> io::Result<()> {
|
||||
write!(f, "{}", style.paint("Color Trace"))
|
||||
}
|
||||
|
||||
fn children(&self) -> Cow<[Self::Child]> {
|
||||
Cow::Owned(vec![TreeChild::Frame(
|
||||
self.tracer.frame_stack[0].clone(),
|
||||
self.source.clone(),
|
||||
)])
|
||||
}
|
||||
}
|
365 src/parser/hir/tokens_iterator/debug/expand_trace.rs Normal file
@ -0,0 +1,365 @@
|
||||
use crate::parser::hir::Expression;
|
||||
use crate::prelude::*;
|
||||
use ansi_term::Color;
|
||||
use log::trace;
|
||||
use ptree::*;
|
||||
use std::borrow::Cow;
|
||||
use std::io;
|
||||
|
||||
#[derive(Debug)]
|
||||
pub enum FrameChild {
|
||||
Expr(Expression),
|
||||
Frame(ExprFrame),
|
||||
Result(Box<dyn FormatDebug>),
|
||||
}
|
||||
|
||||
impl FrameChild {
|
||||
fn get_error_leaf(&self) -> Option<&'static str> {
|
||||
match self {
|
||||
FrameChild::Frame(frame) if frame.error.is_some() => {
|
||||
if frame.children.len() == 0 {
|
||||
Some(frame.description)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
|
||||
fn to_tree_child(&self, text: &Text) -> TreeChild {
|
||||
match self {
|
||||
FrameChild::Expr(expr) => TreeChild::OkExpr(expr.clone(), text.clone()),
|
||||
FrameChild::Result(result) => {
|
||||
let result = format!("{}", result.debug(text));
|
||||
TreeChild::OkNonExpr(result)
|
||||
}
|
||||
FrameChild::Frame(frame) => {
|
||||
if frame.error.is_some() {
|
||||
if frame.children.len() == 0 {
|
||||
TreeChild::ErrorLeaf(vec![frame.description])
|
||||
} else {
|
||||
TreeChild::ErrorFrame(frame.to_tree_frame(text), text.clone())
|
||||
}
|
||||
} else {
|
||||
TreeChild::OkFrame(frame.to_tree_frame(text), text.clone())
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct ExprFrame {
|
||||
description: &'static str,
|
||||
children: Vec<FrameChild>,
|
||||
error: Option<ParseError>,
|
||||
}
|
||||
|
||||
impl ExprFrame {
|
||||
fn to_tree_frame(&self, text: &Text) -> TreeFrame {
|
||||
let mut children = vec![];
|
||||
let mut errors = vec![];
|
||||
|
||||
for child in &self.children {
|
||||
if let Some(error_leaf) = child.get_error_leaf() {
|
||||
errors.push(error_leaf);
|
||||
continue;
|
||||
} else if errors.len() > 0 {
|
||||
children.push(TreeChild::ErrorLeaf(errors));
|
||||
errors = vec![];
|
||||
}
|
||||
|
||||
children.push(child.to_tree_child(text));
|
||||
}
|
||||
|
||||
if errors.len() > 0 {
|
||||
children.push(TreeChild::ErrorLeaf(errors));
|
||||
}
|
||||
|
||||
TreeFrame {
|
||||
description: self.description,
|
||||
children,
|
||||
error: self.error.clone(),
|
||||
}
|
||||
}
|
||||
|
||||
fn add_expr(&mut self, expr: Expression) {
|
||||
self.children.push(FrameChild::Expr(expr))
|
||||
}
|
||||
|
||||
fn add_result(&mut self, result: Box<dyn FormatDebug>) {
|
||||
self.children.push(FrameChild::Result(result))
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct TreeFrame {
|
||||
description: &'static str,
|
||||
children: Vec<TreeChild>,
|
||||
error: Option<ParseError>,
|
||||
}
|
||||
|
||||
impl TreeFrame {
|
||||
fn leaf_description(&self, f: &mut impl io::Write) -> io::Result<()> {
|
||||
if self.children.len() == 1 {
|
||||
if self.error.is_some() {
|
||||
write!(f, "{}", Color::Red.normal().paint(self.description))?;
|
||||
} else if self.has_descendent_green() {
|
||||
write!(f, "{}", Color::Green.normal().paint(self.description))?;
|
||||
} else {
|
||||
write!(f, "{}", Color::Yellow.bold().paint(self.description))?;
|
||||
}
|
||||
|
||||
write!(f, " -> ")?;
|
||||
self.children[0].leaf_description(f)
|
||||
} else {
|
||||
if self.error.is_some() {
|
||||
if self.children.len() == 0 {
|
||||
write!(
|
||||
f,
|
||||
"{}",
|
||||
Color::White.bold().on(Color::Red).paint(self.description)
|
||||
)
|
||||
} else {
|
||||
write!(f, "{}", Color::Red.normal().paint(self.description))
|
||||
}
|
||||
} else if self.has_descendent_green() {
|
||||
write!(f, "{}", Color::Green.normal().paint(self.description))
|
||||
} else {
|
||||
write!(f, "{}", Color::Yellow.bold().paint(self.description))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn has_child_green(&self) -> bool {
|
||||
self.children.iter().any(|item| match item {
|
||||
TreeChild::OkFrame(..) | TreeChild::ErrorFrame(..) | TreeChild::ErrorLeaf(..) => false,
|
||||
TreeChild::OkExpr(..) | TreeChild::OkNonExpr(..) => true,
|
||||
})
|
||||
}
|
||||
|
||||
fn any_child_frame(&self, predicate: impl Fn(&TreeFrame) -> bool) -> bool {
|
||||
for item in &self.children {
|
||||
match item {
|
||||
TreeChild::OkFrame(frame, ..) => {
|
||||
if predicate(frame) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
|
||||
false
|
||||
}
|
||||
|
||||
fn has_descendent_green(&self) -> bool {
|
||||
if self.has_child_green() {
|
||||
true
|
||||
} else {
|
||||
self.any_child_frame(|frame| frame.has_child_green())
|
||||
}
|
||||
}
|
||||
|
||||
fn children_for_formatting(&self, text: &Text) -> Vec<TreeChild> {
|
||||
if self.children.len() == 1 {
|
||||
let child: &TreeChild = &self.children[0];
|
||||
match child {
|
||||
TreeChild::OkExpr(..) | TreeChild::OkNonExpr(..) | TreeChild::ErrorLeaf(..) => {
|
||||
vec![]
|
||||
}
|
||||
TreeChild::OkFrame(frame, _) | TreeChild::ErrorFrame(frame, _) => {
|
||||
frame.children_for_formatting(text)
|
||||
}
|
||||
}
|
||||
} else {
|
||||
self.children.clone()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub enum TreeChild {
|
||||
OkNonExpr(String),
|
||||
OkExpr(Expression, Text),
|
||||
OkFrame(TreeFrame, Text),
|
||||
ErrorFrame(TreeFrame, Text),
|
||||
ErrorLeaf(Vec<&'static str>),
|
||||
}
|
||||
|
||||
impl TreeChild {
|
||||
fn leaf_description(&self, f: &mut impl io::Write) -> io::Result<()> {
|
||||
match self {
|
||||
TreeChild::OkExpr(expr, text) => write!(
|
||||
f,
|
||||
"{} {} {}",
|
||||
Color::Cyan.normal().paint("returns"),
|
||||
Color::White.bold().on(Color::Green).paint(expr.type_name()),
|
||||
expr.span.slice(text)
|
||||
),
|
||||
|
||||
TreeChild::OkNonExpr(result) => write!(
|
||||
f,
|
||||
"{} {}",
|
||||
Color::Cyan.normal().paint("returns"),
|
||||
Color::White
|
||||
.bold()
|
||||
.on(Color::Green)
|
||||
.paint(format!("{}", result))
|
||||
),
|
||||
|
||||
TreeChild::ErrorLeaf(desc) => {
|
||||
let last = desc.len() - 1;
|
||||
|
||||
for (i, item) in desc.iter().enumerate() {
|
||||
write!(f, "{}", Color::White.bold().on(Color::Red).paint(*item))?;
|
||||
|
||||
if i != last {
|
||||
write!(f, "{}", Color::White.normal().paint(", "))?;
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
TreeChild::ErrorFrame(frame, _) | TreeChild::OkFrame(frame, _) => {
|
||||
frame.leaf_description(f)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl TreeItem for TreeChild {
|
||||
type Child = TreeChild;
|
||||
|
||||
fn write_self<W: io::Write>(&self, f: &mut W, _style: &Style) -> io::Result<()> {
|
||||
self.leaf_description(f)
|
||||
}
|
||||
|
||||
fn children(&self) -> Cow<[Self::Child]> {
|
||||
match self {
|
||||
TreeChild::OkExpr(..) | TreeChild::OkNonExpr(..) | TreeChild::ErrorLeaf(..) => {
|
||||
Cow::Borrowed(&[])
|
||||
}
|
||||
TreeChild::OkFrame(frame, text) | TreeChild::ErrorFrame(frame, text) => {
|
||||
Cow::Owned(frame.children_for_formatting(text))
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct ExpandTracer {
|
||||
frame_stack: Vec<ExprFrame>,
|
||||
}
|
||||
|
||||
impl ExpandTracer {
|
||||
pub fn print(&self, source: Text) -> PrintTracer {
|
||||
let root = self
|
||||
.frame_stack
|
||||
.iter()
|
||||
.nth(0)
|
||||
.unwrap()
|
||||
.to_tree_frame(&source);
|
||||
|
||||
PrintTracer { root, source }
|
||||
}
|
||||
|
||||
pub fn new() -> ExpandTracer {
|
||||
let root = ExprFrame {
|
||||
description: "Trace",
|
||||
children: vec![],
|
||||
error: None,
|
||||
};
|
||||
|
||||
ExpandTracer {
|
||||
frame_stack: vec![root],
|
||||
}
|
||||
}
|
||||
|
||||
fn current_frame(&mut self) -> &mut ExprFrame {
|
||||
let frames = &mut self.frame_stack;
|
||||
let last = frames.len() - 1;
|
||||
&mut frames[last]
|
||||
}
|
||||
|
||||
fn pop_frame(&mut self) -> ExprFrame {
|
||||
let result = self.frame_stack.pop().expect("Can't pop root tracer frame");
|
||||
|
||||
if self.frame_stack.len() == 0 {
|
||||
panic!("Can't pop root tracer frame");
|
||||
}
|
||||
|
||||
self.debug();
|
||||
|
||||
result
|
||||
}
|
||||
|
||||
pub fn start(&mut self, description: &'static str) {
|
||||
let frame = ExprFrame {
|
||||
description,
|
||||
children: vec![],
|
||||
error: None,
|
||||
};
|
||||
|
||||
self.frame_stack.push(frame);
|
||||
self.debug();
|
||||
}
|
||||
|
||||
pub fn add_expr(&mut self, shape: Expression) {
|
||||
self.current_frame().add_expr(shape);
|
||||
}
|
||||
|
||||
pub fn add_result(&mut self, result: Box<dyn FormatDebug>) {
|
||||
self.current_frame().add_result(result);
|
||||
}
|
||||
|
||||
pub fn success(&mut self) {
|
||||
trace!(target: "parser::expand_syntax", "success {:#?}", self);
|
||||
|
||||
let current = self.pop_frame();
|
||||
self.current_frame()
|
||||
.children
|
||||
.push(FrameChild::Frame(current));
|
||||
}
|
||||
|
||||
pub fn failed(&mut self, error: &ParseError) {
|
||||
let mut current = self.pop_frame();
|
||||
current.error = Some(error.clone());
|
||||
self.current_frame()
|
||||
.children
|
||||
.push(FrameChild::Frame(current));
|
||||
}
|
||||
|
||||
fn debug(&self) {
|
||||
trace!(target: "nu::parser::expand",
|
||||
"frames = {:?}",
|
||||
self.frame_stack
|
||||
.iter()
|
||||
.map(|f| f.description)
|
||||
.collect::<Vec<_>>()
|
||||
);
|
||||
|
||||
trace!(target: "nu::parser::expand", "{:#?}", self);
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct PrintTracer {
|
||||
root: TreeFrame,
|
||||
source: Text,
|
||||
}
|
||||
|
||||
impl TreeItem for PrintTracer {
|
||||
type Child = TreeChild;
|
||||
|
||||
fn write_self<W: io::Write>(&self, f: &mut W, style: &Style) -> io::Result<()> {
|
||||
write!(f, "{}", style.paint("Expansion Trace"))
|
||||
}
|
||||
|
||||
fn children(&self) -> Cow<[Self::Child]> {
|
||||
Cow::Borrowed(&self.root.children)
|
||||
}
|
||||
}
|
16 src/parser/hir/tokens_iterator/tests.rs Normal file
@ -0,0 +1,16 @@
|
||||
use crate::parser::hir::TokensIterator;
|
||||
use crate::parser::parse::token_tree_builder::TokenTreeBuilder as b;
|
||||
use crate::Span;
|
||||
|
||||
#[test]
|
||||
fn supplies_tokens() {
|
||||
let tokens = b::token_list(vec![b::var("it"), b::op("."), b::bare("cpu")]);
|
||||
let (tokens, _) = b::build(tokens);
|
||||
|
||||
let tokens = tokens.expect_list();
|
||||
let mut iterator = TokensIterator::all(tokens, Span::unknown());
|
||||
|
||||
iterator.next().unwrap().expect_var();
|
||||
iterator.next().unwrap().expect_dot();
|
||||
iterator.next().unwrap().expect_bare();
|
||||
}