Mirror of https://github.com/nushell/nushell.git, synced 2024-11-25 18:03:51 +01:00

parent 0a198b9bd0
commit c4daa2e40f

174  Cargo.lock  (generated)

@@ -155,7 +155,7 @@ dependencies = [
  "log",
  "native-tls",
  "openssl",
- "serde 1.0.105",
+ "serde 1.0.106",
  "serde_urlencoded",
  "url",
 ]
@@ -252,7 +252,7 @@ dependencies = [
  "num-bigint",
  "num-integer",
  "num-traits 0.2.11",
- "serde 1.0.105",
+ "serde 1.0.106",
 ]

 [[package]]
@@ -262,7 +262,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "5753e2a71534719bf3f4e57006c3a4f0d2c672a4b676eec84161f763eca87dbf"
 dependencies = [
  "byteorder",
- "serde 1.0.105",
+ "serde 1.0.106",
 ]

 [[package]]
@@ -302,7 +302,7 @@ dependencies = [
  "linked-hash-map 0.5.2",
  "md5",
  "rand",
- "serde 1.0.105",
+ "serde 1.0.106",
  "serde_json",
  "time",
 ]
@@ -316,7 +316,7 @@ dependencies = [
  "lazy_static 1.4.0",
  "memchr",
  "regex-automata",
- "serde 1.0.105",
+ "serde 1.0.106",
 ]

 [[package]]
@@ -379,7 +379,7 @@ dependencies = [
  "encoding_rs",
  "log",
  "quick-xml",
- "serde 1.0.105",
+ "serde 1.0.106",
  "zip",
 ]

@@ -406,7 +406,7 @@ checksum = "80094f509cf8b5ae86a4966a39b3ff66cd7e2a3e594accec3743ff3fabeab5b2"
 dependencies = [
  "num-integer",
  "num-traits 0.2.11",
- "serde 1.0.105",
+ "serde 1.0.106",
  "time",
 ]

@@ -474,7 +474,7 @@ dependencies = [
  "lazy_static 1.4.0",
  "nom 4.2.3",
  "rust-ini",
- "serde 1.0.105",
+ "serde 1.0.106",
  "serde-hjson 0.8.2",
  "serde_json",
  "toml 0.4.10",
@@ -636,7 +636,7 @@ dependencies = [
  "csv-core",
  "itoa",
  "ryu",
- "serde 1.0.105",
+ "serde 1.0.106",
 ]

 [[package]]
@@ -839,39 +839,6 @@ dependencies = [
  "cfg-if",
 ]

-[[package]]
-name = "enumflags2"
-version = "0.6.2"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "33121c8782ba948ba332dab29311b026a8716dc65a1599e5b88f392d38496af8"
-dependencies = [
- "enumflags2_derive",
-]
-
-[[package]]
-name = "enumflags2_derive"
-version = "0.6.2"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ecf634c5213044b8d54a46dd282cf5dd1f86bb5cb53e92c409cb4680a7fb9894"
-dependencies = [
- "proc-macro2",
- "quote",
- "syn",
-]
-
-[[package]]
-name = "env_logger"
-version = "0.6.2"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "aafcde04e90a5226a6443b7aabdb016ba2f8307c847d524724bd9b346dd1a2d3"
-dependencies = [
- "atty",
- "humantime",
- "log",
- "regex",
- "termcolor",
-]
-
 [[package]]
 name = "env_logger"
 version = "0.7.1"
@@ -891,7 +858,7 @@ version = "0.3.10"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "cd7d80305c9bd8cd78e3c753eb9fb110f83621e5211f1a3afffcc812b104daf9"
 dependencies = [
- "serde 1.0.105",
+ "serde 1.0.106",
 ]

 [[package]]
@@ -1553,7 +1520,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "076f042c5b7b98f31d205f1249267e12a6518c1481e9dae9764af19b707d2292"
 dependencies = [
  "autocfg 1.0.0",
- "serde 1.0.105",
+ "serde 1.0.106",
 ]

 [[package]]
@@ -1717,7 +1684,7 @@ dependencies = [
  "itertools 0.7.11",
  "log",
  "render-tree",
- "serde 1.0.105",
+ "serde 1.0.106",
  "serde_derive",
  "termcolor",
 ]
@@ -2038,7 +2005,7 @@ dependencies = [
  "bincode",
  "cfg-if",
  "log",
- "serde 1.0.105",
+ "serde 1.0.106",
  "serde_derive",
  "wasm-bindgen",
 ]
@@ -2169,7 +2136,7 @@ dependencies = [

 [[package]]
 name = "nu"
-version = "0.12.0"
+version = "0.12.1"
 dependencies = [
  "clap",
  "crossterm",
@@ -2180,7 +2147,6 @@ dependencies = [
  "nu-build",
  "nu-cli",
  "nu-errors",
- "nu-macros",
  "nu-parser",
  "nu-plugin",
  "nu-protocol",
@@ -2200,9 +2166,9 @@ dependencies = [
  "nu_plugin_tree",
  "onig_sys",
  "pretty_assertions",
- "pretty_env_logger 0.4.0",
+ "pretty_env_logger",
  "semver",
- "serde 1.0.105",
+ "serde 1.0.106",
  "syntect",
  "toml 0.5.6",
  "url",
@@ -2213,7 +2179,7 @@ name = "nu-build"
 version = "0.12.0"
 dependencies = [
  "lazy_static 1.4.0",
- "serde 1.0.105",
+ "serde 1.0.106",
  "serde_json",
  "toml 0.5.6",
 ]
@@ -2262,7 +2228,6 @@ dependencies = [
  "nom_locate",
  "nu-build",
  "nu-errors",
- "nu-macros",
  "nu-parser",
  "nu-plugin",
  "nu-protocol",
@@ -2275,7 +2240,7 @@ dependencies = [
  "pin-utils",
  "pretty-hex",
  "pretty_assertions",
- "pretty_env_logger 0.4.0",
+ "pretty_env_logger",
  "prettytable-rs",
  "ptree",
  "query_interface",
@@ -2284,14 +2249,14 @@ dependencies = [
  "roxmltree",
  "rusqlite",
  "rustyline",
- "serde 1.0.105",
+ "serde 1.0.106",
  "serde-hjson 0.9.1",
  "serde_bytes",
  "serde_ini",
  "serde_json",
  "serde_urlencoded",
  "serde_yaml",
- "shellexpand 2.0.0",
+ "shellexpand",
  "starship",
  "strip-ansi-escapes",
  "tempfile",
@@ -2321,50 +2286,29 @@ dependencies = [
  "nu-source",
  "num-bigint",
  "num-traits 0.2.11",
- "serde 1.0.105",
+ "serde 1.0.106",
  "serde_json",
  "serde_yaml",
  "toml 0.5.6",
 ]

-[[package]]
-name = "nu-macros"
-version = "0.12.0"
-dependencies = [
- "nu-protocol",
-]
-
 [[package]]
 name = "nu-parser"
 version = "0.12.0"
 dependencies = [
- "ansi_term 0.12.1",
  "bigdecimal",
- "cfg-if",
  "derive-new",
- "enumflags2",
- "getset 0.0.9",
  "indexmap",
- "itertools 0.8.2",
  "language-reporting",
  "log",
- "nom 5.1.1",
- "nom-tracable",
- "nom_locate",
- "nu-build",
  "nu-errors",
  "nu-protocol",
  "nu-source",
  "num-bigint",
  "num-traits 0.2.11",
- "pretty",
- "pretty_assertions",
- "pretty_env_logger 0.3.1",
- "ptree",
- "serde 1.0.105",
- "shellexpand 1.1.1",
- "termcolor",
- "unicode-xid",
+ "parking_lot",
+ "serde 1.0.106",
+ "shellexpand",
 ]

 [[package]]
@@ -2378,7 +2322,7 @@ dependencies = [
  "nu-source",
  "nu-value-ext",
  "num-bigint",
- "serde 1.0.105",
+ "serde 1.0.106",
  "serde_json",
 ]

@@ -2404,7 +2348,7 @@ dependencies = [
  "num-bigint",
  "num-traits 0.2.11",
  "query_interface",
- "serde 1.0.105",
+ "serde 1.0.106",
  "serde_bytes",
  "serde_json",
  "serde_yaml",
@@ -2423,7 +2367,7 @@ dependencies = [
  "nom_locate",
  "nu-build",
  "pretty",
- "serde 1.0.105",
+ "serde 1.0.106",
  "termcolor",
 ]

@@ -2624,7 +2568,7 @@ dependencies = [
  "autocfg 1.0.0",
  "num-integer",
  "num-traits 0.2.11",
- "serde 1.0.105",
+ "serde 1.0.106",
 ]

 [[package]]
@@ -2818,7 +2762,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "0ecb53e7b83e5016bf4ac041e15e02b0d240cb27072b19b651b0b4d8cd6bbda9"
 dependencies = [
  "log",
- "serde 1.0.105",
+ "serde 1.0.106",
  "winapi 0.3.8",
 ]

@@ -2931,7 +2875,7 @@ dependencies = [
  "byteorder",
  "humantime",
  "line-wrap",
- "serde 1.0.105",
+ "serde 1.0.106",
  "xml-rs",
 ]

@@ -2986,24 +2930,13 @@ dependencies = [
  "output_vt100",
 ]

-[[package]]
-name = "pretty_env_logger"
-version = "0.3.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "717ee476b1690853d222af4634056d830b5197ffd747726a9a1eee6da9f49074"
-dependencies = [
- "chrono",
- "env_logger 0.6.2",
- "log",
-]
-
 [[package]]
 name = "pretty_env_logger"
 version = "0.4.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "926d36b9553851b8b0005f1275891b392ee4d2d833852c417ed025477350fb9d"
 dependencies = [
- "env_logger 0.7.1",
+ "env_logger",
  "log",
 ]

@@ -3084,7 +3017,7 @@ dependencies = [
  "directories",
  "isatty",
  "petgraph",
- "serde 1.0.105",
+ "serde 1.0.106",
  "serde-value",
  "serde_derive",
  "tint",
@@ -3333,7 +3266,7 @@ version = "1.2.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "e46a2036019fdb888131db7a4c847a1063a7493f971ed94ea82c67eada63ca54"
 dependencies = [
- "serde 1.0.105",
+ "serde 1.0.106",
  "serde_derive",
 ]

@@ -3452,9 +3385,9 @@ checksum = "9dad3f759919b92c3068c696c15c3d17238234498bbdcc80f2c469606f948ac8"

 [[package]]
 name = "serde"
-version = "1.0.105"
+version = "1.0.106"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e707fbbf255b8fc8c3b99abb91e7257a622caeb20a9818cbadbeeede4e0932ff"
+checksum = "36df6ac6412072f67cf767ebbde4133a5b2e88e76dc6187fa7104cd16f783399"
 dependencies = [
  "serde_derive",
 ]
@@ -3492,7 +3425,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "7a663f873dedc4eac1a559d4c6bc0d0b2c34dc5ac4702e105014b8281489e44f"
 dependencies = [
  "ordered-float",
- "serde 1.0.105",
+ "serde 1.0.106",
 ]

 [[package]]
@@ -3501,14 +3434,14 @@ version = "0.11.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "325a073952621257820e7a3469f55ba4726d8b28657e7e36653d1c36dc2c84ae"
 dependencies = [
- "serde 1.0.105",
+ "serde 1.0.106",
 ]

 [[package]]
 name = "serde_derive"
-version = "1.0.105"
+version = "1.0.106"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ac5d00fc561ba2724df6758a17de23df5914f20e41cb00f94d5b7ae42fffaff8"
+checksum = "9e549e3abf4fb8621bd1609f11dfc9f5e50320802273b12f3811a67e6716ea6c"
 dependencies = [
  "proc-macro2",
  "quote",
@@ -3522,7 +3455,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "eb236687e2bb073a7521c021949be944641e671b8505a94069ca37b656c81139"
 dependencies = [
  "result",
- "serde 1.0.105",
+ "serde 1.0.106",
  "void",
 ]

@@ -3535,7 +3468,7 @@ dependencies = [
  "indexmap",
  "itoa",
  "ryu",
- "serde 1.0.105",
+ "serde 1.0.106",
 ]

 [[package]]
@@ -3555,7 +3488,7 @@ checksum = "9ec5d77e2d4c73717816afac02670d5c4f534ea95ed430442cad02e7a6e32c97"
 dependencies = [
  "dtoa",
  "itoa",
- "serde 1.0.105",
+ "serde 1.0.106",
  "url",
 ]

@@ -3567,7 +3500,7 @@ checksum = "691b17f19fc1ec9d94ec0b5864859290dff279dbd7b03f017afda54eb36c3c35"
 dependencies = [
  "dtoa",
  "linked-hash-map 0.5.2",
- "serde 1.0.105",
+ "serde 1.0.106",
  "yaml-rust",
 ]

@@ -3581,15 +3514,6 @@ dependencies = [
  "winapi-build",
 ]

-[[package]]
-name = "shellexpand"
-version = "1.1.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2c7e79eddc7b411f9beeaaf2d421de7e7cb3b1ab9eaf1b79704c0e4130cba6b5"
-dependencies = [
- "dirs 2.0.2",
-]
-
 [[package]]
 name = "shellexpand"
 version = "2.0.0"
@@ -3682,7 +3606,7 @@ dependencies = [
  "open",
  "os_info",
  "path-slash",
- "pretty_env_logger 0.4.0",
+ "pretty_env_logger",
  "rayon",
  "regex",
  "serde_json",
@@ -3742,7 +3666,7 @@ dependencies = [
  "log",
  "mime",
  "mime_guess",
- "serde 1.0.105",
+ "serde 1.0.106",
  "serde_json",
  "serde_urlencoded",
  "url",
@@ -3800,7 +3724,7 @@ dependencies = [
  "onig",
  "plist",
  "regex-syntax",
- "serde 1.0.105",
+ "serde 1.0.106",
  "serde_derive",
  "serde_json",
  "walkdir",
@@ -3952,7 +3876,7 @@ version = "0.4.10"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "758664fc71a3a69038656bee8b6be6477d2a6c315a6b81f7081f591bffa4111f"
 dependencies = [
- "serde 1.0.105",
+ "serde 1.0.106",
 ]

 [[package]]
@@ -3961,7 +3885,7 @@ version = "0.5.6"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "ffc92d160b1eef40665be3a05630d003936a3bc7da7421277846c2613e92c71a"
 dependencies = [
- "serde 1.0.105",
+ "serde 1.0.106",
 ]

 [[package]]
@@ -3994,7 +3918,7 @@ dependencies = [
  "erased-serde",
  "inventory",
  "lazy_static 1.4.0",
- "serde 1.0.105",
+ "serde 1.0.106",
  "typetag-impl",
 ]

@@ -1,8 +1,8 @@
 [package]
 name = "nu"
-version = "0.12.0"
+version = "0.12.1"
 authors = ["The Nu Project Contributors"]
-description = "A new kind of shell"
+description = "A new type of shell"
 license = "MIT"
 edition = "2018"
 readme = "README.md"
@@ -36,7 +36,6 @@ nu_plugin_str = { version = "0.12.0", path = "./crates/nu_plugin_str", optional=
 nu_plugin_sys = { version = "0.12.0", path = "./crates/nu_plugin_sys", optional=true }
 nu_plugin_textview = { version = "0.12.0", path = "./crates/nu_plugin_textview", optional=true }
 nu_plugin_tree = { version = "0.12.0", path = "./crates/nu_plugin_tree", optional=true }
-nu-macros = { version = "0.12.0", path = "./crates/nu-macros" }

 crossterm = { version = "0.16.0", optional = true }
 onig_sys = { version = "=69.1.0", optional = true }

@@ -1,7 +1,7 @@
 [package]
 name = "nu-build"
 version = "0.12.0"
-authors = ["Yehuda Katz <wycats@gmail.com>", "Jonathan Turner <jonathan.d.turner@gmail.com>", "Andrés N. Robalino <andres@androbtech.com>"]
+authors = ["The Nu Project Contributors"]
 edition = "2018"
 description = "Core build system for nushell"
 license = "MIT"

@@ -1,7 +1,7 @@
 [package]
 name = "nu-cli"
 version = "0.12.0"
-authors = ["Yehuda Katz <wycats@gmail.com>", "Jonathan Turner <jonathan.d.turner@gmail.com>", "Andrés N. Robalino <andres@androbtech.com>"]
+authors = ["The Nu Project Contributors"]
 description = "CLI for nushell"
 edition = "2018"
 license = "MIT"
@@ -16,7 +16,6 @@ nu-protocol = { version = "0.12.0", path = "../nu-protocol" }
 nu-errors = { version = "0.12.0", path = "../nu-errors" }
 nu-parser = { version = "0.12.0", path = "../nu-parser" }
 nu-value-ext = { version = "0.12.0", path = "../nu-value-ext" }
-nu-macros = { version = "0.12.0", path = "../nu-macros" }
 nu-test-support = { version = "0.12.0", path = "../nu-test-support" }

 ansi_term = "0.12.1"

@@ -10,13 +10,10 @@ use crate::prelude::*;
 use futures_codec::FramedRead;

 use nu_errors::ShellError;
-use nu_parser::{
-    ClassifiedCommand, ClassifiedPipeline, ExternalCommand, PipelineShape, SpannedToken,
-    TokensIterator,
-};
+use nu_parser::{ClassifiedCommand, ExternalCommand};
 use nu_protocol::{Primitive, ReturnSuccess, Signature, UntaggedValue, Value};

-use log::{debug, log_enabled, trace};
+use log::{debug, trace};
 use rustyline::error::ReadlineError;
 use rustyline::{
     self, config::Configurer, config::EditMode, At, Cmd, ColorMode, CompletionType, Config, Editor,
@@ -614,9 +611,9 @@ async fn process_line(
         Ok(line) => {
             let line = chomp_newline(line);

-            let result = match nu_parser::parse(&line) {
+            let result = match nu_parser::lite_parse(&line, 0) {
                 Err(err) => {
-                    return LineResult::Error(line.to_string(), err);
+                    return LineResult::Error(line.to_string(), err.into());
                 }

                 Ok(val) => val,
@@ -625,7 +622,9 @@ async fn process_line(
             debug!("=== Parsed ===");
             debug!("{:#?}", result);

-            let pipeline = classify_pipeline(&result, &ctx, &Text::from(line));
+            let pipeline = nu_parser::classify_pipeline(&result, ctx.registry());

+            //println!("{:#?}", pipeline);
+
             if let Some(failure) = pipeline.failed {
                 return LineResult::Error(line.to_string(), failure.into());
@@ -642,9 +641,9 @@ async fn process_line(
                 ref name, ref args, ..
             }) = pipeline.commands.list[0]
             {
-                if dunce::canonicalize(name).is_ok()
-                    && PathBuf::from(name).is_dir()
-                    && ichwh::which(name).await.unwrap_or(None).is_none()
+                if dunce::canonicalize(&name).is_ok()
+                    && PathBuf::from(&name).is_dir()
+                    && ichwh::which(&name).await.unwrap_or(None).is_none()
                     && args.list.is_empty()
                 {
                     // Here we work differently if we're in Windows because of the expected Windows behavior
@@ -762,26 +761,6 @@ async fn process_line(
     }
 }

-pub fn classify_pipeline(
-    pipeline: &SpannedToken,
-    context: &Context,
-    source: &Text,
-) -> ClassifiedPipeline {
-    let pipeline_list = vec![pipeline.clone()];
-    let expand_context = context.expand_context(source);
-    let mut iterator = TokensIterator::new(&pipeline_list, expand_context, pipeline.span());
-
-    let result = iterator.expand_infallible(PipelineShape);
-
-    if log_enabled!(target: "nu::expand_syntax", log::Level::Debug) {
-        outln!("");
-        let _ = ptree::print_tree(&iterator.expand_tracer().print(source.clone()));
-        outln!("");
-    }
-
-    result
-}
-
 pub fn print_err(err: ShellError, host: &dyn Host, source: &Text) {
     let diag = err.into_diagnostic();

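The change above is the core of this commit: the REPL no longer drives nu-parser's token-expansion machinery (TokensIterator, PipelineShape) from the CLI. It calls lite_parse and then classify_pipeline against the command registry. A minimal sketch of that flow as it appears in the hunk; the Context parameter, the return type, and the error conversions are assumptions made to keep the sketch self-contained, not the crate's documented API:

    use nu_errors::ShellError;

    // Sketch only: mirrors the two calls visible in process_line() above.
    fn parse_and_classify(
        line: &str,
        ctx: &Context, // assumed to expose registry(), as in the hunk
    ) -> Result<nu_parser::ClassifiedPipeline, ShellError> {
        // Stage 1: registry-free "lite" parse, starting at offset 0.
        let lite = nu_parser::lite_parse(line, 0).map_err(Into::into)?;

        // Stage 2: resolve the lite pipeline against the known commands.
        let pipeline = nu_parser::classify_pipeline(&lite, ctx.registry());

        // Classification problems are reported through `failed`, not Err,
        // which is why the hunk checks pipeline.failed explicitly.
        if let Some(failure) = pipeline.failed {
            return Err(failure.into());
        }
        Ok(pipeline)
    }
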
@@ -282,7 +282,7 @@ fn create_default_command_args(context: &RunnableContextWithoutInput) -> RawComm
         call_info: UnevaluatedCallInfo {
             args: hir::Call {
                 head: Box::new(SpannedExpression::new(
-                    Expression::Literal(Literal::String(span)),
+                    Expression::Literal(Literal::String(String::new())),
                     span,
                 )),
                 positional: None,

@@ -1,7 +1,6 @@
 use crate::commands::WholeStreamCommand;
 use crate::prelude::*;
 use nu_errors::ShellError;
-use nu_macros::signature;
 use nu_protocol::{Signature, SyntaxShape};

 pub struct Cd;
@@ -12,17 +11,11 @@ impl WholeStreamCommand for Cd {
     }

     fn signature(&self) -> Signature {
-        signature! {
-            def cd {
-                "the directory to change to"
-                directory(optional Path) - "the directory to change to"
-            }
-        }
-        // Signature::build("cd").optional(
-        //     "directory",
-        //     SyntaxShape::Path,
-        //     "the directory to change to",
-        // )
+        Signature::build("cd").optional(
+            "directory",
+            SyntaxShape::Path,
+            "the directory to change to",
+        )
     }

     fn usage(&self) -> &str {

|
|||||||
use derive_new::new;
|
use derive_new::new;
|
||||||
use nu_parser::hir;
|
use nu_parser::hir;
|
||||||
|
|
||||||
#[derive(new, Debug, Eq, PartialEq)]
|
#[derive(new, Debug)]
|
||||||
pub(crate) struct Command {
|
pub(crate) struct Command {
|
||||||
pub(crate) args: hir::Call,
|
pub(crate) args: hir::Call,
|
||||||
}
|
}
|
||||||
|
@@ -6,7 +6,7 @@ use futures::stream::StreamExt;
 use futures_codec::FramedRead;
 use log::trace;
 use nu_errors::ShellError;
-use nu_parser::commands::classified::external::ExternalArg;
+use nu_parser::ExternalArg;
 use nu_parser::ExternalCommand;
 use nu_protocol::{ColumnPath, Primitive, ShellTypeName, UntaggedValue, Value};
 use nu_source::{Tag, Tagged};

|
|||||||
let result = {
|
let result = {
|
||||||
context.run_command(
|
context.run_command(
|
||||||
internal_command?,
|
internal_command?,
|
||||||
command.name_tag.clone(),
|
Tag::unknown_anchor(command.name_span),
|
||||||
command.args.clone(),
|
command.args.clone(),
|
||||||
&source,
|
&source,
|
||||||
objects,
|
objects,
|
||||||
@ -71,7 +71,7 @@ pub(crate) fn run_internal_command(
|
|||||||
span: Span::unknown()
|
span: Span::unknown()
|
||||||
},
|
},
|
||||||
source: source.clone(),
|
source: source.clone(),
|
||||||
name_tag: command.name_tag,
|
name_tag: Tag::unknown_anchor(command.name_span),
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
let mut result = converter.run(new_args.with_input(vec![tagged_contents]), &context.registry);
|
let mut result = converter.run(new_args.with_input(vec![tagged_contents]), &context.registry);
|
||||||
|
@@ -23,10 +23,9 @@ pub(crate) async fn run_pipeline(
                 return Err(ShellError::unimplemented("Dynamic commands"))
             }

-            (Some(ClassifiedCommand::Expr(_)), _) | (_, Some(ClassifiedCommand::Expr(_))) => {
-                return Err(ShellError::unimplemented("Expression-only commands"))
-            }
-
+            // (Some(ClassifiedCommand::Expr(_)), _) | (_, Some(ClassifiedCommand::Expr(_))) => {
+            //     return Err(ShellError::unimplemented("Expression-only commands"))
+            // }
             (Some(ClassifiedCommand::Error(err)), _) => return Err(err.into()),
             (_, Some(ClassifiedCommand::Error(err))) => return Err(err.clone().into()),

@@ -43,6 +42,7 @@ pub(crate) async fn run_pipeline(
             }

             (None, _) => break,
+            _ => unimplemented!("Not yet implented cases in run_pipeline"),
         };
     }

@@ -41,7 +41,7 @@ impl WholeStreamCommand for Config {
             )
             .named(
                 "set_into",
-                SyntaxShape::Member,
+                SyntaxShape::String,
                 "sets a variable from values in the pipeline",
                 Some('i'),
             )

|
|||||||
use crate::prelude::*;
|
use crate::prelude::*;
|
||||||
|
use csv::{ErrorKind, ReaderBuilder};
|
||||||
use nu_errors::ShellError;
|
use nu_errors::ShellError;
|
||||||
use nu_parser::hir::syntax_shape::{ExpandContext, SignatureRegistry};
|
use nu_protocol::{ReturnSuccess, TaggedDictBuilder, UntaggedValue, Value};
|
||||||
use nu_parser::utils::{parse_line_with_separator as parse, LineSeparatedShape};
|
|
||||||
use nu_parser::TokensIterator;
|
|
||||||
use nu_protocol::{ReturnSuccess, Signature, TaggedDictBuilder, UntaggedValue, Value};
|
|
||||||
use nu_source::nom_input;
|
|
||||||
|
|
||||||
use derive_new::new;
|
fn from_delimited_string_to_value(
|
||||||
|
s: String,
|
||||||
|
headerless: bool,
|
||||||
|
separator: char,
|
||||||
|
tag: impl Into<Tag>,
|
||||||
|
) -> Result<Value, csv::Error> {
|
||||||
|
let mut reader = ReaderBuilder::new()
|
||||||
|
.has_headers(!headerless)
|
||||||
|
.delimiter(separator as u8)
|
||||||
|
.from_reader(s.as_bytes());
|
||||||
|
let tag = tag.into();
|
||||||
|
|
||||||
|
let headers = if headerless {
|
||||||
|
(1..=reader.headers()?.len())
|
||||||
|
.map(|i| format!("Column{}", i))
|
||||||
|
.collect::<Vec<String>>()
|
||||||
|
} else {
|
||||||
|
reader.headers()?.iter().map(String::from).collect()
|
||||||
|
};
|
||||||
|
|
||||||
|
let mut rows = vec![];
|
||||||
|
for row in reader.records() {
|
||||||
|
let mut tagged_row = TaggedDictBuilder::new(&tag);
|
||||||
|
for (value, header) in row?.iter().zip(headers.iter()) {
|
||||||
|
if let Ok(i) = value.parse::<i64>() {
|
||||||
|
tagged_row.insert_value(header, UntaggedValue::int(i).into_value(&tag))
|
||||||
|
} else if let Ok(f) = value.parse::<f64>() {
|
||||||
|
tagged_row.insert_value(header, UntaggedValue::decimal(f).into_value(&tag))
|
||||||
|
} else {
|
||||||
|
tagged_row.insert_value(header, UntaggedValue::string(value).into_value(&tag))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
rows.push(tagged_row.into_value());
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(UntaggedValue::Table(rows).into_value(&tag))
|
||||||
|
}
|
||||||
|
|
||||||
pub fn from_delimited_data(
|
pub fn from_delimited_data(
|
||||||
headerless: bool,
|
headerless: bool,
|
||||||
@ -20,20 +53,19 @@ pub fn from_delimited_data(
|
|||||||
let concat_string = input.collect_string(name_tag.clone()).await?;
|
let concat_string = input.collect_string(name_tag.clone()).await?;
|
||||||
|
|
||||||
match from_delimited_string_to_value(concat_string.item, headerless, sep, name_tag.clone()) {
|
match from_delimited_string_to_value(concat_string.item, headerless, sep, name_tag.clone()) {
|
||||||
Ok(rows) => {
|
Ok(x) => match x {
|
||||||
for row in rows {
|
Value { value: UntaggedValue::Table(list), .. } => {
|
||||||
match row {
|
for l in list {
|
||||||
Value { value: UntaggedValue::Table(list), .. } => {
|
yield ReturnSuccess::value(l);
|
||||||
for l in list {
|
|
||||||
yield ReturnSuccess::value(l);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
x => yield ReturnSuccess::value(x),
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
x => yield ReturnSuccess::value(x),
|
||||||
},
|
},
|
||||||
Err(err) => {
|
Err(err) => {
|
||||||
let line_one = format!("Could not parse as {}", format_name);
|
let line_one = match pretty_csv_error(err) {
|
||||||
|
Some(pretty) => format!("Could not parse as {} ({})", format_name,pretty),
|
||||||
|
None => format!("Could not parse as {}", format_name),
|
||||||
|
};
|
||||||
let line_two = format!("input cannot be parsed as {}", format_name);
|
let line_two = format!("input cannot be parsed as {}", format_name);
|
||||||
yield Err(ShellError::labeled_error_with_secondary(
|
yield Err(ShellError::labeled_error_with_secondary(
|
||||||
line_one,
|
line_one,
|
||||||
@ -49,121 +81,25 @@ pub fn from_delimited_data(
|
|||||||
Ok(stream.to_output_stream())
|
Ok(stream.to_output_stream())
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Debug, Clone, new)]
|
fn pretty_csv_error(err: csv::Error) -> Option<String> {
|
||||||
pub struct EmptyRegistry {
|
match err.kind() {
|
||||||
#[new(default)]
|
ErrorKind::UnequalLengths {
|
||||||
signatures: indexmap::IndexMap<String, Signature>,
|
pos,
|
||||||
}
|
expected_len,
|
||||||
|
len,
|
||||||
impl EmptyRegistry {}
|
} => {
|
||||||
|
if let Some(pos) = pos {
|
||||||
impl SignatureRegistry for EmptyRegistry {
|
Some(format!(
|
||||||
fn has(&self, _name: &str) -> bool {
|
"Line {}: expected {} fields, found {}",
|
||||||
false
|
pos.line(),
|
||||||
}
|
expected_len,
|
||||||
fn get(&self, _name: &str) -> Option<Signature> {
|
len
|
||||||
None
|
))
|
||||||
}
|
|
||||||
fn clone_box(&self) -> Box<dyn SignatureRegistry> {
|
|
||||||
Box::new(self.clone())
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fn from_delimited_string_to_value(
|
|
||||||
s: String,
|
|
||||||
headerless: bool,
|
|
||||||
sep: char,
|
|
||||||
tag: impl Into<Tag>,
|
|
||||||
) -> Result<Vec<Value>, ShellError> {
|
|
||||||
let tag = tag.into();
|
|
||||||
|
|
||||||
let mut entries = s.lines();
|
|
||||||
|
|
||||||
let mut fields = vec![];
|
|
||||||
let mut out = vec![];
|
|
||||||
|
|
||||||
if let Some(first_entry) = entries.next() {
|
|
||||||
let tokens = match parse(&sep.to_string(), nom_input(first_entry)) {
|
|
||||||
Ok((_, tokens)) => tokens,
|
|
||||||
Err(err) => return Err(ShellError::parse_error(err)),
|
|
||||||
};
|
|
||||||
|
|
||||||
let tokens_span = tokens.span;
|
|
||||||
let source: nu_source::Text = tokens_span.slice(&first_entry).into();
|
|
||||||
|
|
||||||
if !headerless {
|
|
||||||
fields = tokens
|
|
||||||
.item
|
|
||||||
.iter()
|
|
||||||
.filter(|token| !token.is_separator())
|
|
||||||
.map(|field| field.source(&source).to_string())
|
|
||||||
.collect::<Vec<_>>();
|
|
||||||
}
|
|
||||||
|
|
||||||
let registry = Box::new(EmptyRegistry::new());
|
|
||||||
let ctx = ExpandContext::new(registry, &source, None);
|
|
||||||
|
|
||||||
let mut iterator = TokensIterator::new(&tokens.item, ctx, tokens_span);
|
|
||||||
let (results, tokens_identified) = iterator.expand(LineSeparatedShape);
|
|
||||||
let results = results?;
|
|
||||||
|
|
||||||
let mut row = TaggedDictBuilder::new(&tag);
|
|
||||||
|
|
||||||
if headerless {
|
|
||||||
let fallback_columns = (1..=tokens_identified)
|
|
||||||
.map(|i| format!("Column{}", i))
|
|
||||||
.collect::<Vec<String>>();
|
|
||||||
|
|
||||||
for (idx, field) in results.into_iter().enumerate() {
|
|
||||||
let key = if headerless {
|
|
||||||
&fallback_columns[idx]
|
|
||||||
} else {
|
|
||||||
&fields[idx]
|
|
||||||
};
|
|
||||||
|
|
||||||
row.insert_value(key, field.into_value(&tag));
|
|
||||||
}
|
|
||||||
|
|
||||||
out.push(row.into_value())
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
for entry in entries {
|
|
||||||
let tokens = match parse(&sep.to_string(), nom_input(entry)) {
|
|
||||||
Ok((_, tokens)) => tokens,
|
|
||||||
Err(err) => return Err(ShellError::parse_error(err)),
|
|
||||||
};
|
|
||||||
let tokens_span = tokens.span;
|
|
||||||
|
|
||||||
let source: nu_source::Text = tokens_span.slice(&entry).into();
|
|
||||||
let registry = Box::new(EmptyRegistry::new());
|
|
||||||
let ctx = ExpandContext::new(registry, &source, None);
|
|
||||||
|
|
||||||
let mut iterator = TokensIterator::new(&tokens.item, ctx, tokens_span);
|
|
||||||
let (results, tokens_identified) = iterator.expand(LineSeparatedShape);
|
|
||||||
let results = results?;
|
|
||||||
|
|
||||||
let mut row = TaggedDictBuilder::new(&tag);
|
|
||||||
|
|
||||||
let fallback_columns = (1..=tokens_identified)
|
|
||||||
.map(|i| format!("Column{}", i))
|
|
||||||
.collect::<Vec<String>>();
|
|
||||||
|
|
||||||
for (idx, field) in results.into_iter().enumerate() {
|
|
||||||
let key = if headerless {
|
|
||||||
&fallback_columns[idx]
|
|
||||||
} else {
|
} else {
|
||||||
match fields.get(idx) {
|
Some(format!("Expected {} fields, found {}", expected_len, len))
|
||||||
Some(key) => key,
|
}
|
||||||
None => &fallback_columns[idx],
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
row.insert_value(key, field.into_value(&tag));
|
|
||||||
}
|
}
|
||||||
|
ErrorKind::Seek => Some("Internal error while parsing csv".to_string()),
|
||||||
out.push(row.into_value())
|
_ => None,
|
||||||
}
|
}
|
||||||
|
|
||||||
Ok(out)
|
|
||||||
}
|
}
|
||||||
|
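The rewritten from_delimited_string_to_value above hands tokenizing to the csv crate instead of nu-parser's line splitter. For reference, a small standalone sketch of the same ReaderBuilder configuration, returning plain Vec<Vec<String>> rows rather than nushell's tagged Values:

    use csv::ReaderBuilder;

    // Parse delimiter-separated text into rows of strings, using the same
    // reader settings as the hunk above (headers optional, custom delimiter).
    fn parse_delimited(
        s: &str,
        separator: char,
        headerless: bool,
    ) -> Result<Vec<Vec<String>>, csv::Error> {
        let mut reader = ReaderBuilder::new()
            .has_headers(!headerless)
            .delimiter(separator as u8)
            .from_reader(s.as_bytes());

        let mut rows = Vec::new();
        for record in reader.records() {
            rows.push(record?.iter().map(String::from).collect());
        }
        Ok(rows)
    }

Note that delimiter takes a single byte, which is why the hunk casts separator as u8; multi-byte separators would need different handling.
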
@@ -30,7 +30,7 @@ impl WholeStreamCommand for Histogram {
                 "the name of the column to graph by",
             )
             .rest(
-                SyntaxShape::Member,
+                SyntaxShape::String,
                 "column name to give the histogram's frequency column",
             )
     }

@@ -22,7 +22,7 @@ impl WholeStreamCommand for Nth {
         Signature::build("nth")
             .required(
                 "row number",
-                SyntaxShape::Any,
+                SyntaxShape::Int,
                 "the number of the row to return",
             )
             .rest(SyntaxShape::Any, "Optionally return more rows")

@@ -18,7 +18,7 @@ impl WholeStreamCommand for Reject {
     }

     fn signature(&self) -> Signature {
-        Signature::build("reject").rest(SyntaxShape::Member, "the names of columns to remove")
+        Signature::build("reject").rest(SyntaxShape::String, "the names of columns to remove")
     }

     fn usage(&self) -> &str {

@@ -26,7 +26,7 @@ impl WholeStreamCommand for Rename {
                 "the name of the column to rename for",
             )
             .rest(
-                SyntaxShape::Member,
+                SyntaxShape::String,
                 "Additional column name(s) to rename for",
             )
     }

@@ -30,7 +30,7 @@ impl WholeStreamCommand for SplitColumn {
                 "the character that denotes what separates columns",
             )
             .switch("collapse-empty", "remove empty columns", Some('c'))
-            .rest(SyntaxShape::Member, "column names to give the new columns")
+            .rest(SyntaxShape::String, "column names to give the new columns")
     }

     fn usage(&self) -> &str {

@@ -4,7 +4,7 @@ use crate::shell::shell_manager::ShellManager;
 use crate::stream::{InputStream, OutputStream};
 use indexmap::IndexMap;
 use nu_errors::ShellError;
-use nu_parser::{hir, hir::syntax_shape::ExpandContext, hir::syntax_shape::SignatureRegistry};
+use nu_parser::{hir, SignatureRegistry};
 use nu_protocol::Signature;
 use nu_source::{Tag, Text};
 use parking_lot::Mutex;
@@ -92,17 +92,6 @@ impl Context {
         &self.registry
     }

-    pub(crate) fn expand_context<'context>(
-        &'context self,
-        source: &'context Text,
-    ) -> ExpandContext {
-        ExpandContext::new(
-            Box::new(self.registry.clone()),
-            source,
-            self.shell_manager.homedir(),
-        )
-    }
-
     pub(crate) fn basic() -> Result<Context, Box<dyn Error>> {
         let registry = CommandRegistry::new();

@@ -7,7 +7,7 @@ use chrono::{DateTime, Utc};
 use derive_new::new;
 use log::trace;
 use nu_errors::ShellError;
-use nu_parser::{hir, CompareOperator};
+use nu_parser::hir;
 use nu_protocol::{
     Evaluate, EvaluateTrait, Primitive, Scope, ShellTypeName, SpannedTypeName, TaggedDictBuilder,
     UntaggedValue, Value,
@@ -23,7 +23,7 @@ use std::time::SystemTime;
 #[derive(Debug, Ord, PartialOrd, Eq, PartialEq, Clone, new, Serialize)]
 pub struct Operation {
     pub(crate) left: Value,
-    pub(crate) operator: CompareOperator,
+    pub(crate) operator: hir::CompareOperator,
     pub(crate) right: Value,
 }

@@ -1,4 +1,4 @@
-use nu_parser::Number;
+use nu_parser::hir::Number;
 use nu_protocol::Primitive;

 pub fn number(number: impl Into<Number>) -> Primitive {

@@ -3,7 +3,7 @@ use crate::data::base::shape::{Column, InlineShape};
 use crate::data::primitive::style_primitive;
 use chrono::DateTime;
 use nu_errors::ShellError;
-use nu_parser::CompareOperator;
+use nu_parser::hir::CompareOperator;
 use nu_protocol::{Primitive, Type, UntaggedValue};
 use nu_source::{DebugDocBuilder, PrettyDebug, Tagged};

@@ -36,7 +36,7 @@ pub(crate) fn evaluate_args(
                 hir::NamedValue::PresentSwitch(tag) => {
                     results.insert(name.clone(), UntaggedValue::boolean(true).into_value(tag));
                 }
-                hir::NamedValue::Value(expr) => {
+                hir::NamedValue::Value(_, expr) => {
                     results.insert(
                         name.clone(),
                         evaluate_baseline_expr(expr, registry, scope, source)?,

@@ -35,22 +35,27 @@ pub(crate) fn evaluate_baseline_expr(
         Expression::Command(_) => evaluate_command(tag, scope, source),
         Expression::ExternalCommand(external) => evaluate_external(external, scope, source),
         Expression::Binary(binary) => {
-            let left = evaluate_baseline_expr(binary.left(), registry, scope, source)?;
-            let right = evaluate_baseline_expr(binary.right(), registry, scope, source)?;
+            let left = evaluate_baseline_expr(&binary.left, registry, scope, source)?;
+            let right = evaluate_baseline_expr(&binary.right, registry, scope, source)?;

             trace!("left={:?} right={:?}", left.value, right.value);

-            match apply_operator(**binary.op(), &left, &right) {
-                Ok(result) => Ok(result.into_value(tag)),
-                Err((left_type, right_type)) => Err(ShellError::coerce_error(
-                    left_type.spanned(binary.left().span),
-                    right_type.spanned(binary.right().span),
-                )),
+            match binary.op.expr {
+                Expression::Literal(hir::Literal::Operator(op)) => {
+                    match apply_operator(op, &left, &right) {
+                        Ok(result) => Ok(result.into_value(tag)),
+                        Err((left_type, right_type)) => Err(ShellError::coerce_error(
+                            left_type.spanned(binary.left.span),
+                            right_type.spanned(binary.right.span),
+                        )),
+                    }
+                }
+                _ => unreachable!(),
             }
         }
         Expression::Range(range) => {
-            let left = range.left();
-            let right = range.right();
+            let left = &range.left;
+            let right = &range.right;

             let left = evaluate_baseline_expr(left, registry, scope, source)?;
             let right = evaluate_baseline_expr(right, registry, scope, source)?;
@@ -85,10 +90,10 @@ pub(crate) fn evaluate_baseline_expr(
             )))
             .into_value(&tag)),
         Expression::Path(path) => {
-            let value = evaluate_baseline_expr(path.head(), registry, scope, source)?;
+            let value = evaluate_baseline_expr(&path.head, registry, scope, source)?;
             let mut item = value;

-            for member in path.tail() {
+            for member in &path.tail {
                 let next = item.get_data_by_member(member);

                 match next {
@@ -123,28 +128,29 @@ pub(crate) fn evaluate_baseline_expr(
             Ok(item.value.into_value(tag))
         }
         Expression::Boolean(_boolean) => unimplemented!(),
+        Expression::Garbage => unimplemented!(),
     }
 }

 fn evaluate_literal(literal: &hir::Literal, span: Span, source: &Text) -> Value {
     match &literal {
         hir::Literal::ColumnPath(path) => {
-            let members = path
-                .iter()
-                .map(|member| member.to_path_member(source))
-                .collect();
+            let members = path.iter().map(|member| member.to_path_member()).collect();

             UntaggedValue::Primitive(Primitive::ColumnPath(ColumnPath::new(members)))
                 .into_value(span)
         }
         hir::Literal::Number(int) => match int {
-            nu_parser::Number::Int(i) => UntaggedValue::int(i.clone()).into_value(span),
-            nu_parser::Number::Decimal(d) => UntaggedValue::decimal(d.clone()).into_value(span),
+            nu_parser::hir::Number::Int(i) => UntaggedValue::int(i.clone()).into_value(span),
+            nu_parser::hir::Number::Decimal(d) => {
+                UntaggedValue::decimal(d.clone()).into_value(span)
+            }
         },
         hir::Literal::Size(int, unit) => unit.compute(&int).into_value(span),
-        hir::Literal::String(tag) => UntaggedValue::string(tag.slice(source)).into_value(span),
+        hir::Literal::String(string) => UntaggedValue::string(string).into_value(span),
         hir::Literal::GlobPattern(pattern) => UntaggedValue::pattern(pattern).into_value(span),
         hir::Literal::Bare => UntaggedValue::string(span.slice(source)).into_value(span),
+        hir::Literal::Operator(_) => unimplemented!("Not sure what to do with operator yet"),
     }
 }

@@ -157,7 +163,7 @@ fn evaluate_reference(
     trace!("Evaluating {:?} with Scope {:?}", name, scope);
     match name {
         hir::Variable::It(_) => Ok(scope.it.value.clone().into_value(tag)),
-        hir::Variable::Other(inner) => match inner.slice(source) {
+        hir::Variable::Other(_, span) => match span.slice(source) {
             x if x == "nu" => crate::evaluate::variables::nu(tag),
             x => Ok(scope
                 .vars
@@ -174,7 +180,7 @@ fn evaluate_external(
     _source: &Text,
 ) -> Result<Value, ShellError> {
     Err(ShellError::syntax_error(
-        "Unexpected external command".spanned(*external.name()),
+        "Unexpected external command".spanned(external.name.span),
     ))
 }

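Two mechanical changes run through this evaluator hunk: hir nodes now expose public fields (binary.left, path.head, external.name) where they previously offered accessor methods, and the operator of a binary expression arrives as an Expression::Literal(Literal::Operator(..)) that has to be unwrapped before apply_operator is called. The first pattern in isolation, using made-up types rather than nu-parser's real ones:

    // Illustration only; these types are hypothetical, not nu-parser's hir.
    mod before {
        pub struct Binary {
            left: i64,
            right: i64,
        }
        impl Binary {
            pub fn left(&self) -> &i64 { &self.left }
            pub fn right(&self) -> &i64 { &self.right }
        }
        pub fn eval(b: &Binary) -> i64 {
            b.left() + b.right() // callers go through accessor methods
        }
    }

    mod after {
        pub struct Binary {
            pub left: i64,
            pub right: i64,
        }
        pub fn eval(b: &Binary) -> i64 {
            b.left + b.right // callers borrow the public fields directly
        }
    }

This is why every call site of the form binary.left() in the old code becomes &binary.left in the new one.
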
@@ -1,5 +1,5 @@
 use crate::data::value;
-use nu_parser::CompareOperator;
+use nu_parser::hir::CompareOperator;
 use nu_protocol::{Primitive, ShellTypeName, UntaggedValue, Value};
 use std::ops::Not;

@@ -27,7 +27,6 @@ pub use crate::data::primitive;
 pub use crate::data::value;
 pub use crate::env::environment_syncer::EnvironmentSyncer;
 pub use crate::env::host::BasicHost;
-pub use nu_parser::TokenTreeBuilder;
 pub use nu_value_ext::ValueExt;
 pub use num_traits::cast::ToPrimitive;

|
|||||||
pub(crate) use futures::{FutureExt, Stream, StreamExt};
|
pub(crate) use futures::{FutureExt, Stream, StreamExt};
|
||||||
pub(crate) use nu_protocol::{EvaluateTrait, MaybeOwned};
|
pub(crate) use nu_protocol::{EvaluateTrait, MaybeOwned};
|
||||||
pub(crate) use nu_source::{
|
pub(crate) use nu_source::{
|
||||||
b, AnchorLocation, DebugDocBuilder, HasSpan, PrettyDebug, PrettyDebugWithSource, Span,
|
b, AnchorLocation, DebugDocBuilder, PrettyDebug, PrettyDebugWithSource, Span, SpannedItem, Tag,
|
||||||
SpannedItem, Tag, TaggedItem, Text,
|
TaggedItem, Text,
|
||||||
};
|
};
|
||||||
pub(crate) use nu_value_ext::ValueExt;
|
pub(crate) use nu_value_ext::ValueExt;
|
||||||
pub(crate) use num_bigint::BigInt;
|
pub(crate) use num_bigint::BigInt;
|
||||||
|
@ -1,8 +1,6 @@
|
|||||||
use crate::context::CommandRegistry;
|
use crate::context::CommandRegistry;
|
||||||
|
|
||||||
use derive_new::new;
|
use derive_new::new;
|
||||||
use nu_parser::ExpandContext;
|
|
||||||
use nu_source::{HasSpan, Text};
|
|
||||||
use rustyline::completion::{Completer, FilenameCompleter};
|
use rustyline::completion::{Completer, FilenameCompleter};
|
||||||
use std::path::PathBuf;
|
use std::path::PathBuf;
|
||||||
|
|
||||||
@ -20,14 +18,6 @@ impl NuCompleter {
|
|||||||
pos: usize,
|
pos: usize,
|
||||||
context: &rustyline::Context,
|
context: &rustyline::Context,
|
||||||
) -> rustyline::Result<(usize, Vec<rustyline::completion::Pair>)> {
|
) -> rustyline::Result<(usize, Vec<rustyline::completion::Pair>)> {
|
||||||
let text = Text::from(line);
|
|
||||||
let expand_context =
|
|
||||||
ExpandContext::new(Box::new(self.commands.clone()), &text, self.homedir.clone());
|
|
||||||
|
|
||||||
#[allow(unused)]
|
|
||||||
// smarter completions
|
|
||||||
let shapes = nu_parser::pipeline_shapes(line, expand_context);
|
|
||||||
|
|
||||||
let commands: Vec<String> = self.commands.names();
|
let commands: Vec<String> = self.commands.names();
|
||||||
|
|
||||||
let line_chars: Vec<_> = line[..pos].chars().collect();
|
let line_chars: Vec<_> = line[..pos].chars().collect();
|
||||||
@ -44,7 +34,17 @@ impl NuCompleter {
|
|||||||
|
|
||||||
// See if we're a flag
|
// See if we're a flag
|
||||||
if pos > 0 && replace_pos < line_chars.len() && line_chars[replace_pos] == '-' {
|
if pos > 0 && replace_pos < line_chars.len() && line_chars[replace_pos] == '-' {
|
||||||
completions = self.get_matching_arguments(&line_chars, line, replace_pos, pos);
|
if let Ok(lite_pipeline) = nu_parser::lite_parse(line, 0) {
|
||||||
|
completions = self.get_matching_arguments(
|
||||||
|
&lite_pipeline,
|
||||||
|
&line_chars,
|
||||||
|
line,
|
||||||
|
replace_pos,
|
||||||
|
pos,
|
||||||
|
);
|
||||||
|
} else {
|
||||||
|
completions = self.file_completer.complete(line, pos, context)?.1;
|
||||||
|
}
|
||||||
} else {
|
} else {
|
||||||
completions = self.file_completer.complete(line, pos, context)?.1;
|
completions = self.file_completer.complete(line, pos, context)?.1;
|
||||||
|
|
||||||
@ -96,6 +96,7 @@ impl NuCompleter {
|
|||||||
|
|
||||||
fn get_matching_arguments(
|
fn get_matching_arguments(
|
||||||
&self,
|
&self,
|
||||||
|
lite_parse: &nu_parser::LitePipeline,
|
||||||
line_chars: &[char],
|
line_chars: &[char],
|
||||||
line: &str,
|
line: &str,
|
||||||
replace_pos: usize,
|
replace_pos: usize,
|
||||||
@ -108,40 +109,23 @@ impl NuCompleter {
|
|||||||
let replace_string = (replace_pos..pos).map(|_| " ").collect::<String>();
|
let replace_string = (replace_pos..pos).map(|_| " ").collect::<String>();
|
||||||
line_copy.replace_range(replace_pos..pos, &replace_string);
|
line_copy.replace_range(replace_pos..pos, &replace_string);
|
||||||
|
|
||||||
if let Ok(val) = nu_parser::parse(&line_copy) {
|
let result = nu_parser::classify_pipeline(&lite_parse, &self.commands);
|
||||||
let source = Text::from(line);
|
|
||||||
let pipeline_list = vec![val.clone()];
|
|
||||||
|
|
||||||
let expand_context = nu_parser::ExpandContext {
|
for command in result.commands.list {
|
||||||
homedir: None,
|
if let nu_parser::ClassifiedCommand::Internal(nu_parser::InternalCommand {
|
||||||
registry: Box::new(self.commands.clone()),
|
args, ..
|
||||||
source: &source,
|
}) = command
|
||||||
};
|
{
|
||||||
|
if replace_pos >= args.span.start() && replace_pos <= args.span.end() {
|
||||||
|
if let Some(named) = args.named {
|
||||||
|
for (name, _) in named.iter() {
|
||||||
|
let full_flag = format!("--{}", name);
|
||||||
|
|
||||||
let mut iterator =
|
if full_flag.starts_with(&substring) {
|
||||||
nu_parser::TokensIterator::new(&pipeline_list, expand_context, val.span());
|
matching_arguments.push(rustyline::completion::Pair {
|
||||||
|
display: full_flag.clone(),
|
||||||
let result = iterator.expand_infallible(nu_parser::PipelineShape);
|
replacement: full_flag,
|
||||||
|
});
|
||||||
if result.failed.is_none() {
|
|
||||||
for command in result.commands.list {
|
|
||||||
if let nu_parser::ClassifiedCommand::Internal(nu_parser::InternalCommand {
|
|
||||||
args,
|
|
||||||
..
|
|
||||||
}) = command
|
|
||||||
{
|
|
||||||
if replace_pos >= args.span.start() && replace_pos <= args.span.end() {
|
|
||||||
if let Some(named) = args.named {
|
|
||||||
for (name, _) in named.iter() {
|
|
||||||
let full_flag = format!("--{}", name);
|
|
||||||
|
|
||||||
if full_flag.starts_with(&substring) {
|
|
||||||
matching_arguments.push(rustyline::completion::Pair {
|
|
||||||
display: full_flag.clone(),
|
|
||||||
replacement: full_flag,
|
|
||||||
});
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
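
The completer now leans on nu_parser's lite_parse/classify_pipeline pair instead of the old token-tree expansion, but the flag matching itself is just a prefix test over a command's named arguments. A minimal, self-contained sketch of that idea (matching_flags and the flag list below are hypothetical stand-ins for illustration, not nu APIs):

/// Hypothetical helper: given the named arguments of a classified command and
/// the text the user has typed so far (e.g. "--f"), return the flags that could
/// complete it, in the (display, replacement) shape rustyline's Pair expects.
fn matching_flags(known_flags: &[&str], typed: &str) -> Vec<(String, String)> {
    known_flags
        .iter()
        .map(|name| format!("--{}", name))
        .filter(|full_flag| full_flag.starts_with(typed))
        .map(|full_flag| (full_flag.clone(), full_flag))
        .collect()
}

fn main() {
    // Assumed flag names, for illustration only.
    for (display, _replacement) in matching_flags(&["full", "help", "format"], "--f") {
        println!("{}", display); // prints --full and --format
    }
}
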
@@ -10,7 +10,6 @@ use crate::shell::completer::NuCompleter;
 use crate::shell::shell::Shell;
 use crate::utils::FileStructure;
 use nu_errors::ShellError;
-use nu_parser::ExpandContext;
 use nu_protocol::{Primitive, ReturnSuccess, UntaggedValue};
 use rustyline::completion::FilenameCompleter;
 use rustyline::hint::{Hinter, HistoryHinter};
@@ -1149,13 +1148,7 @@ impl Shell for FilesystemShell {
         self.completer.complete(line, pos, ctx)
     }
 
-    fn hint(
-        &self,
-        line: &str,
-        pos: usize,
-        ctx: &rustyline::Context<'_>,
-        _expand_context: ExpandContext,
-    ) -> Option<String> {
+    fn hint(&self, line: &str, pos: usize, ctx: &rustyline::Context<'_>) -> Option<String> {
         self.hinter.hint(line, pos, ctx)
     }
 }
@@ -8,7 +8,6 @@ use crate::data::command_dict;
 use crate::prelude::*;
 use crate::shell::shell::Shell;
 use nu_errors::ShellError;
-use nu_parser::ExpandContext;
 use nu_protocol::{
     Primitive, ReturnSuccess, ShellTypeName, TaggedDictBuilder, UntaggedValue, Value,
 };
@@ -249,13 +248,7 @@ impl Shell for HelpShell {
         Ok((replace_pos, completions))
     }
 
-    fn hint(
-        &self,
-        _line: &str,
-        _pos: usize,
-        _ctx: &rustyline::Context<'_>,
-        _context: ExpandContext,
-    ) -> Option<String> {
+    fn hint(&self, _line: &str, _pos: usize, _ctx: &rustyline::Context<'_>) -> Option<String> {
         None
     }
 }
@@ -1,9 +1,8 @@
 use crate::context::Context;
 use ansi_term::{Color, Style};
-use log::log_enabled;
-use nu_parser::{FlatShape, PipelineShape, ShapeResult, Token, TokensIterator};
-use nu_protocol::{errln, outln};
-use nu_source::{nom_input, HasSpan, Tag, Tagged, Text};
+use nu_parser::hir::FlatShape;
+use nu_parser::SignatureRegistry;
+use nu_source::{Span, Spanned, Tag, Tagged};
 use rustyline::completion::Completer;
 use rustyline::error::ReadlineError;
 use rustyline::highlight::Highlighter;
@@ -38,10 +37,7 @@ impl Completer for Helper {
 
 impl Hinter for Helper {
     fn hint(&self, line: &str, pos: usize, ctx: &rustyline::Context<'_>) -> Option<String> {
-        let text = Text::from(line);
-        self.context
-            .shell_manager
-            .hint(line, pos, ctx, self.context.expand_context(&text))
+        self.context.shell_manager.hint(line, pos, ctx)
     }
 }
 
@@ -65,49 +61,19 @@ impl Highlighter for Helper {
     }
 
     fn highlight<'l>(&self, line: &'l str, _pos: usize) -> Cow<'l, str> {
-        let tokens = nu_parser::pipeline(nom_input(line));
+        let lite_pipeline = nu_parser::lite_parse(line, 0);
 
-        match tokens {
+        match lite_pipeline {
             Err(_) => Cow::Borrowed(line),
-            Ok((_rest, v)) => {
-                let pipeline = match v.as_pipeline() {
-                    Err(_) => return Cow::Borrowed(line),
-                    Ok(v) => v,
-                };
+            Ok(lp) => {
+                let classified =
+                    nu_parser::classify_pipeline(&lp, &self.context.registry().clone_box());
 
-                let text = Text::from(line);
-                let expand_context = self.context.expand_context(&text);
+                let shapes = nu_parser::shapes(&classified.commands);
+                let mut painter = Painter::new(line);
 
-                let tokens = vec![Token::Pipeline(pipeline).into_spanned(v.span())];
-                let mut tokens = TokensIterator::new(&tokens[..], expand_context, v.span());
-
-                let shapes = {
-                    // We just constructed a token list that only contains a pipeline, so it can't fail
-                    let result = tokens.expand_infallible(PipelineShape);
-
-                    if let Some(failure) = result.failed {
-                        errln!(
-                            "BUG: PipelineShape didn't find a pipeline :: {:#?}",
-                            failure
-                        );
-                    }
-
-                    tokens.finish_tracer();
-
-                    tokens.state().shapes()
-                };
-
-                if log_enabled!(target: "nu::expand_syntax", log::Level::Debug) {
-                    outln!("");
-                    let _ =
-                        ptree::print_tree(&tokens.expand_tracer().clone().print(Text::from(line)));
-                    outln!("");
-                }
-
-                let mut painter = Painter::new();
-
                 for shape in shapes {
-                    painter.paint_shape(&shape, line);
+                    painter.paint_shape(&shape);
                 }
 
                 Cow::Owned(painter.into_string())
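
The reworked highlight keeps the same return discipline as before: borrow the input untouched when the line does not parse, return an owned colored string when it does. A minimal illustration of that Cow pattern in isolation (fake_colorize is invented for the example):

use std::borrow::Cow;

// Invented stand-in for the real shape-based colorizer.
fn fake_colorize(line: &str) -> Option<String> {
    if line.trim().is_empty() {
        None // pretend parsing failed
    } else {
        Some(format!("\u{1b}[36m{}\u{1b}[0m", line)) // wrap the whole line in cyan
    }
}

fn highlight(line: &str) -> Cow<'_, str> {
    match fake_colorize(line) {
        // Parsing failed: hand back the original line, no allocation.
        None => Cow::Borrowed(line),
        // Parsing succeeded: return the freshly built, colored string.
        Some(colored) => Cow::Owned(colored),
    }
}

fn main() {
    assert!(matches!(highlight("   "), Cow::Borrowed(_)));
    assert!(matches!(highlight("ls *.txt"), Cow::Owned(_)));
}
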
@@ -133,73 +99,94 @@ fn vec_tag<T>(input: Vec<Tagged<T>>) -> Option<Tag> {
 }
 
 struct Painter {
-    current: Style,
-    buffer: String,
+    original: Vec<u8>,
+    styles: Vec<Style>,
 }
 
 impl Painter {
-    fn new() -> Painter {
+    fn new(original: &str) -> Painter {
+        let bytes: Vec<u8> = original.bytes().collect();
+        let bytes_count = bytes.len();
         Painter {
-            current: Style::default(),
-            buffer: String::new(),
+            original: bytes,
+            styles: vec![Color::White.normal(); bytes_count],
+        }
+    }
+
+    fn paint_shape(&mut self, shape: &Spanned<FlatShape>) {
+        let style = match &shape.item {
+            FlatShape::OpenDelimiter(_) => Color::White.normal(),
+            FlatShape::CloseDelimiter(_) => Color::White.normal(),
+            FlatShape::ItVariable | FlatShape::Keyword => Color::Purple.bold(),
+            FlatShape::Variable | FlatShape::Identifier => Color::Purple.normal(),
+            FlatShape::Type => Color::Blue.bold(),
+            FlatShape::CompareOperator => Color::Yellow.normal(),
+            FlatShape::DotDot => Color::Yellow.bold(),
+            FlatShape::Dot => Style::new().fg(Color::White),
+            FlatShape::InternalCommand => Color::Cyan.bold(),
+            FlatShape::ExternalCommand => Color::Cyan.normal(),
+            FlatShape::ExternalWord => Color::Green.bold(),
+            FlatShape::BareMember => Color::Yellow.bold(),
+            FlatShape::StringMember => Color::Yellow.bold(),
+            FlatShape::String => Color::Green.normal(),
+            FlatShape::Path => Color::Cyan.normal(),
+            FlatShape::GlobPattern => Color::Cyan.bold(),
+            FlatShape::Word => Color::Green.normal(),
+            FlatShape::Pipe => Color::Purple.bold(),
+            FlatShape::Flag => Color::Blue.bold(),
+            FlatShape::ShorthandFlag => Color::Blue.bold(),
+            FlatShape::Int => Color::Purple.bold(),
+            FlatShape::Decimal => Color::Purple.bold(),
+            FlatShape::Whitespace | FlatShape::Separator => Color::White.normal(),
+            FlatShape::Comment => Color::Green.bold(),
+            FlatShape::Garbage => Style::new().fg(Color::White).on(Color::Red),
+            FlatShape::Size { number, unit } => {
+                self.paint(Color::Purple.bold(), number);
+                self.paint(Color::Cyan.bold(), unit);
+                return;
+            }
+        };
+
+        self.paint(style, &shape.span);
+    }
+
+    fn paint(&mut self, style: Style, span: &Span) {
+        for pos in span.start()..span.end() {
+            self.styles[pos] = style;
         }
     }
 
     fn into_string(self) -> String {
-        self.buffer
-    }
-
-    fn paint_shape(&mut self, shape: &ShapeResult, line: &str) {
-        let style = match &shape {
-            ShapeResult::Success(shape) => match shape.item {
-                FlatShape::OpenDelimiter(_) => Color::White.normal(),
-                FlatShape::CloseDelimiter(_) => Color::White.normal(),
-                FlatShape::ItVariable | FlatShape::Keyword => Color::Purple.bold(),
-                FlatShape::Variable | FlatShape::Identifier => Color::Purple.normal(),
-                FlatShape::Type => Color::Blue.bold(),
-                FlatShape::CompareOperator => Color::Yellow.normal(),
-                FlatShape::DotDot => Color::Yellow.bold(),
-                FlatShape::Dot => Style::new().fg(Color::White),
-                FlatShape::InternalCommand => Color::Cyan.bold(),
-                FlatShape::ExternalCommand => Color::Cyan.normal(),
-                FlatShape::ExternalWord => Color::Green.bold(),
-                FlatShape::BareMember => Color::Yellow.bold(),
-                FlatShape::StringMember => Color::Yellow.bold(),
-                FlatShape::String => Color::Green.normal(),
-                FlatShape::Path => Color::Cyan.normal(),
-                FlatShape::GlobPattern => Color::Cyan.bold(),
-                FlatShape::Word => Color::Green.normal(),
-                FlatShape::Pipe => Color::Purple.bold(),
-                FlatShape::Flag => Color::Blue.bold(),
-                FlatShape::ShorthandFlag => Color::Blue.bold(),
-                FlatShape::Int => Color::Purple.bold(),
-                FlatShape::Decimal => Color::Purple.bold(),
-                FlatShape::Whitespace | FlatShape::Separator => Color::White.normal(),
-                FlatShape::Comment => Color::Green.bold(),
-                FlatShape::Garbage => Style::new().fg(Color::White).on(Color::Red),
-                FlatShape::Size { number, unit } => {
-                    let number = number.slice(line);
-                    let unit = unit.slice(line);
-
-                    self.paint(Color::Purple.bold(), number);
-                    self.paint(Color::Cyan.bold(), unit);
-                    return;
-                }
-            },
-            ShapeResult::Fallback { shape, .. } => match shape.item {
-                FlatShape::Whitespace | FlatShape::Separator => Color::White.normal(),
-                _ => Style::new().fg(Color::White).on(Color::Red),
-            },
-        };
-
-        self.paint(style, shape.span().slice(line));
-    }
-
-    fn paint(&mut self, style: Style, body: &str) {
-        let infix = self.current.infix(style);
-        self.current = style;
-        self.buffer
-            .push_str(&format!("{}{}", infix, style.paint(body)));
+        let mut idx_start = 0;
+        let mut idx_end = 1;
+
+        if self.original.is_empty() {
+            String::new()
+        } else {
+            let mut builder = String::new();
+
+            let mut current_style = self.styles[0];
+
+            while idx_end < self.styles.len() {
+                if self.styles[idx_end] != current_style {
+                    // Emit, as we changed styles
+                    let intermediate = String::from_utf8_lossy(&self.original[idx_start..idx_end]);
+
+                    builder.push_str(&format!("{}", current_style.paint(intermediate)));
+
+                    current_style = self.styles[idx_end];
+                    idx_start = idx_end;
+                    idx_end += 1;
+                } else {
+                    idx_end += 1;
+                }
+            }
+
+            let intermediate = String::from_utf8_lossy(&self.original[idx_start..idx_end]);
+            builder.push_str(&format!("{}", current_style.paint(intermediate)));
+
+            builder
+        }
     }
 }
 
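
The new Painter records one Style per byte of the original line and only stitches escape codes together at the end, coalescing adjacent bytes that share a style. A condensed, standalone restatement of that run-coalescing step (it assumes only the ansi_term crate and one Style per byte, as Painter guarantees; it is not the exact nushell type):

use ansi_term::{Color, Style};

// Walk the per-byte styles; whenever the style changes (or the input ends),
// paint the run that just finished and start the next one.
// Assumes styles.len() == text.len().
fn paint_runs(text: &str, styles: &[Style]) -> String {
    let bytes = text.as_bytes();
    let mut out = String::new();
    let mut run_start = 0;
    for i in 1..=bytes.len() {
        if i == bytes.len() || styles[i] != styles[run_start] {
            let chunk = String::from_utf8_lossy(&bytes[run_start..i]);
            out.push_str(&format!("{}", styles[run_start].paint(chunk)));
            run_start = i;
        }
    }
    out
}

fn main() {
    let line = "ls *.txt";
    // Hypothetical styling: the first two bytes (the command) in cyan,
    // everything else left on the default style.
    let mut styles = vec![Style::default(); line.len()];
    for style in &mut styles[..2] {
        *style = Color::Cyan.bold();
    }
    println!("{}", paint_runs(line, &styles));
}
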
@@ -7,7 +7,6 @@ use crate::commands::rm::RemoveArgs;
 use crate::prelude::*;
 use crate::stream::OutputStream;
 use nu_errors::ShellError;
-use nu_parser::ExpandContext;
 use std::path::PathBuf;
 
 pub trait Shell: std::fmt::Debug {
@@ -35,11 +34,5 @@ pub trait Shell: std::fmt::Debug {
         ctx: &rustyline::Context<'_>,
     ) -> Result<(usize, Vec<rustyline::completion::Pair>), rustyline::error::ReadlineError>;
 
-    fn hint(
-        &self,
-        _line: &str,
-        _pos: usize,
-        _ctx: &rustyline::Context<'_>,
-        _context: ExpandContext,
-    ) -> Option<String>;
+    fn hint(&self, _line: &str, _pos: usize, _ctx: &rustyline::Context<'_>) -> Option<String>;
 }
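
With the extra ExpandContext parameter gone, the trait method now has the same shape as rustyline's Hinter::hint. A toy implementor of that shape, just to show the contract (not nushell's real hinter, which delegates to rustyline's HistoryHinter):

/// Toy hinter with the same signature shape as the simplified trait method:
/// if the line is a prefix of one hard-coded candidate, suggest the rest of it.
struct StaticHinter;

impl StaticHinter {
    fn hint(&self, line: &str, pos: usize) -> Option<String> {
        const CANDIDATE: &str = "open Cargo.toml | get package.version";
        if !line.is_empty() && pos == line.len() && CANDIDATE.starts_with(line) {
            Some(CANDIDATE[line.len()..].to_string())
        } else {
            None
        }
    }
}

fn main() {
    let hinter = StaticHinter;
    // Typing "open Ca" is hinted with the remainder of the candidate line.
    assert_eq!(
        hinter.hint("open Ca", 7),
        Some("rgo.toml | get package.version".to_string())
    );
}
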
@@ -9,7 +9,6 @@ use crate::shell::filesystem_shell::FilesystemShell;
 use crate::shell::shell::Shell;
 use crate::stream::OutputStream;
 use nu_errors::ShellError;
-use nu_parser::ExpandContext;
 use parking_lot::Mutex;
 use std::error::Error;
 use std::path::PathBuf;
@@ -95,9 +94,9 @@ impl ShellManager {
         line: &str,
         pos: usize,
         ctx: &rustyline::Context<'_>,
-        context: ExpandContext,
+        //context: ExpandContext,
     ) -> Option<String> {
-        self.shells.lock()[self.current_shell()].hint(line, pos, ctx, context)
+        self.shells.lock()[self.current_shell()].hint(line, pos, ctx)
     }
 
     pub fn next(&mut self) {
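
ShellManager::hint now forwards the trimmed-down call the same way its other methods do: lock the shared shell list and delegate to the current shell. A stripped-down sketch of that delegation pattern with hypothetical types (std::sync::Mutex standing in for parking_lot):

use std::sync::{Arc, Mutex};

// Hypothetical stand-in for the Shell trait: only the part needed for hints.
trait HintSource: Send {
    fn hint(&self, line: &str, pos: usize) -> Option<String>;
}

struct Manager {
    shells: Arc<Mutex<Vec<Box<dyn HintSource>>>>,
    current_shell: usize,
}

impl Manager {
    // Delegate to whichever shell is currently active, as ShellManager::hint does.
    fn hint(&self, line: &str, pos: usize) -> Option<String> {
        let shells = self.shells.lock().ok()?;
        shells.get(self.current_shell)?.hint(line, pos)
    }
}

struct Toy;
impl HintSource for Toy {
    fn hint(&self, line: &str, _pos: usize) -> Option<String> {
        line.starts_with('l').then(|| "s".to_string())
    }
}

fn main() {
    let manager = Manager {
        shells: Arc::new(Mutex::new(vec![Box::new(Toy) as Box<dyn HintSource>])),
        current_shell: 0,
    };
    assert_eq!(manager.hint("l", 1), Some("s".to_string()));
}
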
@@ -8,7 +8,6 @@ use crate::prelude::*;
 use crate::shell::shell::Shell;
 use crate::utils::ValueStructure;
 use nu_errors::ShellError;
-use nu_parser::ExpandContext;
 use nu_protocol::{ReturnSuccess, ShellTypeName, UntaggedValue, Value};
 use std::ffi::OsStr;
 use std::path::{Path, PathBuf};
@@ -283,13 +282,7 @@ impl Shell for ValueShell {
         Ok((replace_pos, completions))
     }
 
-    fn hint(
-        &self,
-        _line: &str,
-        _pos: usize,
-        _ctx: &rustyline::Context<'_>,
-        _context: ExpandContext,
-    ) -> Option<String> {
+    fn hint(&self, _line: &str, _pos: usize, _ctx: &rustyline::Context<'_>) -> Option<String> {
         None
     }
 }
@@ -2,7 +2,7 @@ use crate::data::value::compare_values;
 use crate::data::TaggedListBuilder;
 use chrono::{DateTime, NaiveDate, Utc};
 use nu_errors::ShellError;
-use nu_parser::CompareOperator;
+use nu_parser::hir::CompareOperator;
 use nu_protocol::{Primitive, TaggedDictBuilder, UntaggedValue, Value};
 use nu_source::{SpannedItem, Tag, Tagged, TaggedItem};
 use nu_value_ext::{get_data_by_key, ValueExt};
@@ -338,32 +338,6 @@ fn valuesystem_change_current_path_to_parent_path() {
     })
 }
 
-#[test]
-fn valuesystem_change_to_home_directory() {
-    Playground::setup("cd_test_14", |dirs, sandbox| {
-        sandbox.with_files(vec![FileWithContent(
-            "sample.toml",
-            r#"
-                [paquete]
-                el = "pollo loco"
-            "#,
-        )]);
-
-        let actual = nu!(
-            cwd: dirs.test(),
-            r#"
-                enter sample.toml
-                cd paquete
-                cd ~
-                pwd | echo $it
-                exit
-            "#
-        );
-
-        assert_eq!(PathBuf::from(actual), PathBuf::from("/"));
-    })
-}
-
 #[test]
 fn valuesystem_change_to_a_path_containing_spaces() {
     Playground::setup("cd_test_15", |dirs, sandbox| {
@@ -37,5 +37,5 @@ fn sort_primitive_values() {
         "#
     ));
 
-    assert_eq!(actual, "authors = [\"Yehuda Katz <wycats@gmail.com>\"]");
+    assert_eq!(actual, "authors = [\"The Nu Project Contributors\"]");
 }
@@ -6,19 +6,23 @@ use nu_test_support::{nu, pipeline};
 fn all() {
     Playground::setup("sum_test_1", |dirs, sandbox| {
         sandbox.with_files(vec![FileWithContentToBeTrimmed(
-            "meals.csv",
+            "meals.json",
             r#"
-                description,calories
-                "1 large egg",90
-                "1 cup white rice",250
-                "1 tablespoon fish oil",108
+                {
+                    meals: [
+                        {description: "1 large egg", calories: 90},
+                        {description: "1 cup white rice", calories: 250},
+                        {description: "1 tablespoon fish oil", calories: 108}
+                    ]
+                }
             "#,
         )]);
 
         let actual = nu!(
            cwd: dirs.test(), pipeline(
            r#"
-                open meals.csv
+                open meals.json
+                | get meals
                 | get calories
                 | sum
                 | echo $it
@@ -78,7 +78,7 @@ fn infers_types() {
         sandbox.with_files(vec![FileWithContentToBeTrimmed(
             "los_cuatro_mosqueteros.csv",
             r#"
-                first_name,last_name,rusty_luck
+                first_name,last_name,rusty_luck,d
                 Andrés,Robalino,1,d
                 Jonathan,Turner,1,d
                 Yehuda,Katz,1,d
@@ -1,7 +1,7 @@
 [package]
 name = "nu-errors"
 version = "0.12.0"
-authors = ["Yehuda Katz <wycats@gmail.com>", "Jonathan Turner <jonathan.d.turner@gmail.com>", "Andrés N. Robalino <andres@androbtech.com>"]
+authors = ["The Nu Project Contributors"]
 edition = "2018"
 description = "Core error subsystem for Nushell"
 license = "MIT"
@@ -1,13 +0,0 @@
-[package]
-name = "nu-macros"
-version = "0.12.0"
-authors = ["Yehuda Katz <wycats@gmail.com>", "Jonathan Turner <jonathan.d.turner@gmail.com>", "Andrés N. Robalino <andres@androbtech.com>"]
-edition = "2018"
-description = "Core macros for building Nushell"
-license = "MIT"
-
-[lib]
-doctest = false
-
-[dependencies]
-nu-protocol = { path = "../nu-protocol", version = "0.12.0" }
@@ -1,25 +0,0 @@
-#[macro_export]
-macro_rules! signature {
-    (def $name:tt {
-        $usage:tt
-        $(
-            $positional_name:tt $positional_ty:tt - $positional_desc:tt
-        )*
-    }) => {{
-        let signature = Signature::new(stringify!($name)).desc($usage);
-        $(
-            $crate::positional! { signature, $positional_name $positional_ty - $positional_desc }
-        )*
-        signature
-    }};
-}
-
-#[macro_export]
-macro_rules! positional {
-    ($ident:tt, $name:tt (optional $shape:tt) - $desc:tt) => {
-        let $ident = $ident.optional(stringify!($name), SyntaxShape::$shape, $desc);
-    };
-    ($ident:tt, $name:tt ($shape:tt)- $desc:tt) => {
-        let $ident = $ident.required(stringify!($name), SyntaxShape::$shape, $desc);
-    };
-}
@@ -1,48 +1,27 @@
 [package]
 name = "nu-parser"
 version = "0.12.0"
-authors = ["Yehuda Katz <wycats@gmail.com>", "Jonathan Turner <jonathan.d.turner@gmail.com>", "Andrés N. Robalino <andres@androbtech.com>"]
+authors = ["The Nu Project Contributors"]
 edition = "2018"
-description = "Core parser used in Nushell"
-license = "MIT"
-
-[lib]
-doctest = false
+# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
 
 [dependencies]
-nu-errors = { path = "../nu-errors", version = "0.12.0" }
-nu-source = { path = "../nu-source", version = "0.12.0" }
-nu-protocol = { path = "../nu-protocol", version = "0.12.0" }
-
-pretty_env_logger = "0.3.1"
-pretty = "0.5.2"
-termcolor = "1.0.5"
-log = "0.4.8"
-indexmap = { version = "1.3.0", features = ["serde-1"] }
-serde = { version = "1.0.102", features = ["derive"] }
-nom = "5.0.1"
-nom_locate = "1.0.0"
-nom-tracable = "0.4.1"
-num-traits = "0.2.8"
-num-bigint = { version = "0.2.3", features = ["serde"] }
+num-bigint = { version = "0.2.6", features = ["serde"] }
 bigdecimal = { version = "0.1.0", features = ["serde"] }
+indexmap = { version = "1.3.2", features = ["serde-1"] }
+parking_lot = "0.10.0"
+num-traits = "0.2.11"
 derive-new = "0.5.8"
-getset = "0.0.9"
-cfg-if = "0.1"
-itertools = "0.8.1"
-shellexpand = "1.0.0"
-ansi_term = "0.12.1"
-ptree = {version = "0.2" }
+serde = "1.0.106"
 language-reporting = "0.4.0"
-unicode-xid = "0.2.0"
-enumflags2 = "0.6.2"
+log = "0.4.8"
+shellexpand = "2.0.0"
 
-[dev-dependencies]
-pretty_assertions = "0.6.1"
-
-[build-dependencies]
-nu-build = { version = "0.12.0", path = "../nu-build" }
+nu-source = { version = "0.12.0", path = "../nu-source" }
+nu-protocol = { version = "0.12.0", path = "../nu-protocol" }
+nu-errors = { version = "0.12.0", path = "../nu-errors" }
 
 [features]
 stable = []
-trace = ["nom-tracable/trace"]
+trace = []
|
@ -1,3 +0,0 @@
|
|||||||
fn main() -> Result<(), Box<dyn std::error::Error>> {
|
|
||||||
nu_build::build()
|
|
||||||
}
|
|
@ -1,34 +0,0 @@
|
|||||||
pub mod classified;
|
|
||||||
|
|
||||||
use crate::commands::classified::external::{ExternalArg, ExternalArgs, ExternalCommand};
|
|
||||||
use crate::commands::classified::ClassifiedCommand;
|
|
||||||
use crate::hir::expand_external_tokens::ExternalTokensShape;
|
|
||||||
use crate::hir::tokens_iterator::TokensIterator;
|
|
||||||
use nu_errors::ParseError;
|
|
||||||
use nu_source::{Spanned, Tagged};
|
|
||||||
|
|
||||||
// Classify this command as an external command, which doesn't give special meaning
|
|
||||||
// to nu syntactic constructs, and passes all arguments to the external command as
|
|
||||||
// strings.
|
|
||||||
pub(crate) fn external_command(
|
|
||||||
tokens: &mut TokensIterator,
|
|
||||||
name: Tagged<&str>,
|
|
||||||
) -> Result<ClassifiedCommand, ParseError> {
|
|
||||||
let Spanned { item, span } = tokens.expand_infallible(ExternalTokensShape).tokens;
|
|
||||||
let full_span = name.span().until(span);
|
|
||||||
|
|
||||||
Ok(ClassifiedCommand::External(ExternalCommand {
|
|
||||||
name: name.to_string(),
|
|
||||||
name_tag: name.tag(),
|
|
||||||
args: ExternalArgs {
|
|
||||||
list: item
|
|
||||||
.iter()
|
|
||||||
.map(|x| ExternalArg {
|
|
||||||
tag: x.span.into(),
|
|
||||||
arg: x.item.clone(),
|
|
||||||
})
|
|
||||||
.collect(),
|
|
||||||
span: full_span,
|
|
||||||
},
|
|
||||||
}))
|
|
||||||
}
|
|
@ -1,100 +0,0 @@
|
|||||||
pub mod external;
|
|
||||||
pub mod internal;
|
|
||||||
|
|
||||||
use crate::commands::classified::external::ExternalCommand;
|
|
||||||
use crate::commands::classified::internal::InternalCommand;
|
|
||||||
use crate::hir;
|
|
||||||
use crate::parse::token_tree::SpannedToken;
|
|
||||||
use derive_new::new;
|
|
||||||
use nu_errors::ParseError;
|
|
||||||
use nu_source::{b, DebugDocBuilder, HasSpan, PrettyDebugWithSource, Span};
|
|
||||||
|
|
||||||
#[derive(Debug, Clone, Eq, PartialEq)]
|
|
||||||
pub enum ClassifiedCommand {
|
|
||||||
#[allow(unused)]
|
|
||||||
Expr(SpannedToken),
|
|
||||||
#[allow(unused)]
|
|
||||||
Dynamic(hir::Call),
|
|
||||||
Internal(InternalCommand),
|
|
||||||
External(ExternalCommand),
|
|
||||||
Error(ParseError),
|
|
||||||
}
|
|
||||||
|
|
||||||
impl PrettyDebugWithSource for ClassifiedCommand {
|
|
||||||
fn pretty_debug(&self, source: &str) -> DebugDocBuilder {
|
|
||||||
match self {
|
|
||||||
ClassifiedCommand::Expr(token) => b::typed("command", token.pretty_debug(source)),
|
|
||||||
ClassifiedCommand::Dynamic(call) => b::typed("command", call.pretty_debug(source)),
|
|
||||||
ClassifiedCommand::Error(_) => b::error("no command"),
|
|
||||||
ClassifiedCommand::Internal(internal) => internal.pretty_debug(source),
|
|
||||||
ClassifiedCommand::External(external) => external.pretty_debug(source),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl HasSpan for ClassifiedCommand {
|
|
||||||
fn span(&self) -> Span {
|
|
||||||
match self {
|
|
||||||
ClassifiedCommand::Expr(node) => node.span(),
|
|
||||||
ClassifiedCommand::Internal(command) => command.span(),
|
|
||||||
ClassifiedCommand::Dynamic(call) => call.span,
|
|
||||||
ClassifiedCommand::Error(_) => Span::unknown(),
|
|
||||||
ClassifiedCommand::External(command) => command.span(),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(new, Debug, Eq, PartialEq)]
|
|
||||||
pub(crate) struct DynamicCommand {
|
|
||||||
pub(crate) args: hir::Call,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug, Clone)]
|
|
||||||
pub struct Commands {
|
|
||||||
pub list: Vec<ClassifiedCommand>,
|
|
||||||
pub span: Span,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl std::ops::Deref for Commands {
|
|
||||||
type Target = [ClassifiedCommand];
|
|
||||||
|
|
||||||
fn deref(&self) -> &Self::Target {
|
|
||||||
&self.list
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug, Clone)]
|
|
||||||
pub struct ClassifiedPipeline {
|
|
||||||
pub commands: Commands,
|
|
||||||
// this is not a Result to make it crystal clear that these shapes
|
|
||||||
// aren't intended to be used directly with `?`
|
|
||||||
pub failed: Option<nu_errors::ParseError>,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl ClassifiedPipeline {
|
|
||||||
pub fn commands(list: Vec<ClassifiedCommand>, span: impl Into<Span>) -> ClassifiedPipeline {
|
|
||||||
ClassifiedPipeline {
|
|
||||||
commands: Commands {
|
|
||||||
list,
|
|
||||||
span: span.into(),
|
|
||||||
},
|
|
||||||
failed: None,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl HasSpan for ClassifiedPipeline {
|
|
||||||
fn span(&self) -> Span {
|
|
||||||
self.commands.span
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl PrettyDebugWithSource for ClassifiedPipeline {
|
|
||||||
fn pretty_debug(&self, source: &str) -> DebugDocBuilder {
|
|
||||||
b::intersperse(
|
|
||||||
self.commands.iter().map(|c| c.pretty_debug(source)),
|
|
||||||
b::operator(" | "),
|
|
||||||
)
|
|
||||||
.or(b::delimit("<", b::description("empty pipeline"), ">"))
|
|
||||||
}
|
|
||||||
}
|
|
@ -1,28 +0,0 @@
|
|||||||
use crate::hir;
|
|
||||||
|
|
||||||
use derive_new::new;
|
|
||||||
use nu_source::{b, DebugDocBuilder, HasSpan, PrettyDebugWithSource, Span, Tag};
|
|
||||||
|
|
||||||
#[derive(new, Debug, Clone, Eq, PartialEq)]
|
|
||||||
pub struct InternalCommand {
|
|
||||||
pub name: String,
|
|
||||||
pub name_tag: Tag,
|
|
||||||
pub args: hir::Call,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl PrettyDebugWithSource for InternalCommand {
|
|
||||||
fn pretty_debug(&self, source: &str) -> DebugDocBuilder {
|
|
||||||
b::typed(
|
|
||||||
"internal command",
|
|
||||||
b::description(&self.name) + b::space() + self.args.pretty_debug(source),
|
|
||||||
)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl HasSpan for InternalCommand {
|
|
||||||
fn span(&self) -> Span {
|
|
||||||
let start = self.name_tag.span;
|
|
||||||
|
|
||||||
start.until(self.args.span)
|
|
||||||
}
|
|
||||||
}
|
|
File diff suppressed because it is too large
@ -1,2 +0,0 @@
|
|||||||
#[cfg(test)]
|
|
||||||
pub mod tests;
|
|
@ -1,301 +0,0 @@
|
|||||||
use crate::commands::classified::{internal::InternalCommand, ClassifiedCommand};
|
|
||||||
use crate::hir::expand_external_tokens::{ExternalTokensShape, ExternalTokensSyntax};
|
|
||||||
use crate::hir::{
|
|
||||||
self, named::NamedValue, syntax_shape::*, Expression, NamedArguments, SpannedExpression,
|
|
||||||
TokensIterator,
|
|
||||||
};
|
|
||||||
use crate::parse::files::Files;
|
|
||||||
use crate::parse::token_tree_builder::{CurriedToken, TokenTreeBuilder as b};
|
|
||||||
use crate::SpannedToken;
|
|
||||||
use derive_new::new;
|
|
||||||
use indexmap::IndexMap;
|
|
||||||
use nu_errors::{ParseError, ShellError};
|
|
||||||
use nu_protocol::{outln, PathMember, Signature, SyntaxShape};
|
|
||||||
use nu_source::{HasSpan, PrettyDebugWithSource, Span, SpannedItem, Tag, Text};
|
|
||||||
use pretty_assertions::assert_eq;
|
|
||||||
use std::fmt::Debug;
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn test_parse_external() {
|
|
||||||
parse_tokens(
|
|
||||||
fallible(ExternalTokensShape),
|
|
||||||
"5kb",
|
|
||||||
vec![b::bare("5kb")],
|
|
||||||
|tokens| {
|
|
||||||
ExternalTokensSyntax::new(
|
|
||||||
vec![format!("5kb").spanned(tokens[0].span())].spanned(tokens[0].span()),
|
|
||||||
)
|
|
||||||
},
|
|
||||||
);
|
|
||||||
|
|
||||||
parse_tokens(
|
|
||||||
fallible(ExternalTokensShape),
|
|
||||||
"cargo +nightly run -- --features all",
|
|
||||||
vec![
|
|
||||||
b::bare("cargo"),
|
|
||||||
b::sp(),
|
|
||||||
b::external_word("+nightly"),
|
|
||||||
b::sp(),
|
|
||||||
b::bare("run"),
|
|
||||||
b::sp(),
|
|
||||||
b::external_word("--"),
|
|
||||||
b::sp(),
|
|
||||||
b::flag("features"),
|
|
||||||
b::sp(),
|
|
||||||
b::bare("all"),
|
|
||||||
],
|
|
||||||
|tokens| {
|
|
||||||
let cargo = format!("cargo").spanned(tokens[0].span());
|
|
||||||
let nightly = format!("+nightly").spanned(tokens[2].span());
|
|
||||||
let run = format!("run").spanned(tokens[4].span());
|
|
||||||
let dashdash = format!("--").spanned(tokens[6].span());
|
|
||||||
let features = format!("--features").spanned(tokens[8].span());
|
|
||||||
let all = format!("all").spanned(tokens[10].span());
|
|
||||||
let span = tokens[0].span().until(tokens[10].span());
|
|
||||||
|
|
||||||
ExternalTokensSyntax::new(
|
|
||||||
vec![cargo, nightly, run, dashdash, features, all].spanned(span),
|
|
||||||
)
|
|
||||||
},
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn test_parse_string() {
|
|
||||||
parse_tokens(
|
|
||||||
CoerceStringShape,
|
|
||||||
r#""hello""#,
|
|
||||||
vec![b::string("hello")],
|
|
||||||
|tokens| {
|
|
||||||
Expression::string(inner_string_span(tokens[0].span())).into_expr(tokens[0].span())
|
|
||||||
},
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn test_parse_path() {
|
|
||||||
let _ = pretty_env_logger::try_init();
|
|
||||||
|
|
||||||
parse_expr(
|
|
||||||
AnyExpressionShape,
|
|
||||||
"$it.cpu",
|
|
||||||
vec![b::it_var(), b::dot(), b::bare("cpu")],
|
|
||||||
|tokens| {
|
|
||||||
let (outer_var, inner_var) = tokens[0].expect_var();
|
|
||||||
let bare = tokens[2].expect_bare();
|
|
||||||
Expression::path(
|
|
||||||
Expression::it_variable(inner_var).into_expr(outer_var),
|
|
||||||
vec![PathMember::string("cpu", bare)],
|
|
||||||
)
|
|
||||||
.into_expr(outer_var.until(bare))
|
|
||||||
},
|
|
||||||
);
|
|
||||||
|
|
||||||
parse_expr(
|
|
||||||
VariablePathShape,
|
|
||||||
r#"$cpu.amount."max ghz""#,
|
|
||||||
vec![
|
|
||||||
b::var("cpu"),
|
|
||||||
b::dot(),
|
|
||||||
b::bare("amount"),
|
|
||||||
b::dot(),
|
|
||||||
b::string("max ghz"),
|
|
||||||
],
|
|
||||||
|tokens| {
|
|
||||||
let (outer_var, inner_var) = tokens[0].expect_var();
|
|
||||||
let amount = tokens[2].expect_bare();
|
|
||||||
let (outer_max_ghz, _) = tokens[4].expect_string();
|
|
||||||
|
|
||||||
Expression::path(
|
|
||||||
Expression::variable(inner_var).into_expr(outer_var),
|
|
||||||
vec![
|
|
||||||
PathMember::string("amount", amount),
|
|
||||||
PathMember::string("max ghz", outer_max_ghz),
|
|
||||||
],
|
|
||||||
)
|
|
||||||
.into_expr(outer_var.until(outer_max_ghz))
|
|
||||||
},
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn test_parse_command() {
|
|
||||||
parse_tokens(
|
|
||||||
fallible(ClassifiedCommandShape),
|
|
||||||
"ls *.txt",
|
|
||||||
vec![b::bare("ls"), b::sp(), b::pattern("*.txt")],
|
|
||||||
|tokens| {
|
|
||||||
let bare = tokens[0].expect_bare();
|
|
||||||
let pat = tokens[2].expect_pattern();
|
|
||||||
|
|
||||||
let mut map = IndexMap::new();
|
|
||||||
map.insert("full".to_string(), NamedValue::AbsentSwitch);
|
|
||||||
map.insert("help".to_string(), NamedValue::AbsentSwitch);
|
|
||||||
|
|
||||||
ClassifiedCommand::Internal(InternalCommand::new(
|
|
||||||
"ls".to_string(),
|
|
||||||
Tag {
|
|
||||||
span: bare,
|
|
||||||
anchor: None,
|
|
||||||
},
|
|
||||||
hir::Call {
|
|
||||||
head: Box::new(Expression::Command(bare).into_expr(bare)),
|
|
||||||
positional: Some(vec![Expression::pattern("*.txt").into_expr(pat)]),
|
|
||||||
named: Some(NamedArguments { named: map }),
|
|
||||||
span: bare.until(pat),
|
|
||||||
},
|
|
||||||
))
|
|
||||||
},
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug, Clone, new)]
|
|
||||||
struct TestRegistry {
|
|
||||||
#[new(default)]
|
|
||||||
signatures: indexmap::IndexMap<String, Signature>,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl TestRegistry {
|
|
||||||
fn insert(&mut self, key: &str, value: Signature) {
|
|
||||||
self.signatures.insert(key.to_string(), value);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl SignatureRegistry for TestRegistry {
|
|
||||||
fn has(&self, name: &str) -> bool {
|
|
||||||
self.signatures.contains_key(name)
|
|
||||||
}
|
|
||||||
fn get(&self, name: &str) -> Option<Signature> {
|
|
||||||
self.signatures.get(name).cloned()
|
|
||||||
}
|
|
||||||
fn clone_box(&self) -> Box<dyn SignatureRegistry> {
|
|
||||||
Box::new(self.clone())
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fn with_empty_context(source: &Text, callback: impl FnOnce(ExpandContext)) {
|
|
||||||
let mut registry = TestRegistry::new();
|
|
||||||
registry.insert(
|
|
||||||
"ls",
|
|
||||||
Signature::build("ls")
|
|
||||||
.optional(
|
|
||||||
"path",
|
|
||||||
SyntaxShape::Pattern,
|
|
||||||
"a path to get the directory contents from",
|
|
||||||
)
|
|
||||||
.switch(
|
|
||||||
"full",
|
|
||||||
"list all available columns for each entry",
|
|
||||||
Some('f'),
|
|
||||||
),
|
|
||||||
);
|
|
||||||
|
|
||||||
callback(ExpandContext::new(Box::new(registry), source, None))
|
|
||||||
}
|
|
||||||
|
|
||||||
trait Expand {}
|
|
||||||
|
|
||||||
fn parse_tokens<T: Eq + HasSpan + PrettyDebugWithSource + Clone + Debug + 'static>(
|
|
||||||
shape: impl ExpandSyntax<Output = Result<T, ParseError>>,
|
|
||||||
syntax: &str,
|
|
||||||
tokens: Vec<CurriedToken>,
|
|
||||||
expected: impl FnOnce(&[SpannedToken]) -> T,
|
|
||||||
) {
|
|
||||||
// let parsed_tokens = parse(syntax);
|
|
||||||
let tokens = b::token_list(tokens);
|
|
||||||
let (tokens, source) = b::build(tokens);
|
|
||||||
let text = Text::from(&source);
|
|
||||||
|
|
||||||
assert_eq!(syntax, source);
|
|
||||||
|
|
||||||
with_empty_context(&text, |context| {
|
|
||||||
let tokens = tokens.expect_list();
|
|
||||||
let mut iterator = TokensIterator::new(&tokens.item, context, tokens.span);
|
|
||||||
|
|
||||||
let expr = iterator.expand_syntax(shape);
|
|
||||||
|
|
||||||
let expr = match expr {
|
|
||||||
Ok(expr) => expr,
|
|
||||||
Err(err) => {
|
|
||||||
outln!("");
|
|
||||||
ptree::print_tree(&iterator.expand_tracer().print(text.clone())).unwrap();
|
|
||||||
outln!("");
|
|
||||||
|
|
||||||
print_err(err.into(), &iterator.context().source().clone());
|
|
||||||
panic!("Parse failed");
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
let expected = expected(&tokens.item);
|
|
||||||
|
|
||||||
if expr != expected {
|
|
||||||
outln!("");
|
|
||||||
ptree::print_tree(&iterator.expand_tracer().print(text.clone())).unwrap();
|
|
||||||
outln!("");
|
|
||||||
|
|
||||||
assert_eq!(expr, expected);
|
|
||||||
}
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
fn parse_expr(
|
|
||||||
shape: impl ExpandSyntax<Output = Result<SpannedExpression, ParseError>>,
|
|
||||||
syntax: &str,
|
|
||||||
tokens: Vec<CurriedToken>,
|
|
||||||
expected: impl FnOnce(&[SpannedToken]) -> SpannedExpression,
|
|
||||||
) {
|
|
||||||
// let parsed_tokens = parse(syntax);
|
|
||||||
let tokens = b::token_list(tokens);
|
|
||||||
let (tokens, source) = b::build(tokens);
|
|
||||||
let text = Text::from(&source);
|
|
||||||
|
|
||||||
assert_eq!(syntax, source);
|
|
||||||
|
|
||||||
with_empty_context(&text, |context| {
|
|
||||||
let tokens = tokens.expect_list();
|
|
||||||
let mut iterator = TokensIterator::new(&tokens.item, context, tokens.span);
|
|
||||||
|
|
||||||
let expr = iterator.expand_syntax(shape);
|
|
||||||
|
|
||||||
let expr = match expr {
|
|
||||||
Ok(expr) => expr,
|
|
||||||
Err(err) => {
|
|
||||||
outln!("");
|
|
||||||
ptree::print_tree(&iterator.expand_tracer().print(text.clone())).unwrap();
|
|
||||||
outln!("");
|
|
||||||
|
|
||||||
print_err(err.into(), &iterator.source());
|
|
||||||
panic!("Parse failed");
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
let expected = expected(&tokens.item);
|
|
||||||
|
|
||||||
if expr != expected {
|
|
||||||
outln!("");
|
|
||||||
ptree::print_tree(&iterator.expand_tracer().print(text.clone())).unwrap();
|
|
||||||
outln!("");
|
|
||||||
|
|
||||||
assert_eq!(expr, expected);
|
|
||||||
}
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
fn inner_string_span(span: Span) -> Span {
|
|
||||||
Span::new(span.start() + 1, span.end() - 1)
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn print_err(err: ShellError, source: &Text) {
|
|
||||||
let diag = err.into_diagnostic();
|
|
||||||
|
|
||||||
let writer = termcolor::StandardStream::stderr(termcolor::ColorChoice::Auto);
|
|
||||||
let mut source = source.to_string();
|
|
||||||
source.push_str(" ");
|
|
||||||
let files = Files::new(source);
|
|
||||||
let _ = language_reporting::emit(
|
|
||||||
&mut writer.lock(),
|
|
||||||
&files,
|
|
||||||
&diag,
|
|
||||||
&language_reporting::DefaultConfig,
|
|
||||||
);
|
|
||||||
}
|
|
@ -1,31 +0,0 @@
|
|||||||
use crate::{hir::SpannedExpression, CompareOperator};
|
|
||||||
|
|
||||||
use derive_new::new;
|
|
||||||
use getset::Getters;
|
|
||||||
use nu_source::{b, DebugDocBuilder, PrettyDebugWithSource, Spanned};
|
|
||||||
use serde::{Deserialize, Serialize};
|
|
||||||
|
|
||||||
#[derive(
|
|
||||||
Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Hash, Getters, Serialize, Deserialize, new,
|
|
||||||
)]
|
|
||||||
#[get = "pub"]
|
|
||||||
pub struct Binary {
|
|
||||||
left: SpannedExpression,
|
|
||||||
op: Spanned<CompareOperator>,
|
|
||||||
right: SpannedExpression,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl PrettyDebugWithSource for Binary {
|
|
||||||
fn pretty_debug(&self, source: &str) -> DebugDocBuilder {
|
|
||||||
b::delimit(
|
|
||||||
"<",
|
|
||||||
self.left.pretty_debug(source)
|
|
||||||
+ b::space()
|
|
||||||
+ b::keyword(self.op.span.slice(source))
|
|
||||||
+ b::space()
|
|
||||||
+ self.right.pretty_debug(source),
|
|
||||||
">",
|
|
||||||
)
|
|
||||||
.group()
|
|
||||||
}
|
|
||||||
}
|
|
@ -1,175 +0,0 @@
|
|||||||
use crate::parse::token_tree::Token;
|
|
||||||
use crate::{
|
|
||||||
hir::syntax_shape::{ExpandSyntax, FlatShape, MaybeSpaceShape},
|
|
||||||
TokensIterator,
|
|
||||||
};
|
|
||||||
use derive_new::new;
|
|
||||||
use nu_errors::ParseError;
|
|
||||||
use nu_protocol::SpannedTypeName;
|
|
||||||
use nu_source::{b, DebugDocBuilder, HasSpan, PrettyDebug, Span, Spanned, SpannedItem};
|
|
||||||
|
|
||||||
#[derive(Debug, Eq, PartialEq, Clone, new)]
|
|
||||||
pub struct ExternalTokensSyntax {
|
|
||||||
pub tokens: Spanned<Vec<Spanned<String>>>,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl HasSpan for ExternalTokensSyntax {
|
|
||||||
fn span(&self) -> Span {
|
|
||||||
self.tokens.span
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl PrettyDebug for ExternalTokensSyntax {
|
|
||||||
fn pretty(&self) -> DebugDocBuilder {
|
|
||||||
b::intersperse(
|
|
||||||
self.tokens
|
|
||||||
.iter()
|
|
||||||
.map(|token| b::primitive(format!("{:?}", token.item))),
|
|
||||||
b::space(),
|
|
||||||
)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug, Copy, Clone)]
|
|
||||||
pub struct ExternalTokensShape;
|
|
||||||
|
|
||||||
impl ExpandSyntax for ExternalTokensShape {
|
|
||||||
type Output = ExternalTokensSyntax;
|
|
||||||
|
|
||||||
fn name(&self) -> &'static str {
|
|
||||||
"external tokens"
|
|
||||||
}
|
|
||||||
|
|
||||||
fn expand<'a, 'b>(&self, token_nodes: &'b mut TokensIterator<'a>) -> ExternalTokensSyntax {
|
|
||||||
let mut out: Vec<Spanned<String>> = vec![];
|
|
||||||
|
|
||||||
let start = token_nodes.span_at_cursor();
|
|
||||||
|
|
||||||
loop {
|
|
||||||
match token_nodes.expand_syntax(ExternalExpressionShape) {
|
|
||||||
Err(_) => break,
|
|
||||||
Ok(span) => out.push(span.spanned_string(&token_nodes.source())),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
let end = token_nodes.span_at_cursor();
|
|
||||||
|
|
||||||
ExternalTokensSyntax {
|
|
||||||
tokens: out.spanned(start.until(end)),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug, Copy, Clone)]
|
|
||||||
pub struct ExternalExpressionShape;
|
|
||||||
|
|
||||||
impl ExpandSyntax for ExternalExpressionShape {
|
|
||||||
type Output = Result<Span, ParseError>;
|
|
||||||
|
|
||||||
fn name(&self) -> &'static str {
|
|
||||||
"external expression"
|
|
||||||
}
|
|
||||||
|
|
||||||
fn expand<'a, 'b>(&self, token_nodes: &'b mut TokensIterator<'a>) -> Result<Span, ParseError> {
|
|
||||||
token_nodes.expand_infallible(MaybeSpaceShape);
|
|
||||||
|
|
||||||
let first = token_nodes.expand_syntax(ExternalStartToken)?;
|
|
||||||
let mut last = first;
|
|
||||||
|
|
||||||
loop {
|
|
||||||
let continuation = token_nodes.expand_syntax(ExternalStartToken);
|
|
||||||
|
|
||||||
if let Ok(continuation) = continuation {
|
|
||||||
last = continuation;
|
|
||||||
} else {
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
Ok(first.until(last))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug, Copy, Clone)]
|
|
||||||
struct ExternalStartToken;
|
|
||||||
|
|
||||||
impl ExpandSyntax for ExternalStartToken {
|
|
||||||
type Output = Result<Span, ParseError>;
|
|
||||||
|
|
||||||
fn name(&self) -> &'static str {
|
|
||||||
"external start token"
|
|
||||||
}
|
|
||||||
fn expand<'a, 'b>(&self, token_nodes: &'b mut TokensIterator<'a>) -> Result<Span, ParseError> {
|
|
||||||
token_nodes.atomic_parse(|token_nodes| {
|
|
||||||
let mut span: Option<Span> = None;
|
|
||||||
|
|
||||||
loop {
|
|
||||||
let boundary = token_nodes.expand_infallible(PeekExternalBoundary);
|
|
||||||
|
|
||||||
if boundary {
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
|
|
||||||
let peeked = token_nodes.peek().not_eof("external start token")?;
|
|
||||||
let node = peeked.node;
|
|
||||||
|
|
||||||
let new_span = match node.unspanned() {
|
|
||||||
Token::Comment(_)
|
|
||||||
| Token::Separator
|
|
||||||
| Token::Whitespace
|
|
||||||
| Token::Pipeline(_) => {
|
|
||||||
return Err(ParseError::mismatch(
|
|
||||||
"external start token",
|
|
||||||
node.spanned_type_name(),
|
|
||||||
))
|
|
||||||
}
|
|
||||||
|
|
||||||
_ => {
|
|
||||||
let node = peeked.commit();
|
|
||||||
node.span()
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
span = match span {
|
|
||||||
None => Some(new_span),
|
|
||||||
Some(before) => Some(before.until(new_span)),
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
match span {
|
|
||||||
None => Err(token_nodes.err_next_token("external start token")),
|
|
||||||
Some(span) => {
|
|
||||||
token_nodes.color_shape(FlatShape::ExternalWord.spanned(span));
|
|
||||||
Ok(span)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
})
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug, Copy, Clone)]
|
|
||||||
struct PeekExternalBoundary;
|
|
||||||
|
|
||||||
impl ExpandSyntax for PeekExternalBoundary {
|
|
||||||
type Output = bool;
|
|
||||||
|
|
||||||
fn name(&self) -> &'static str {
|
|
||||||
"external boundary"
|
|
||||||
}
|
|
||||||
|
|
||||||
fn expand<'a, 'b>(&self, token_nodes: &'b mut TokensIterator<'a>) -> Self::Output {
|
|
||||||
let next = token_nodes.peek();
|
|
||||||
|
|
||||||
match next.node {
|
|
||||||
None => true,
|
|
||||||
Some(node) => match node.unspanned() {
|
|
||||||
Token::Delimited(_) => true,
|
|
||||||
Token::Whitespace => true,
|
|
||||||
Token::Comment(_) => true,
|
|
||||||
Token::Separator => true,
|
|
||||||
Token::Call(_) => true,
|
|
||||||
_ => false,
|
|
||||||
},
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
@ -1,12 +0,0 @@
|
|||||||
use derive_new::new;
|
|
||||||
use getset::Getters;
|
|
||||||
use nu_source::Span;
|
|
||||||
use serde::{Deserialize, Serialize};
|
|
||||||
|
|
||||||
#[derive(
|
|
||||||
Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Hash, Getters, Serialize, Deserialize, new,
|
|
||||||
)]
|
|
||||||
#[get = "pub"]
|
|
||||||
pub struct ExternalCommand {
|
|
||||||
pub(crate) name: Span,
|
|
||||||
}
|
|
@ -1,119 +0,0 @@
|
|||||||
use crate::hir::SpannedExpression;
|
|
||||||
use crate::Flag;
|
|
||||||
use indexmap::IndexMap;
|
|
||||||
use log::trace;
|
|
||||||
use nu_source::{b, DebugDocBuilder, PrettyDebugRefineKind, PrettyDebugWithSource, Tag};
|
|
||||||
use serde::{Deserialize, Serialize};
|
|
||||||
|
|
||||||
#[derive(Debug, Clone, Eq, PartialEq, Serialize, Deserialize)]
|
|
||||||
pub enum NamedValue {
|
|
||||||
AbsentSwitch,
|
|
||||||
PresentSwitch(Tag),
|
|
||||||
AbsentValue,
|
|
||||||
Value(SpannedExpression),
|
|
||||||
}
|
|
||||||
|
|
||||||
impl PrettyDebugWithSource for NamedValue {
|
|
||||||
fn pretty_debug(&self, source: &str) -> DebugDocBuilder {
|
|
||||||
match self {
|
|
||||||
NamedValue::AbsentSwitch => b::typed("switch", b::description("absent")),
|
|
||||||
NamedValue::PresentSwitch(_) => b::typed("switch", b::description("present")),
|
|
||||||
NamedValue::AbsentValue => b::description("absent"),
|
|
||||||
NamedValue::Value(value) => value.pretty_debug(source),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fn refined_pretty_debug(&self, refine: PrettyDebugRefineKind, source: &str) -> DebugDocBuilder {
|
|
||||||
match refine {
|
|
||||||
PrettyDebugRefineKind::ContextFree => self.pretty_debug(source),
|
|
||||||
PrettyDebugRefineKind::WithContext => match self {
|
|
||||||
NamedValue::AbsentSwitch => b::value("absent"),
|
|
||||||
NamedValue::PresentSwitch(_) => b::value("present"),
|
|
||||||
NamedValue::AbsentValue => b::value("absent"),
|
|
||||||
NamedValue::Value(value) => value.refined_pretty_debug(refine, source),
|
|
||||||
},
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug, Default, Clone, Eq, PartialEq, Serialize, Deserialize)]
|
|
||||||
pub struct NamedArguments {
|
|
||||||
pub named: IndexMap<String, NamedValue>,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl NamedArguments {
|
|
||||||
pub fn new() -> NamedArguments {
|
|
||||||
Default::default()
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn iter(&self) -> impl Iterator<Item = (&String, &NamedValue)> {
|
|
||||||
self.named.iter()
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn get(&self, name: &str) -> Option<&NamedValue> {
|
|
||||||
self.named.get(name)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl NamedArguments {
|
|
||||||
pub fn insert_switch(&mut self, name: impl Into<String>, switch: Option<Flag>) {
|
|
||||||
let name = name.into();
|
|
||||||
trace!("Inserting switch -- {} = {:?}", name, switch);
|
|
||||||
|
|
||||||
match switch {
|
|
||||||
None => self.named.insert(name, NamedValue::AbsentSwitch),
|
|
||||||
Some(flag) => self.named.insert(
|
|
||||||
name,
|
|
||||||
NamedValue::PresentSwitch(Tag {
|
|
||||||
span: *flag.name(),
|
|
||||||
anchor: None,
|
|
||||||
}),
|
|
||||||
),
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn insert_optional(&mut self, name: impl Into<String>, expr: Option<SpannedExpression>) {
|
|
||||||
match expr {
|
|
||||||
None => self.named.insert(name.into(), NamedValue::AbsentValue),
|
|
||||||
Some(expr) => self.named.insert(name.into(), NamedValue::Value(expr)),
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn insert_mandatory(&mut self, name: impl Into<String>, expr: SpannedExpression) {
|
|
||||||
self.named.insert(name.into(), NamedValue::Value(expr));
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn switch_present(&self, switch: &str) -> bool {
|
|
||||||
self.named
|
|
||||||
.get(switch)
|
|
||||||
.map(|t| match t {
|
|
||||||
NamedValue::PresentSwitch(_) => true,
|
|
||||||
_ => false,
|
|
||||||
})
|
|
||||||
.unwrap_or(false)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl PrettyDebugWithSource for NamedArguments {
|
|
||||||
fn refined_pretty_debug(&self, refine: PrettyDebugRefineKind, source: &str) -> DebugDocBuilder {
|
|
||||||
match refine {
|
|
||||||
PrettyDebugRefineKind::ContextFree => self.pretty_debug(source),
|
|
||||||
PrettyDebugRefineKind::WithContext => b::intersperse(
|
|
||||||
self.named.iter().map(|(key, value)| {
|
|
||||||
b::key(key)
|
|
||||||
+ b::equals()
|
|
||||||
+ value.refined_pretty_debug(PrettyDebugRefineKind::WithContext, source)
|
|
||||||
}),
|
|
||||||
b::space(),
|
|
||||||
),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fn pretty_debug(&self, source: &str) -> DebugDocBuilder {
|
|
||||||
b::delimit(
|
|
||||||
"(",
|
|
||||||
self.refined_pretty_debug(PrettyDebugRefineKind::WithContext, source),
|
|
||||||
")",
|
|
||||||
)
|
|
||||||
}
|
|
||||||
}
|
|
@ -1,41 +0,0 @@
|
|||||||
use crate::hir::SpannedExpression;
|
|
||||||
use derive_new::new;
|
|
||||||
use getset::{Getters, MutGetters};
|
|
||||||
use nu_protocol::PathMember;
|
|
||||||
use nu_source::{b, DebugDocBuilder, PrettyDebug, PrettyDebugWithSource};
|
|
||||||
use serde::{Deserialize, Serialize};
|
|
||||||
|
|
||||||
#[derive(
|
|
||||||
Debug,
|
|
||||||
Clone,
|
|
||||||
Eq,
|
|
||||||
PartialEq,
|
|
||||||
Ord,
|
|
||||||
PartialOrd,
|
|
||||||
Hash,
|
|
||||||
Getters,
|
|
||||||
MutGetters,
|
|
||||||
Serialize,
|
|
||||||
Deserialize,
|
|
||||||
new,
|
|
||||||
)]
|
|
||||||
#[get = "pub"]
|
|
||||||
pub struct Path {
|
|
||||||
head: SpannedExpression,
|
|
||||||
#[get_mut = "pub(crate)"]
|
|
||||||
tail: Vec<PathMember>,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl PrettyDebugWithSource for Path {
|
|
||||||
fn pretty_debug(&self, source: &str) -> DebugDocBuilder {
|
|
||||||
self.head.pretty_debug(source)
|
|
||||||
+ b::operator(".")
|
|
||||||
+ b::intersperse(self.tail.iter().map(|m| m.pretty()), b::operator("."))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Path {
|
|
||||||
pub(crate) fn parts(self) -> (SpannedExpression, Vec<PathMember>) {
|
|
||||||
(self.head, self.tail)
|
|
||||||
}
|
|
||||||
}
|
|
@ -1,33 +0,0 @@
|
|||||||
use crate::hir::SpannedExpression;
|
|
||||||
|
|
||||||
use derive_new::new;
|
|
||||||
use getset::Getters;
|
|
||||||
use nu_source::{b, DebugDocBuilder, PrettyDebugWithSource, Span};
|
|
||||||
use serde::{Deserialize, Serialize};
|
|
||||||
|
|
||||||
#[derive(
|
|
||||||
Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Hash, Getters, Serialize, Deserialize, new,
|
|
||||||
)]
|
|
||||||
pub struct Range {
|
|
||||||
#[get = "pub"]
|
|
||||||
left: SpannedExpression,
|
|
||||||
#[get = "pub"]
|
|
||||||
dotdot: Span,
|
|
||||||
#[get = "pub"]
|
|
||||||
right: SpannedExpression,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl PrettyDebugWithSource for Range {
|
|
||||||
fn pretty_debug(&self, source: &str) -> DebugDocBuilder {
|
|
||||||
b::delimit(
|
|
||||||
"<",
|
|
||||||
self.left.pretty_debug(source)
|
|
||||||
+ b::space()
|
|
||||||
+ b::keyword(self.dotdot.slice(source))
|
|
||||||
+ b::space()
|
|
||||||
+ self.right.pretty_debug(source),
|
|
||||||
">",
|
|
||||||
)
|
|
||||||
.group()
|
|
||||||
}
|
|
||||||
}
|
|
@ -1,475 +0,0 @@
use crate::hir;
use crate::hir::syntax_shape::{
    expand_atom, expand_syntax, BareShape, ExpandContext, ExpandSyntax, ExpansionRule,
    UnspannedAtomicToken, WhitespaceShape,
};
use crate::hir::tokens_iterator::TokensIterator;
use crate::parse::comment::Comment;
use derive_new::new;
use nu_errors::ParseError;
use nu_protocol::{RowType, SpannedTypeName, Type};
use nu_source::{
    b, DebugDocBuilder, HasFallibleSpan, HasSpan, PrettyDebugWithSource, Span, Spanned, SpannedItem,
};
use std::fmt::Debug;

// A Signature is a command without implementation.
//
// In Nu, a command is a function combined with macro expansion rules.
//
// def cd
//   # Change to a new path.
//   optional directory(Path) # the directory to change to
// end

#[derive(new)]
struct Expander<'a, 'b, 'c, 'd> {
    iterator: &'b mut TokensIterator<'a>,
    context: &'d ExpandContext<'c>,
}

impl<'a, 'b, 'c, 'd> Expander<'a, 'b, 'c, 'd> {
    fn expand<O>(&mut self, syntax: impl ExpandSyntax<Output = O>) -> Result<O, ParseError>
    where
        O: HasFallibleSpan + Clone + std::fmt::Debug + 'static,
    {
        expand_syntax(&syntax, self.iterator, self.context)
    }

    fn optional<O>(&mut self, syntax: impl ExpandSyntax<Output = O>) -> Option<O>
    where
        O: HasFallibleSpan + Clone + std::fmt::Debug + 'static,
    {
        match expand_syntax(&syntax, self.iterator, self.context) {
            Err(_) => None,
            Ok(value) => Some(value),
        }
    }

    fn pos(&mut self) -> Span {
        self.iterator.span_at_cursor()
    }

    fn slice_string(&mut self, span: impl Into<Span>) -> String {
        span.into().slice(self.context.source()).to_string()
    }
}

#[derive(Debug, Copy, Clone)]
struct SignatureShape;

impl ExpandSyntax for SignatureShape {
    type Output = hir::Signature;

    fn name(&self) -> &'static str {
        "signature"
    }

    fn expand_syntax<'a, 'b>(
        &self,
        token_nodes: &'b mut TokensIterator<'a>,
        context: &ExpandContext,
    ) -> Result<Self::Output, ParseError> {
        token_nodes.atomic_parse(|token_nodes| {
            let mut expander = Expander::new(token_nodes, context);
            let start = expander.pos();
            expander.expand(keyword("def"))?;
            expander.expand(WhitespaceShape)?;
            let name = expander.expand(BareShape)?;
            expander.expand(SeparatorShape)?;
            let usage = expander.expand(CommentShape)?;
            expander.expand(SeparatorShape)?;
            let end = expander.pos();

            Ok(hir::Signature::new(
                nu_protocol::Signature::new(&name.word).desc(expander.slice_string(usage.text)),
                start.until(end),
            ))
        })
    }
}

fn keyword(kw: &'static str) -> KeywordShape {
    KeywordShape { keyword: kw }
}

#[derive(Debug, Copy, Clone)]
struct KeywordShape {
    keyword: &'static str,
}

impl ExpandSyntax for KeywordShape {
    type Output = Span;

    fn name(&self) -> &'static str {
        "keyword"
    }
    fn expand_syntax<'a, 'b>(
        &self,
        token_nodes: &'b mut TokensIterator<'a>,
        context: &ExpandContext,
    ) -> Result<Self::Output, ParseError> {
        let atom = expand_atom(token_nodes, "keyword", context, ExpansionRule::new())?;

        if let UnspannedAtomicToken::Word { text } = &atom.unspanned {
            let word = text.slice(context.source());

            if word == self.keyword {
                return Ok(atom.span);
            }
        }

        Err(ParseError::mismatch(self.keyword, atom.spanned_type_name()))
    }
}

#[derive(Debug, Copy, Clone)]
struct SeparatorShape;

impl ExpandSyntax for SeparatorShape {
    type Output = Span;

    fn name(&self) -> &'static str {
        "separator"
    }

    fn expand_syntax<'a, 'b>(
        &self,
        token_nodes: &'b mut TokensIterator<'a>,
        context: &ExpandContext,
    ) -> Result<Self::Output, ParseError> {
        let atom = expand_atom(token_nodes, "separator", context, ExpansionRule::new())?;

        match &atom.unspanned {
            UnspannedAtomicToken::Separator { text } => Ok(*text),
            _ => Err(ParseError::mismatch("separator", atom.spanned_type_name())),
        }
    }
}

#[derive(Debug, Copy, Clone)]
struct CommentShape;

impl ExpandSyntax for CommentShape {
    type Output = Comment;

    fn name(&self) -> &'static str {
        "comment"
    }

    fn expand_syntax<'a, 'b>(
        &self,
        token_nodes: &'b mut TokensIterator<'a>,
        context: &ExpandContext,
    ) -> Result<Self::Output, ParseError> {
        let atom = expand_atom(token_nodes, "comment", context, ExpansionRule::new())?;

        match &atom.unspanned {
            UnspannedAtomicToken::Comment { body } => Ok(Comment::line(body, atom.span)),
            _ => Err(ParseError::mismatch("comment", atom.spanned_type_name())),
        }
    }
}

#[derive(Debug, Copy, Clone, new)]
struct TupleShape<A, B> {
    first: A,
    second: B,
}

#[derive(Debug, Clone, new)]
struct TupleSyntax<A, B> {
    first: A,
    second: B,
}

impl<A, B> PrettyDebugWithSource for TupleSyntax<A, B>
where
    A: PrettyDebugWithSource,
    B: PrettyDebugWithSource,
{
    fn pretty_debug(&self, source: &str) -> DebugDocBuilder {
        b::typed(
            "pair",
            self.first.pretty_debug(source) + b::space() + self.second.pretty_debug(source),
        )
    }
}

impl<A, B> HasFallibleSpan for TupleSyntax<A, B>
where
    A: HasFallibleSpan + Debug + Clone,
    B: HasFallibleSpan + Debug + Clone,
{
    fn maybe_span(&self) -> Option<Span> {
        match (self.first.maybe_span(), self.second.maybe_span()) {
            (Some(first), Some(second)) => Some(first.until(second)),
            (Some(first), None) => Some(first),
            (None, Some(second)) => Some(second),
            (None, None) => None,
        }
    }
}

impl<A, B, AOut, BOut> ExpandSyntax for TupleShape<A, B>
where
    A: ExpandSyntax<Output = AOut> + Debug + Copy,
    B: ExpandSyntax<Output = BOut> + Debug + Copy,
    AOut: HasFallibleSpan + Debug + Clone + 'static,
    BOut: HasFallibleSpan + Debug + Clone + 'static,
{
    type Output = TupleSyntax<AOut, BOut>;

    fn name(&self) -> &'static str {
        "pair"
    }

    fn expand_syntax<'a, 'b>(
        &self,
        token_nodes: &'b mut TokensIterator<'a>,
        context: &ExpandContext,
    ) -> Result<Self::Output, ParseError> {
        token_nodes.atomic_parse(|token_nodes| {
            let first = expand_syntax(&self.first, token_nodes, context)?;
            let second = expand_syntax(&self.second, token_nodes, context)?;

            Ok(TupleSyntax { first, second })
        })
    }
}

#[derive(Debug, Clone)]
pub struct PositionalParam {
    optional: Option<Span>,
    name: Identifier,
    ty: Spanned<Type>,
    desc: Spanned<String>,
    span: Span,
}

impl HasSpan for PositionalParam {
    fn span(&self) -> Span {
        self.span
    }
}

impl PrettyDebugWithSource for PositionalParam {
    fn pretty_debug(&self, source: &str) -> DebugDocBuilder {
        (match self.optional {
            Some(_) => b::description("optional") + b::space(),
            None => b::blank(),
        }) + self.ty.pretty_debug(source)
    }
}

#[derive(Debug, Copy, Clone)]
pub struct PositionalParamShape;

impl ExpandSyntax for PositionalParamShape {
    type Output = PositionalParam;

    fn name(&self) -> &'static str {
        "positional param"
    }

    fn expand_syntax<'a, 'b>(
        &self,
        token_nodes: &'b mut TokensIterator<'a>,
        context: &ExpandContext,
    ) -> Result<Self::Output, ParseError> {
        token_nodes.atomic_parse(|token_nodes| {
            let mut expander = Expander::new(token_nodes, context);

            let optional = expander
                .optional(TupleShape::new(keyword("optional"), WhitespaceShape))
                .map(|s| s.first);

            let name = expander.expand(IdentifierShape)?;

            expander.optional(WhitespaceShape);

            let _ty = expander.expand(TypeShape)?;

            Ok(PositionalParam {
                optional,
                name,
                ty: Type::Nothing.spanned(Span::unknown()),
                desc: format!("").spanned(Span::unknown()),
                span: Span::unknown(),
            })
        })
    }
}

#[derive(Debug, Clone)]
struct Identifier {
    body: String,
    span: Span,
}

impl HasSpan for Identifier {
    fn span(&self) -> Span {
        self.span
    }
}

impl PrettyDebugWithSource for Identifier {
    fn pretty_debug(&self, source: &str) -> DebugDocBuilder {
        b::typed("id", b::description(self.span.slice(source)))
    }
}

#[derive(Debug, Copy, Clone)]
struct IdentifierShape;

impl ExpandSyntax for IdentifierShape {
    type Output = Identifier;
    fn name(&self) -> &'static str {
        "identifier"
    }

    fn expand_syntax<'a, 'b>(
        &self,
        token_nodes: &'b mut TokensIterator<'a>,
        context: &ExpandContext,
    ) -> Result<Self::Output, ParseError> {
        let atom = expand_atom(token_nodes, "identifier", context, ExpansionRule::new())?;

        if let UnspannedAtomicToken::Word { text } = atom.unspanned {
            let body = text.slice(context.source());
            if is_id(body) {
                return Ok(Identifier {
                    body: body.to_string(),
                    span: text,
                });
            }
        }

        Err(ParseError::mismatch("identifier", atom.spanned_type_name()))
    }
}

fn is_id(input: &str) -> bool {
    let source = nu_source::nom_input(input);
    match crate::parse::parser::ident(source) {
        Err(_) => false,
        Ok((input, _)) => input.fragment.is_empty(),
    }
}

#[derive(Debug, Clone, new)]
struct TypeSyntax {
    ty: Type,
    span: Span,
}

impl HasSpan for TypeSyntax {
    fn span(&self) -> Span {
        self.span
    }
}

impl PrettyDebugWithSource for TypeSyntax {
    fn pretty_debug(&self, source: &str) -> DebugDocBuilder {
        self.ty.pretty_debug(source)
    }
}

#[derive(Debug, Copy, Clone)]
struct TypeShape;

impl ExpandSyntax for TypeShape {
    type Output = TypeSyntax;

    fn name(&self) -> &'static str {
        "type"
    }
    fn expand_syntax<'a, 'b>(
        &self,
        token_nodes: &'b mut TokensIterator<'a>,
        context: &ExpandContext,
    ) -> Result<Self::Output, ParseError> {
        let atom = expand_atom(token_nodes, "type", context, ExpansionRule::new())?;

        match atom.unspanned {
            UnspannedAtomicToken::Word { text } => {
                let word = text.slice(context.source());

                Ok(TypeSyntax::new(
                    match word {
                        "nothing" => Type::Nothing,
                        "integer" => Type::Int,
                        "decimal" => Type::Decimal,
                        "bytesize" => Type::Bytesize,
                        "string" => Type::String,
                        "column-path" => Type::ColumnPath,
                        "pattern" => Type::Pattern,
                        "boolean" => Type::Boolean,
                        "date" => Type::Date,
                        "duration" => Type::Duration,
                        "filename" => Type::Path,
                        "binary" => Type::Binary,
                        "row" => Type::Row(RowType::new()),
                        "table" => Type::Table(vec![]),
                        "block" => Type::Block,
                        _ => return Err(ParseError::mismatch("type", atom.spanned_type_name())),
                    },
                    atom.span,
                ))
            }
            _ => Err(ParseError::mismatch("type", atom.spanned_type_name())),
        }
    }
}

#[derive(Debug, Copy, Clone)]
struct TypeAnnotation;

impl ExpandSyntax for TypeAnnotation {
    type Output = TypeSyntax;

    fn name(&self) -> &'static str {
        "type annotation"
    }

    fn expand_syntax<'a, 'b>(
        &self,
        token_nodes: &'b mut TokensIterator<'a>,
        context: &ExpandContext,
    ) -> Result<Self::Output, ParseError> {
        let atom = expand_atom(
            token_nodes,
            "type annotation",
            context,
            ExpansionRule::new(),
        )?;

        match atom.unspanned {
            UnspannedAtomicToken::RoundDelimited { nodes, .. } => {
                token_nodes.atomic_parse(|token_nodes| {
                    token_nodes.child(
                        (&nodes[..]).spanned(atom.span),
                        context.source().clone(),
                        |token_nodes| {
                            let ty = expand_syntax(&TypeShape, token_nodes, context)?;

                            let next = token_nodes.peek_non_ws();

                            match next.node {
                                None => Ok(ty),
                                Some(node) => {
                                    Err(ParseError::extra_tokens(node.spanned_type_name()))
                                }
                            }
                        },
                    )
                })
            }

            _ => Err(ParseError::mismatch(
                "type annotation",
                atom.spanned_type_name(),
            )),
        }
    }
}
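The comment at the top of this file sketches the proposed `def` syntax that `SignatureShape` walks through in order: keyword, whitespace, bare name, separator, usage comment, separator. As a rough illustration of that sequencing only, here is a self-contained sketch that works on plain strings instead of the parser's `TokensIterator`/`ExpandContext`; the helper name and logic are hypothetical and not part of this crate.

```rust
/// Hypothetical stand-in for what SignatureShape extracts: given the first
/// lines of a `def` block, pull out the command name and its usage comment.
/// This mirrors the keyword -> name -> comment order above, nothing more.
fn sketch_signature(def_block: &str) -> Option<(String, String)> {
    let mut lines = def_block.lines();

    // `def cd` -> the keyword plus the bare command name
    let header = lines.next()?.trim();
    let name = header.strip_prefix("def ")?.trim().to_string();

    // `# Change to a new path.` -> the usage/description comment
    let usage = lines.next()?.trim().strip_prefix('#')?.trim().to_string();

    Some((name, usage))
}

fn main() {
    let def_block = "def cd\n  # Change to a new path.\n";
    assert_eq!(
        sketch_signature(def_block),
        Some(("cd".to_string(), "Change to a new path.".to_string()))
    );
}
```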
@ -1,680 +0,0 @@
#![allow(clippy::large_enum_variant, clippy::type_complexity)]

mod block;
mod expression;
pub mod flat_shape;

use crate::commands::classified::internal::InternalCommand;
use crate::commands::classified::{ClassifiedCommand, ClassifiedPipeline};
use crate::commands::external_command;
use crate::hir;
use crate::hir::syntax_shape::block::CoerceBlockShape;
use crate::hir::syntax_shape::expression::range::RangeShape;
use crate::hir::syntax_shape::flat_shape::ShapeResult;
use crate::hir::tokens_iterator::TokensIterator;
use crate::hir::{Expression, SpannedExpression};
use crate::parse::operator::EvaluationOperator;
use crate::parse::token_tree::{
    ExternalCommandType, PipelineType, SpannedToken, Token, WhitespaceType, WordType,
};
use crate::parse_command::parse_command_tail;
use derive_new::new;
use getset::Getters;
use nu_errors::ParseError;
use nu_protocol::{ShellTypeName, Signature, SpannedTypeName};
use nu_source::{
    b, DebugDocBuilder, HasSpan, PrettyDebug, PrettyDebugWithSource, Span, Spanned, SpannedItem,
    Tag, TaggedItem, Text,
};
use std::path::{Path, PathBuf};

pub(crate) use self::expression::delimited::DelimitedSquareShape;
pub(crate) use self::expression::file_path::{ExternalWordShape, FilePathShape};
pub(crate) use self::expression::list::{BackoffColoringMode, ExpressionListShape};
pub(crate) use self::expression::number::{
    DecimalShape, IntExpressionShape, IntShape, NumberExpressionShape, NumberShape,
};
pub(crate) use self::expression::pattern::{PatternExpressionShape, PatternShape};
pub(crate) use self::expression::string::{CoerceStringShape, StringExpressionShape, StringShape};
pub(crate) use self::expression::unit::UnitExpressionShape;
pub(crate) use self::expression::variable_path::{
    ColumnPathShape, ColumnPathSyntax, ExpressionContinuationShape, Member, MemberShape,
    PathTailShape, PathTailSyntax, VariablePathShape, VariableShape,
};
pub(crate) use self::expression::{AnyExpressionShape, AnyExpressionStartShape};
pub(crate) use self::flat_shape::FlatShape;

use nu_protocol::SyntaxShape;
use std::fmt::Debug;

impl ExpandSyntax for SyntaxShape {
    type Output = Result<SpannedExpression, ParseError>;

    fn name(&self) -> &'static str {
        match self {
            SyntaxShape::Any => "shape[any]",
            SyntaxShape::Int => "shape[integer]",
            SyntaxShape::Range => "shape[range]",
            SyntaxShape::String => "shape[string]",
            SyntaxShape::Member => "shape[column name]",
            SyntaxShape::ColumnPath => "shape[column path]",
            SyntaxShape::Number => "shape[number]",
            SyntaxShape::Path => "shape[file path]",
            SyntaxShape::Pattern => "shape[glob pattern]",
            SyntaxShape::Block => "shape[block]",
        }
    }

    fn expand<'a, 'b>(
        &self,
        token_nodes: &'b mut TokensIterator<'a>,
    ) -> Result<SpannedExpression, ParseError> {
        match self {
            SyntaxShape::Any => token_nodes.expand_syntax(AnyExpressionShape),
            SyntaxShape::Int => token_nodes
                .expand_syntax(IntExpressionShape)
                .or_else(|_| token_nodes.expand_syntax(VariablePathShape)),
            SyntaxShape::Range => token_nodes
                .expand_syntax(RangeShape)
                .or_else(|_| token_nodes.expand_syntax(VariablePathShape)),
            SyntaxShape::String => token_nodes
                .expand_syntax(CoerceStringShape)
                .or_else(|_| token_nodes.expand_syntax(VariablePathShape)),
            SyntaxShape::Member => {
                let syntax = token_nodes.expand_syntax(MemberShape)?;
                Ok(syntax.to_expr())
            }
            SyntaxShape::ColumnPath => {
                let column_path = token_nodes.expand_syntax(ColumnPathShape)?;
                let ColumnPathSyntax {
                    path: column_path,
                    tag,
                } = column_path;

                Ok(Expression::column_path(column_path).into_expr(tag.span))
            }
            SyntaxShape::Number => token_nodes
                .expand_syntax(NumberExpressionShape)
                .or_else(|_| token_nodes.expand_syntax(VariablePathShape)),
            SyntaxShape::Path => token_nodes
                .expand_syntax(FilePathShape)
                .or_else(|_| token_nodes.expand_syntax(VariablePathShape)),
            SyntaxShape::Pattern => token_nodes
                .expand_syntax(PatternShape)
                .or_else(|_| token_nodes.expand_syntax(VariablePathShape)),
            SyntaxShape::Block => token_nodes
                .expand_syntax(CoerceBlockShape)
                .or_else(|_| token_nodes.expand_syntax(VariablePathShape)),
        }
    }
}

pub trait SignatureRegistry: Debug {
    fn has(&self, name: &str) -> bool;
    fn get(&self, name: &str) -> Option<Signature>;
    fn clone_box(&self) -> Box<dyn SignatureRegistry>;
}

impl SignatureRegistry for Box<dyn SignatureRegistry> {
    fn has(&self, name: &str) -> bool {
        (&**self).has(name)
    }
    fn get(&self, name: &str) -> Option<Signature> {
        (&**self).get(name)
    }
    fn clone_box(&self) -> Box<dyn SignatureRegistry> {
        (&**self).clone_box()
    }
}

#[derive(Debug, Getters, new)]
pub struct ExpandContext<'context> {
    #[get = "pub(crate)"]
    pub registry: Box<dyn SignatureRegistry>,
    pub source: &'context Text,
    pub homedir: Option<PathBuf>,
}

impl<'context> ExpandContext<'context> {
    pub(crate) fn homedir(&self) -> Option<&Path> {
        self.homedir.as_deref()
    }

    pub(crate) fn source(&self) -> &'context Text {
        self.source
    }
}

pub trait ExpandSyntax: std::fmt::Debug + Clone {
    type Output: Clone + std::fmt::Debug + 'static;

    fn name(&self) -> &'static str;

    fn expand<'a, 'b>(&self, token_nodes: &'b mut TokensIterator<'a>) -> Self::Output;
}

pub fn fallible<T, S>(syntax: S) -> FallibleSyntax<S>
where
    T: Clone + Debug + 'static,
    S: ExpandSyntax<Output = T>,
{
    FallibleSyntax { inner: syntax }
}

#[derive(Debug, Copy, Clone)]
pub struct FallibleSyntax<I> {
    inner: I,
}

impl<I, T> ExpandSyntax for FallibleSyntax<I>
where
    I: ExpandSyntax<Output = T>,
    T: Clone + Debug + 'static,
{
    type Output = Result<T, ParseError>;

    fn name(&self) -> &'static str {
        "fallible"
    }

    fn expand<'a, 'b>(&self, token_nodes: &'b mut TokensIterator<'a>) -> Result<T, ParseError> {
        Ok(self.inner.expand(token_nodes))
    }
}

#[derive(Debug, Clone)]
enum BarePathState {
    Initial,
    Seen(Span, Span),
    Error(ParseError),
}

impl BarePathState {
    pub fn seen(self, span: Span) -> BarePathState {
        match self {
            BarePathState::Initial => BarePathState::Seen(span, span),
            BarePathState::Seen(start, _) => BarePathState::Seen(start, span),
            BarePathState::Error(err) => BarePathState::Error(err),
        }
    }

    pub fn end(self, node: Option<&SpannedToken>, expected: &'static str) -> BarePathState {
        match self {
            BarePathState::Initial => match node {
                None => BarePathState::Error(ParseError::unexpected_eof(expected, Span::unknown())),
                Some(token) => {
                    BarePathState::Error(ParseError::mismatch(expected, token.spanned_type_name()))
                }
            },
            BarePathState::Seen(start, end) => BarePathState::Seen(start, end),
            BarePathState::Error(err) => BarePathState::Error(err),
        }
    }

    pub fn into_bare(self) -> Result<Span, ParseError> {
        match self {
            BarePathState::Initial => unreachable!("into_bare in initial state"),
            BarePathState::Seen(start, end) => Ok(start.until(end)),
            BarePathState::Error(err) => Err(err),
        }
    }
}

pub fn expand_bare(
    token_nodes: &'_ mut TokensIterator<'_>,
    predicate: impl Fn(&SpannedToken) -> bool,
) -> Result<Span, ParseError> {
    let mut state = BarePathState::Initial;

    loop {
        if token_nodes.at_end() {
            state = state.end(None, "word");
            break;
        }

        let source = token_nodes.source();

        let mut peeked = token_nodes.peek();
        let node = peeked.node;

        match node {
            Some(token) if predicate(token) => {
                peeked.commit();
                state = state.seen(token.span());
                let shapes = FlatShape::shapes(token, &source);
                token_nodes.color_shapes(shapes);
            }
            token => {
                state = state.end(token, "word");
                break;
            }
        }
    }

    state.into_bare()
}

#[derive(Debug, Copy, Clone)]
pub struct BareExpressionShape;

impl ExpandSyntax for BareExpressionShape {
    type Output = Result<SpannedExpression, ParseError>;

    fn name(&self) -> &'static str {
        "bare expression"
    }

    fn expand<'a, 'b>(
        &self,
        token_nodes: &'b mut TokensIterator<'a>,
    ) -> Result<SpannedExpression, ParseError> {
        token_nodes
            .expand_syntax(BarePathShape)
            .map(|span| Expression::bare().into_expr(span))
    }
}

#[derive(Debug, Copy, Clone)]
pub struct BarePathShape;

impl ExpandSyntax for BarePathShape {
    type Output = Result<Span, ParseError>;

    fn name(&self) -> &'static str {
        "bare path"
    }

    fn expand<'a, 'b>(&self, token_nodes: &'b mut TokensIterator<'a>) -> Result<Span, ParseError> {
        expand_bare(token_nodes, |token| match token.unspanned() {
            Token::Bare | Token::EvaluationOperator(EvaluationOperator::Dot) => true,

            _ => false,
        })
    }
}

#[derive(Debug, Copy, Clone)]
pub struct BareShape;

#[derive(Debug, Clone)]
pub struct BareSyntax {
    pub word: String,
    pub span: Span,
}

impl HasSpan for BareSyntax {
    fn span(&self) -> Span {
        self.span
    }
}

impl PrettyDebug for BareSyntax {
    fn pretty(&self) -> DebugDocBuilder {
        b::primitive(&self.word)
    }
}

impl ExpandSyntax for BareShape {
    type Output = Result<BareSyntax, ParseError>;

    fn name(&self) -> &'static str {
        "word"
    }

    fn expand<'a, 'b>(
        &self,
        token_nodes: &'b mut TokensIterator<'a>,
    ) -> Result<BareSyntax, ParseError> {
        let source = token_nodes.source();

        token_nodes.expand_token(WordType, |span| {
            Ok((
                FlatShape::Word,
                BareSyntax {
                    word: span.string(&source),
                    span,
                },
            ))
        })
    }
}

#[derive(Debug, Clone)]
pub enum CommandSignature {
    Internal(Spanned<Signature>),
    LiteralExternal { outer: Span, inner: Span },
    External(Span),
    Expression(hir::SpannedExpression),
}

impl PrettyDebugWithSource for CommandSignature {
    fn pretty_debug(&self, source: &str) -> DebugDocBuilder {
        match self {
            CommandSignature::Internal(internal) => {
                b::typed("command", b::description(&internal.name))
            }
            CommandSignature::LiteralExternal { outer, .. } => {
                b::typed("command", b::description(outer.slice(source)))
            }
            CommandSignature::External(external) => b::typed(
                "command",
                b::description("^") + b::description(external.slice(source)),
            ),
            CommandSignature::Expression(expr) => b::typed("command", expr.pretty_debug(source)),
        }
    }
}

impl HasSpan for CommandSignature {
    fn span(&self) -> Span {
        match self {
            CommandSignature::Internal(spanned) => spanned.span,
            CommandSignature::LiteralExternal { outer, .. } => *outer,
            CommandSignature::External(span) => *span,
            CommandSignature::Expression(expr) => expr.span,
        }
    }
}

impl CommandSignature {
    pub fn to_expression(&self) -> hir::SpannedExpression {
        match self {
            CommandSignature::Internal(command) => {
                let span = command.span;
                hir::Expression::Command(span).into_expr(span)
            }
            CommandSignature::LiteralExternal { outer, inner } => {
                hir::Expression::ExternalCommand(hir::ExternalCommand::new(*inner))
                    .into_expr(*outer)
            }
            CommandSignature::External(span) => {
                hir::Expression::ExternalCommand(hir::ExternalCommand::new(*span)).into_expr(*span)
            }
            CommandSignature::Expression(expr) => expr.clone(),
        }
    }
}

#[derive(Debug, Copy, Clone)]
pub struct PipelineShape;

impl ExpandSyntax for PipelineShape {
    type Output = ClassifiedPipeline;

    fn name(&self) -> &'static str {
        "pipeline"
    }

    fn expand<'content, 'me>(
        &self,
        token_nodes: &'me mut TokensIterator<'content>,
    ) -> ClassifiedPipeline {
        if token_nodes.at_end() {
            return ClassifiedPipeline::commands(vec![], Span::unknown());
        }

        let start = token_nodes.span_at_cursor();

        // whitespace is allowed at the beginning
        token_nodes.expand_infallible(MaybeSpaceShape);

        let pipeline = token_nodes
            .expand_token(PipelineType, |pipeline| Ok(((), pipeline)))
            .expect("PipelineShape is only expected to be called with a Pipeline token");

        let parts = &pipeline.parts[..];

        let mut out = vec![];

        for part in parts {
            if let Some(span) = part.pipe {
                token_nodes.color_shape(FlatShape::Pipe.spanned(span));
            }

            let tokens: Spanned<&[SpannedToken]> = part.tokens().spanned(part.span());

            let (shapes, classified) = token_nodes.child(tokens, move |token_nodes| {
                token_nodes.expand_infallible(ClassifiedCommandShape)
            });

            for shape in shapes {
                match shape {
                    ShapeResult::Success(shape) => token_nodes.color_shape(shape),
                    ShapeResult::Fallback { shape, allowed } => {
                        token_nodes.color_err(shape, allowed)
                    }
                }
            }

            out.push(classified);
        }

        token_nodes.expand_infallible(BackoffColoringMode::new(vec!["no more tokens".to_string()]));

        let end = token_nodes.span_at_cursor();

        ClassifiedPipeline::commands(out, start.until(end))
    }
}

pub enum CommandHeadKind {
    External,
    Internal(Signature),
}

#[derive(Debug, Copy, Clone)]
pub struct CommandHeadShape;

impl ExpandSyntax for CommandHeadShape {
    type Output = Result<CommandSignature, ParseError>;

    fn name(&self) -> &'static str {
        "command head"
    }

    fn expand<'a, 'b>(
        &self,
        token_nodes: &mut TokensIterator<'_>,
    ) -> Result<CommandSignature, ParseError> {
        token_nodes.expand_infallible(MaybeSpaceShape);

        let source = token_nodes.source();
        let registry = &token_nodes.context().registry.clone_box();

        token_nodes
            .expand_token(ExternalCommandType, |(inner, outer)| {
                Ok((
                    FlatShape::ExternalCommand,
                    CommandSignature::LiteralExternal { outer, inner },
                ))
            })
            .or_else(|_| {
                token_nodes.expand_token(WordType, |span| {
                    let name = span.slice(&source);
                    if registry.has(name) {
                        let signature = registry.get(name).unwrap();
                        Ok((
                            FlatShape::InternalCommand,
                            CommandSignature::Internal(signature.spanned(span)),
                        ))
                    } else {
                        Ok((FlatShape::ExternalCommand, CommandSignature::External(span)))
                    }
                })
            })
            .or_else(|_| {
                token_nodes
                    .expand_syntax(AnyExpressionShape)
                    .map(CommandSignature::Expression)
            })
    }
}

#[derive(Debug, Copy, Clone)]
pub struct ClassifiedCommandShape;

impl ExpandSyntax for ClassifiedCommandShape {
    type Output = ClassifiedCommand;

    fn name(&self) -> &'static str {
        "classified command"
    }

    fn expand<'a, 'b>(&self, token_nodes: &'b mut TokensIterator<'a>) -> ClassifiedCommand {
        let start = token_nodes.span_at_cursor();
        let source = token_nodes.source();

        let head = match token_nodes.expand_syntax(CommandHeadShape) {
            Err(err) => {
                token_nodes
                    .expand_infallible(BackoffColoringMode::new(vec!["command".to_string()]));
                return ClassifiedCommand::Error(err);
            }

            Ok(head) => head,
        };

        match head {
            CommandSignature::Expression(expr) => ClassifiedCommand::Error(ParseError::mismatch(
                "command",
                expr.type_name().spanned(expr.span),
            )),

            CommandSignature::External(name) => {
                let name_str = name.slice(&source);

                match external_command(token_nodes, name_str.tagged(name)) {
                    Err(err) => ClassifiedCommand::Error(err),
                    Ok(command) => command,
                }
            }

            // If the command starts with `^`, treat it as an external command no matter what
            CommandSignature::LiteralExternal { outer, inner } => {
                let name_str = inner.slice(&source);

                match external_command(token_nodes, name_str.tagged(outer)) {
                    Err(err) => ClassifiedCommand::Error(err),
                    Ok(command) => command,
                }
            }

            CommandSignature::Internal(signature) => {
                let tail = parse_command_tail(&signature.item, token_nodes, signature.span);

                let tail = match tail {
                    Err(err) => {
                        return ClassifiedCommand::Error(err);
                    }
                    Ok(tail) => tail,
                };

                let (positional, named) = match tail {
                    None => (None, None),
                    Some((positional, named)) => (positional, named),
                };

                let end = token_nodes.span_at_cursor();

                let expr = hir::Expression::Command(signature.span).into_expr(signature.span);

                let call = hir::Call {
                    head: Box::new(expr),
                    positional,
                    named,
                    span: start.until(end),
                };

                ClassifiedCommand::Internal(InternalCommand::new(
                    signature.item.name.clone(),
                    Tag {
                        span: signature.span,
                        anchor: None,
                    },
                    call,
                ))
            }
        }
    }
}

#[derive(Debug, Copy, Clone)]
pub struct MaybeWhitespaceEof;

impl ExpandSyntax for MaybeWhitespaceEof {
    type Output = Result<(), ParseError>;

    fn name(&self) -> &'static str {
        "<whitespace? eof>"
    }

    fn expand<'a, 'b>(&self, token_nodes: &'b mut TokensIterator<'a>) -> Self::Output {
        token_nodes.atomic_parse(|token_nodes| {
            token_nodes.expand_infallible(MaybeSpaceShape);
            token_nodes.expand_syntax(EofShape)
        })
    }
}

#[derive(Debug, Copy, Clone)]
pub struct EofShape;

impl ExpandSyntax for EofShape {
    type Output = Result<(), ParseError>;

    fn name(&self) -> &'static str {
        "eof"
    }

    fn expand<'a, 'b>(&self, token_nodes: &'b mut TokensIterator<'a>) -> Result<(), ParseError> {
        let next = token_nodes.peek();
        let node = next.node;

        match node {
            None => Ok(()),
            Some(node) => Err(ParseError::mismatch("eof", node.spanned_type_name())),
        }
    }
}

#[derive(Debug, Copy, Clone)]
pub struct WhitespaceShape;

impl ExpandSyntax for WhitespaceShape {
    type Output = Result<Span, ParseError>;

    fn name(&self) -> &'static str {
        "whitespace"
    }

    fn expand<'a, 'b>(&self, token_nodes: &'b mut TokensIterator<'a>) -> Result<Span, ParseError> {
        token_nodes.expand_token(WhitespaceType, |span| Ok((FlatShape::Whitespace, span)))
    }
}

#[derive(Debug, Copy, Clone)]
pub struct MaybeSpaceShape;

impl ExpandSyntax for MaybeSpaceShape {
    type Output = Option<Span>;

    fn name(&self) -> &'static str {
        "whitespace?"
    }

    fn expand<'a, 'b>(&self, token_nodes: &'b mut TokensIterator<'a>) -> Option<Span> {
        let result = token_nodes.expand_token(WhitespaceType, |span| {
            Ok((FlatShape::Whitespace, Some(span)))
        });

        // No space is acceptable, but we need to err inside expand_token so we don't
        // consume the non-whitespace token
        result.unwrap_or(None)
    }
}

#[derive(Debug, Copy, Clone)]
pub struct SpaceShape;

#[derive(Debug, Copy, Clone)]
pub struct CommandShape;
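The `SignatureRegistry` trait above is the only thing the expansion code needs from the command layer: membership, lookup, and a boxed clone. A minimal sketch of an implementation backed by a `HashMap`, written as if it lived next to the trait; the `MapRegistry` struct and helper are made up for illustration, and only the trait itself and `nu_protocol::Signature::new` (both visible elsewhere in this diff) are assumed.

```rust
use std::collections::HashMap;

use nu_protocol::Signature;

/// Hypothetical registry used only for illustration.
#[derive(Debug, Clone, Default)]
struct MapRegistry {
    signatures: HashMap<String, Signature>,
}

impl SignatureRegistry for MapRegistry {
    fn has(&self, name: &str) -> bool {
        self.signatures.contains_key(name)
    }

    fn get(&self, name: &str) -> Option<Signature> {
        self.signatures.get(name).cloned()
    }

    fn clone_box(&self) -> Box<dyn SignatureRegistry> {
        // Boxed clone so ExpandContext can hold `Box<dyn SignatureRegistry>`.
        Box::new(self.clone())
    }
}

fn example_registry() -> Box<dyn SignatureRegistry> {
    let mut registry = MapRegistry::default();
    registry
        .signatures
        .insert("cd".to_string(), Signature::new("cd"));
    Box::new(registry)
}
```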
@ -1,159 +0,0 @@
use crate::hir::Expression;
use crate::{
    hir,
    hir::syntax_shape::{
        ExpandSyntax, ExpressionContinuationShape, MemberShape, PathTailShape, PathTailSyntax,
        VariablePathShape,
    },
    hir::tokens_iterator::TokensIterator,
};
use hir::SpannedExpression;
use nu_errors::ParseError;
use nu_source::Span;

#[derive(Debug, Copy, Clone)]
pub struct CoerceBlockShape;

impl ExpandSyntax for CoerceBlockShape {
    type Output = Result<SpannedExpression, ParseError>;

    fn name(&self) -> &'static str {
        "any block"
    }

    fn expand<'a, 'b>(
        &self,
        token_nodes: &mut TokensIterator<'_>,
    ) -> Result<SpannedExpression, ParseError> {
        // is it just a block?
        token_nodes
            .expand_syntax(BlockShape)
            .or_else(|_| token_nodes.expand_syntax(ShorthandBlockShape))
    }
}

#[derive(Debug, Copy, Clone)]
pub struct BlockShape;

impl ExpandSyntax for BlockShape {
    type Output = Result<SpannedExpression, ParseError>;

    fn name(&self) -> &'static str {
        "block"
    }

    fn expand<'a, 'b>(
        &self,
        token_nodes: &'b mut TokensIterator<'a>,
    ) -> Result<SpannedExpression, ParseError> {
        let exprs = token_nodes.block()?;

        Ok(hir::Expression::Block(exprs.item).into_expr(exprs.span))
    }
}

#[derive(Debug, Copy, Clone)]
pub struct ShorthandBlockShape;

impl ExpandSyntax for ShorthandBlockShape {
    type Output = Result<SpannedExpression, ParseError>;

    fn name(&self) -> &'static str {
        "shorthand block"
    }

    fn expand<'a, 'b>(
        &self,
        token_nodes: &'b mut TokensIterator<'a>,
    ) -> Result<SpannedExpression, ParseError> {
        let mut current = token_nodes.expand_syntax(ShorthandPath)?;

        loop {
            match token_nodes.expand_syntax(ExpressionContinuationShape) {
                Result::Err(_) => break,
                Result::Ok(continuation) => current = continuation.append_to(current),
            }
        }
        let span = current.span;

        let block = hir::Expression::Block(vec![current]).into_expr(span);

        Ok(block)
    }
}

/// A shorthand for `$it.foo."bar"`, used inside of a shorthand block
#[derive(Debug, Copy, Clone)]
pub struct ShorthandPath;

impl ExpandSyntax for ShorthandPath {
    type Output = Result<SpannedExpression, ParseError>;

    fn name(&self) -> &'static str {
        "shorthand path"
    }

    fn expand<'a, 'b>(
        &self,
        token_nodes: &'b mut TokensIterator<'a>,
    ) -> Result<SpannedExpression, ParseError> {
        // if it's a variable path, that's the head part
        let path = token_nodes.expand_syntax(VariablePathShape);

        if let Ok(path) = path {
            return Ok(path);
        }

        // Synthesize the head of the shorthand path (`<member>` -> `$it.<member>`)
        let mut head = token_nodes.expand_syntax(ShorthandHeadShape)?;

        // Now that we've synthesized the head of the path, proceed to expand the tail of the path
        // like any other path.
        let tail = token_nodes.expand_syntax(PathTailShape);

        match tail {
            Err(_) => Ok(head),
            Ok(PathTailSyntax { tail, span }) => {
                let span = head.span.until(span);

                // For each member that `PathTailShape` expanded, join it onto the existing expression
                // to form a new path
                for member in tail {
                    head = Expression::dot_member(head, member).into_expr(span);
                }

                Ok(head)
            }
        }
    }
}

/// A shorthand for `$it.foo."bar"`, used inside of a shorthand block
#[derive(Debug, Copy, Clone)]
pub struct ShorthandHeadShape;

impl ExpandSyntax for ShorthandHeadShape {
    type Output = Result<SpannedExpression, ParseError>;

    fn name(&self) -> &'static str {
        "shorthand head"
    }

    fn expand<'a, 'b>(
        &self,
        token_nodes: &'b mut TokensIterator<'a>,
    ) -> Result<SpannedExpression, ParseError> {
        let head = token_nodes.expand_syntax(MemberShape)?;
        let head = head.to_path_member(&token_nodes.source());

        // Synthesize an `$it` expression
        let it = synthetic_it();
        let span = head.span;

        Ok(Expression::path(it, vec![head]).into_expr(span))
    }
}

fn synthetic_it() -> hir::SpannedExpression {
    Expression::it_variable(Span::unknown()).into_expr(Span::unknown())
}
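`ShorthandHeadShape` above turns a bare member such as `name` into the path `$it.name` by synthesizing an `$it` head. A toy sketch of that desugaring step in isolation, using hypothetical stand-in types instead of the crate's `Expression` and `PathMember`:

```rust
/// Hypothetical, simplified AST used only to illustrate the desugaring.
#[derive(Debug, PartialEq)]
enum Expr {
    ItVariable,
    Path { head: Box<Expr>, members: Vec<String> },
}

/// A bare member inside a shorthand block becomes a path rooted at a
/// synthetic `$it`, which is what ShorthandHeadShape accomplishes.
fn desugar_shorthand_member(member: &str) -> Expr {
    Expr::Path {
        head: Box::new(Expr::ItVariable),
        members: vec![member.to_string()],
    }
}

fn main() {
    assert_eq!(
        desugar_shorthand_member("name"),
        Expr::Path {
            head: Box::new(Expr::ItVariable),
            members: vec!["name".to_string()],
        }
    );
}
```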
@ -1,72 +0,0 @@
# Meaningful Primitive Tokens

- `int`
- `decimal`
- `op::name`
- `dot`
- `dotdot`
- `string`
- `var::it`
- `var::other`
- `external-command`
- `pattern::glob`
- `word`
- `comment`
- `whitespace`
- `separator`
- `longhand-flag`
- `shorthand-flag`

# Grouped Tokens

- `(call head ...tail)`
- `(list ...nodes)`
- `(paren ...nodes)`
- `(square ...nodes)`
- `(curly ...nodes)`
- `(pipeline ...elements) where elements: pipeline-element`
- `(pipeline-element pipe? token)`

# Atomic Tokens

- `(unit number unit) where number: number, unit: unit`

# Expression

```
start(ExpressionStart) continuation(ExpressionContinuation)* ->
```

## ExpressionStart

```
word -> String
unit -> Unit
number -> Number
string -> String
var::it -> Var::It
var::other -> Var::Other
pattern::glob -> Pattern::Glob
square -> Array
```

## TightExpressionContinuation

```
dot AnyExpression -> Member
dotdot AnyExpression -> RangeContinuation
```

## InfixExpressionContinuation

```
whitespace op whitespace AnyExpression -> InfixContinuation
```

## Member

```
int -> Member::Int
word -> Member::Word
string -> Member::String
```
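The `Expression` production above, one `ExpressionStart` followed by zero or more `ExpressionContinuation`s, is the same loop that `AnyExpressionShape` implements in the next file. A self-contained sketch of that start/continuation loop over a plain token list; every type here is a stand-in for illustration, not one of the parser's real shapes:

```rust
/// Stand-in tokens and expression type, for illustration only.
#[derive(Debug, Clone, PartialEq)]
enum Tok {
    Word(String),
    Dot,
}

#[derive(Debug, PartialEq)]
enum Expr {
    Word(String),
    Member { base: Box<Expr>, member: String },
}

/// Parse `start (dot member)*`, mirroring the grammar's
/// `start(ExpressionStart) continuation(ExpressionContinuation)*` rule.
fn parse_expression(tokens: &[Tok]) -> Option<Expr> {
    let mut iter = tokens.iter().peekable();

    // ExpressionStart: a single word
    let mut current = match iter.next()? {
        Tok::Word(w) => Expr::Word(w.clone()),
        _ => return None,
    };

    // TightExpressionContinuation: `dot AnyExpression -> Member`
    while let Some(Tok::Dot) = iter.peek() {
        iter.next();
        match iter.next() {
            Some(Tok::Word(member)) => {
                current = Expr::Member {
                    base: Box::new(current),
                    member: member.clone(),
                };
            }
            _ => return None,
        }
    }

    Some(current)
}

fn main() {
    let tokens = [Tok::Word("it".into()), Tok::Dot, Tok::Word("name".into())];
    println!("{:?}", parse_expression(&tokens));
}
```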
@ -1,77 +0,0 @@
pub(crate) mod delimited;
pub(crate) mod file_path;
pub(crate) mod list;
pub(crate) mod number;
pub(crate) mod pattern;
pub(crate) mod range;
pub(crate) mod string;
pub(crate) mod unit;
pub(crate) mod variable_path;

use crate::hir::syntax_shape::{
    BareExpressionShape, DelimitedSquareShape, ExpandContext, ExpandSyntax,
    ExpressionContinuationShape, NumberExpressionShape, PatternExpressionShape,
    StringExpressionShape, UnitExpressionShape, VariableShape,
};
use crate::hir::{SpannedExpression, TokensIterator};
use nu_errors::ParseError;
use std::path::PathBuf;

#[derive(Debug, Copy, Clone)]
pub struct AnyExpressionShape;

impl ExpandSyntax for AnyExpressionShape {
    type Output = Result<SpannedExpression, ParseError>;

    fn name(&self) -> &'static str {
        "any expression"
    }

    fn expand<'a, 'b>(
        &self,
        token_nodes: &mut TokensIterator<'_>,
    ) -> Result<SpannedExpression, ParseError> {
        token_nodes.atomic_parse(|token_nodes| {
            // Look for an atomic expression at the cursor
            let mut current = token_nodes.expand_syntax(AnyExpressionStartShape)?;

            loop {
                match token_nodes.expand_syntax(ExpressionContinuationShape) {
                    Err(_) => return Ok(current),
                    Ok(continuation) => current = continuation.append_to(current),
                }
            }
        })
    }
}

#[derive(Debug, Copy, Clone)]
pub struct AnyExpressionStartShape;

impl ExpandSyntax for AnyExpressionStartShape {
    type Output = Result<SpannedExpression, ParseError>;

    fn name(&self) -> &'static str {
        "any expression start"
    }

    fn expand<'a, 'b>(
        &self,
        token_nodes: &mut TokensIterator<'_>,
    ) -> Result<SpannedExpression, ParseError> {
        token_nodes
            .expand_syntax(VariableShape)
            .or_else(|_| token_nodes.expand_syntax(UnitExpressionShape))
            .or_else(|_| token_nodes.expand_syntax(BareExpressionShape))
            .or_else(|_| token_nodes.expand_syntax(PatternExpressionShape))
            .or_else(|_| token_nodes.expand_syntax(NumberExpressionShape))
            .or_else(|_| token_nodes.expand_syntax(StringExpressionShape))
            .or_else(|_| token_nodes.expand_syntax(DelimitedSquareShape))
    }
}

pub fn expand_file_path(string: &str, context: &ExpandContext) -> PathBuf {
    let expanded = shellexpand::tilde_with_context(string, || context.homedir());

    PathBuf::from(expanded.as_ref())
}
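`expand_file_path` above leans on the `shellexpand` crate for tilde expansion, with the home directory supplied by a closure. A small usage sketch of the same call outside the parser, assuming the `tilde_with_context` signature used here (home-dir closure returning an `Option` of a path-like value); the home path is made up:

```rust
use std::path::PathBuf;

fn main() {
    // Hypothetical home directory; in the parser this comes from ExpandContext::homedir().
    let home = Some(PathBuf::from("/home/jonathan"));

    // Expands the leading `~` using the provided home-dir closure.
    let expanded = shellexpand::tilde_with_context("~/notes.md", || home.clone());

    // Prints "/home/jonathan/notes.md"
    println!("{}", PathBuf::from(expanded.as_ref()).display());
}
```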
@ -1,760 +0,0 @@
|
|||||||
use crate::hir::syntax_shape::FlatShape;
|
|
||||||
use crate::hir::syntax_shape::{
|
|
||||||
expand_syntax, expression::expand_file_path, parse_single_node, BarePathShape,
|
|
||||||
BarePatternShape, ExpandContext, UnitShape, UnitSyntax,
|
|
||||||
};
|
|
||||||
use crate::parse::operator::EvaluationOperator;
|
|
||||||
use crate::parse::token_tree::{DelimitedNode, Delimiter, TokenNode};
|
|
||||||
use crate::parse::tokens::UnspannedToken;
|
|
||||||
use crate::parse::unit::Unit;
|
|
||||||
use crate::{
|
|
||||||
hir,
|
|
||||||
hir::{Expression, RawNumber, TokensIterator},
|
|
||||||
parse::flag::{Flag, FlagKind},
|
|
||||||
};
|
|
||||||
use nu_errors::{ParseError, ShellError};
|
|
||||||
use nu_protocol::{ShellTypeName, SpannedTypeName};
|
|
||||||
use nu_source::{b, DebugDocBuilder, HasSpan, PrettyDebugWithSource, Span, Spanned, SpannedItem};
|
|
||||||
use std::ops::Deref;
|
|
||||||
|
|
||||||
#[derive(Debug, Clone)]
|
|
||||||
pub enum UnspannedAtomicToken<'tokens> {
|
|
||||||
Eof {
|
|
||||||
span: Span,
|
|
||||||
},
|
|
||||||
Error {
|
|
||||||
error: Spanned<ShellError>,
|
|
||||||
},
|
|
||||||
Number {
|
|
||||||
number: RawNumber,
|
|
||||||
},
|
|
||||||
Size {
|
|
||||||
number: RawNumber,
|
|
||||||
unit: Spanned<Unit>,
|
|
||||||
},
|
|
||||||
String {
|
|
||||||
body: Span,
|
|
||||||
},
|
|
||||||
ItVariable {
|
|
||||||
name: Span,
|
|
||||||
},
|
|
||||||
Variable {
|
|
||||||
name: Span,
|
|
||||||
},
|
|
||||||
ExternalCommand {
|
|
||||||
command: Span,
|
|
||||||
},
|
|
||||||
ExternalWord {
|
|
||||||
text: Span,
|
|
||||||
},
|
|
||||||
GlobPattern {
|
|
||||||
pattern: Span,
|
|
||||||
},
|
|
||||||
Word {
|
|
||||||
text: Span,
|
|
||||||
},
|
|
||||||
SquareDelimited {
|
|
||||||
spans: (Span, Span),
|
|
||||||
nodes: &'tokens Vec<TokenNode>,
|
|
||||||
},
|
|
||||||
#[allow(unused)]
|
|
||||||
RoundDelimited {
|
|
||||||
spans: (Span, Span),
|
|
||||||
nodes: &'tokens Vec<TokenNode>,
|
|
||||||
},
|
|
||||||
ShorthandFlag {
|
|
||||||
name: Span,
|
|
||||||
},
|
|
||||||
CompareOperator {
|
|
||||||
text: Span,
|
|
||||||
},
|
|
||||||
Dot {
|
|
||||||
text: Span,
|
|
||||||
},
|
|
||||||
DotDot {
|
|
||||||
text: Span,
|
|
||||||
},
|
|
||||||
Whitespace {
|
|
||||||
text: Span,
|
|
||||||
},
|
|
||||||
Separator {
|
|
||||||
text: Span,
|
|
||||||
},
|
|
||||||
Comment {
|
|
||||||
body: Span,
|
|
||||||
},
|
|
||||||
}
|
|
||||||
|
|
||||||
impl<'tokens> UnspannedAtomicToken<'tokens> {
|
|
||||||
pub fn into_atomic_token(self, span: impl Into<Span>) -> AtomicToken<'tokens> {
|
|
||||||
AtomicToken {
|
|
||||||
unspanned: self,
|
|
||||||
span: span.into(),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl<'tokens> ShellTypeName for AtomicToken<'tokens> {
|
|
||||||
fn type_name(&self) -> &'static str {
|
|
||||||
self.unspanned.type_name()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl<'tokens> ShellTypeName for UnspannedAtomicToken<'tokens> {
|
|
||||||
fn type_name(&self) -> &'static str {
|
|
||||||
match &self {
|
|
||||||
UnspannedAtomicToken::Eof { .. } => "eof",
|
|
||||||
UnspannedAtomicToken::Error { .. } => "error",
|
|
||||||
UnspannedAtomicToken::CompareOperator { .. } => "compare operator",
|
|
||||||
UnspannedAtomicToken::ShorthandFlag { .. } => "shorthand flag",
|
|
||||||
UnspannedAtomicToken::Whitespace { .. } => "whitespace",
|
|
||||||
UnspannedAtomicToken::Separator { .. } => "separator",
|
|
||||||
UnspannedAtomicToken::Comment { .. } => "comment",
|
|
||||||
UnspannedAtomicToken::Dot { .. } => "dot",
|
|
||||||
UnspannedAtomicToken::DotDot { .. } => "dotdot",
|
|
||||||
UnspannedAtomicToken::Number { .. } => "number",
|
|
||||||
UnspannedAtomicToken::Size { .. } => "size",
|
|
||||||
UnspannedAtomicToken::String { .. } => "string",
|
|
||||||
UnspannedAtomicToken::ItVariable { .. } => "$it",
|
|
||||||
UnspannedAtomicToken::Variable { .. } => "variable",
|
|
||||||
UnspannedAtomicToken::ExternalCommand { .. } => "external command",
|
|
||||||
UnspannedAtomicToken::ExternalWord { .. } => "external word",
|
|
||||||
UnspannedAtomicToken::GlobPattern { .. } => "file pattern",
|
|
||||||
UnspannedAtomicToken::Word { .. } => "word",
|
|
||||||
UnspannedAtomicToken::SquareDelimited { .. } => "array literal",
|
|
||||||
UnspannedAtomicToken::RoundDelimited { .. } => "paren delimited",
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug, Clone)]
|
|
||||||
pub struct AtomicToken<'tokens> {
|
|
||||||
pub unspanned: UnspannedAtomicToken<'tokens>,
|
|
||||||
pub span: Span,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl<'tokens> HasSpan for AtomicToken<'tokens> {
|
|
||||||
fn span(&self) -> Span {
|
|
||||||
self.span
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl<'tokens> Deref for AtomicToken<'tokens> {
|
|
||||||
type Target = UnspannedAtomicToken<'tokens>;
|
|
||||||
|
|
||||||
fn deref(&self) -> &UnspannedAtomicToken<'tokens> {
|
|
||||||
&self.unspanned
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl<'tokens> AtomicToken<'tokens> {
pub fn to_hir(
&self,
context: &ExpandContext,
expected: &'static str,
) -> Result<hir::Expression, ParseError> {
Ok(match &self.unspanned {
UnspannedAtomicToken::Eof { .. } => {
return Err(ParseError::mismatch(
expected,
"eof atomic token".spanned(self.span),
))
}
UnspannedAtomicToken::Error { .. } => {
return Err(ParseError::mismatch(expected, "error".spanned(self.span)))
}
UnspannedAtomicToken::RoundDelimited { .. }
| UnspannedAtomicToken::CompareOperator { .. }
| UnspannedAtomicToken::ShorthandFlag { .. }
| UnspannedAtomicToken::Whitespace { .. }
| UnspannedAtomicToken::Separator { .. }
| UnspannedAtomicToken::Comment { .. }
| UnspannedAtomicToken::Dot { .. }
| UnspannedAtomicToken::DotDot { .. }
| UnspannedAtomicToken::SquareDelimited { .. } => {
return Err(ParseError::mismatch(expected, self.spanned_type_name()));
}
UnspannedAtomicToken::Number { number } => {
Expression::number(number.to_number(context.source), self.span)
}
UnspannedAtomicToken::Size { number, unit } => {
Expression::size(number.to_number(context.source), **unit, self.span)
}
UnspannedAtomicToken::String { body } => Expression::string(*body, self.span),
UnspannedAtomicToken::ItVariable { name } => Expression::it_variable(*name, self.span),
UnspannedAtomicToken::Variable { name } => Expression::variable(*name, self.span),
UnspannedAtomicToken::ExternalCommand { command } => {
Expression::external_command(*command, self.span)
}
UnspannedAtomicToken::ExternalWord { text } => Expression::string(*text, self.span),
UnspannedAtomicToken::GlobPattern { pattern } => Expression::pattern(
expand_file_path(pattern.slice(context.source), context).to_string_lossy(),
self.span,
),
UnspannedAtomicToken::Word { text } => Expression::string(*text, *text),
})
}

pub(crate) fn color_tokens(&self, shapes: &mut Vec<Spanned<FlatShape>>) {
|
|
||||||
match &self.unspanned {
|
|
||||||
UnspannedAtomicToken::Eof { .. } => {}
|
|
||||||
UnspannedAtomicToken::Error { .. } => shapes.push(FlatShape::Error.spanned(self.span)),
|
|
||||||
UnspannedAtomicToken::CompareOperator { .. } => {
|
|
||||||
shapes.push(FlatShape::CompareOperator.spanned(self.span))
|
|
||||||
}
|
|
||||||
UnspannedAtomicToken::ShorthandFlag { .. } => {
|
|
||||||
shapes.push(FlatShape::ShorthandFlag.spanned(self.span))
|
|
||||||
}
|
|
||||||
UnspannedAtomicToken::Whitespace { .. } => {
|
|
||||||
shapes.push(FlatShape::Whitespace.spanned(self.span))
|
|
||||||
}
|
|
||||||
UnspannedAtomicToken::Number {
|
|
||||||
number: RawNumber::Decimal(_),
|
|
||||||
} => shapes.push(FlatShape::Decimal.spanned(self.span)),
|
|
||||||
UnspannedAtomicToken::Number {
|
|
||||||
number: RawNumber::Int(_),
|
|
||||||
} => shapes.push(FlatShape::Int.spanned(self.span)),
|
|
||||||
UnspannedAtomicToken::Size { number, unit } => shapes.push(
|
|
||||||
FlatShape::Size {
|
|
||||||
number: number.span(),
|
|
||||||
unit: unit.span,
|
|
||||||
}
|
|
||||||
.spanned(self.span),
|
|
||||||
),
|
|
||||||
UnspannedAtomicToken::String { .. } => {
|
|
||||||
shapes.push(FlatShape::String.spanned(self.span))
|
|
||||||
}
|
|
||||||
UnspannedAtomicToken::ItVariable { .. } => {
|
|
||||||
shapes.push(FlatShape::ItVariable.spanned(self.span))
|
|
||||||
}
|
|
||||||
UnspannedAtomicToken::Variable { .. } => {
|
|
||||||
shapes.push(FlatShape::Variable.spanned(self.span))
|
|
||||||
}
|
|
||||||
UnspannedAtomicToken::ExternalCommand { .. } => {
|
|
||||||
shapes.push(FlatShape::ExternalCommand.spanned(self.span))
|
|
||||||
}
|
|
||||||
UnspannedAtomicToken::ExternalWord { .. } => {
|
|
||||||
shapes.push(FlatShape::ExternalWord.spanned(self.span))
|
|
||||||
}
|
|
||||||
UnspannedAtomicToken::GlobPattern { .. } => {
|
|
||||||
shapes.push(FlatShape::GlobPattern.spanned(self.span))
|
|
||||||
}
|
|
||||||
UnspannedAtomicToken::Word { .. } => shapes.push(FlatShape::Word.spanned(self.span)),
|
|
||||||
_ => shapes.push(FlatShape::Error.spanned(self.span)),
|
|
||||||
}
|
|
||||||
}
}
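
// A minimal usage sketch (illustrative, not part of the original commit):
// expanding the next atom with the strict default rule and lowering it into an
// HIR expression via `to_hir` above. `expand_atom` and `ExpansionRule` are the
// items defined below in this file; the function name and the "word or string"
// label are hypothetical.
fn atom_to_expression(
    token_nodes: &mut TokensIterator<'_>,
    context: &ExpandContext,
) -> Result<hir::Expression, ParseError> {
    // Expand one atomic token, then lower it; a mismatch reuses the same label.
    let atom = expand_atom(token_nodes, "word or string", context, ExpansionRule::new())?;
    atom.to_hir(context, "word or string")
}
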
impl PrettyDebugWithSource for AtomicToken<'_> {
|
|
||||||
fn pretty_debug(&self, source: &str) -> DebugDocBuilder {
|
|
||||||
fn atom(value: DebugDocBuilder) -> DebugDocBuilder {
|
|
||||||
b::delimit("(", b::kind("atom") + b::space() + value.group(), ")").group()
|
|
||||||
}
|
|
||||||
|
|
||||||
fn atom_kind(kind: impl std::fmt::Display, value: DebugDocBuilder) -> DebugDocBuilder {
|
|
||||||
b::delimit(
|
|
||||||
"(",
|
|
||||||
(b::kind("atom") + b::delimit("[", b::kind(kind), "]")).group()
|
|
||||||
+ b::space()
|
|
||||||
+ value.group(),
|
|
||||||
")",
|
|
||||||
)
|
|
||||||
.group()
|
|
||||||
}
|
|
||||||
|
|
||||||
atom(match &self.unspanned {
|
|
||||||
UnspannedAtomicToken::Eof { .. } => b::description("eof"),
|
|
||||||
UnspannedAtomicToken::Error { .. } => b::error("error"),
|
|
||||||
UnspannedAtomicToken::Number { number } => number.pretty_debug(source),
|
|
||||||
UnspannedAtomicToken::Size { number, unit } => {
|
|
||||||
number.pretty_debug(source) + b::keyword(unit.span.slice(source))
|
|
||||||
}
|
|
||||||
UnspannedAtomicToken::String { body } => b::primitive(body.slice(source)),
|
|
||||||
UnspannedAtomicToken::ItVariable { .. } | UnspannedAtomicToken::Variable { .. } => {
|
|
||||||
b::keyword(self.span.slice(source))
|
|
||||||
}
|
|
||||||
UnspannedAtomicToken::ExternalCommand { .. } => b::primitive(self.span.slice(source)),
|
|
||||||
UnspannedAtomicToken::ExternalWord { text } => {
|
|
||||||
atom_kind("external word", b::primitive(text.slice(source)))
|
|
||||||
}
|
|
||||||
UnspannedAtomicToken::GlobPattern { pattern } => {
|
|
||||||
atom_kind("pattern", b::primitive(pattern.slice(source)))
|
|
||||||
}
|
|
||||||
UnspannedAtomicToken::Word { text } => {
|
|
||||||
atom_kind("word", b::primitive(text.slice(source)))
|
|
||||||
}
|
|
||||||
UnspannedAtomicToken::SquareDelimited { nodes, .. } => b::delimit(
|
|
||||||
"[",
|
|
||||||
b::intersperse_with_source(nodes.iter(), b::space(), source),
|
|
||||||
"]",
|
|
||||||
),
|
|
||||||
UnspannedAtomicToken::RoundDelimited { nodes, .. } => b::delimit(
|
|
||||||
"(",
|
|
||||||
b::intersperse_with_source(nodes.iter(), b::space(), source),
|
|
||||||
")",
|
|
||||||
),
|
|
||||||
UnspannedAtomicToken::ShorthandFlag { name } => {
|
|
||||||
atom_kind("shorthand flag", b::key(name.slice(source)))
|
|
||||||
}
|
|
||||||
UnspannedAtomicToken::Dot { .. } => atom(b::kind("dot")),
|
|
||||||
UnspannedAtomicToken::DotDot { .. } => atom(b::kind("dotdot")),
|
|
||||||
UnspannedAtomicToken::CompareOperator { text } => {
|
|
||||||
atom_kind("operator", b::keyword(text.slice(source)))
|
|
||||||
}
|
|
||||||
UnspannedAtomicToken::Whitespace { text } => atom_kind(
|
|
||||||
"whitespace",
|
|
||||||
b::description(format!("{:?}", text.slice(source))),
|
|
||||||
),
|
|
||||||
UnspannedAtomicToken::Separator { text } => atom_kind(
|
|
||||||
"separator",
|
|
||||||
b::description(format!("{:?}", text.slice(source))),
|
|
||||||
),
|
|
||||||
UnspannedAtomicToken::Comment { body } => {
|
|
||||||
atom_kind("comment", b::description(body.slice(source)))
|
|
||||||
}
|
|
||||||
})
|
|
||||||
}
|
|
||||||
}

#[derive(Debug)]
pub enum WhitespaceHandling {
#[allow(unused)]
AllowWhitespace,
RejectWhitespace,
}

#[derive(Debug)]
pub struct ExpansionRule {
pub(crate) allow_external_command: bool,
pub(crate) allow_external_word: bool,
pub(crate) allow_cmp_operator: bool,
pub(crate) allow_eval_operator: bool,
pub(crate) allow_eof: bool,
pub(crate) allow_separator: bool,
pub(crate) treat_size_as_word: bool,
pub(crate) separate_members: bool,
pub(crate) commit_errors: bool,
pub(crate) whitespace: WhitespaceHandling,
pub(crate) allow_comments: bool,
}

impl ExpansionRule {
pub fn new() -> ExpansionRule {
ExpansionRule {
allow_external_command: false,
allow_external_word: false,
allow_eval_operator: false,
allow_cmp_operator: false,
allow_eof: false,
treat_size_as_word: false,
separate_members: false,
commit_errors: false,
allow_separator: false,
whitespace: WhitespaceHandling::RejectWhitespace,
allow_comments: false,
}
}

/// The intent of permissive mode is to return an atomic token for every possible
/// input token. This is important for error-correcting parsing, such as the
/// syntax highlighter.
pub fn permissive() -> ExpansionRule {
ExpansionRule {
allow_external_command: true,
allow_external_word: true,
allow_cmp_operator: true,
allow_eval_operator: true,
allow_eof: true,
separate_members: false,
treat_size_as_word: false,
commit_errors: true,
allow_separator: true,
allow_comments: true,
whitespace: WhitespaceHandling::AllowWhitespace,
}
}
|
|
||||||
#[allow(unused)]
|
|
||||||
pub fn allow_external_command(mut self) -> ExpansionRule {
|
|
||||||
self.allow_external_command = true;
|
|
||||||
self
|
|
||||||
}
|
|
||||||
|
|
||||||
#[allow(unused)]
|
|
||||||
pub fn allow_cmp_operator(mut self) -> ExpansionRule {
|
|
||||||
self.allow_cmp_operator = true;
|
|
||||||
self
|
|
||||||
}
|
|
||||||
|
|
||||||
#[allow(unused)]
|
|
||||||
pub fn no_cmp_operator(mut self) -> ExpansionRule {
|
|
||||||
self.allow_cmp_operator = false;
|
|
||||||
self
|
|
||||||
}
|
|
||||||
|
|
||||||
#[allow(unused)]
|
|
||||||
pub fn allow_eval_operator(mut self) -> ExpansionRule {
|
|
||||||
self.allow_eval_operator = true;
|
|
||||||
self
|
|
||||||
}
|
|
||||||
|
|
||||||
#[allow(unused)]
|
|
||||||
pub fn no_operator(mut self) -> ExpansionRule {
|
|
||||||
self.allow_eval_operator = false;
|
|
||||||
self
|
|
||||||
}
|
|
||||||
|
|
||||||
#[allow(unused)]
|
|
||||||
pub fn no_external_command(mut self) -> ExpansionRule {
|
|
||||||
self.allow_external_command = false;
|
|
||||||
self
|
|
||||||
}
|
|
||||||
|
|
||||||
#[allow(unused)]
|
|
||||||
pub fn allow_external_word(mut self) -> ExpansionRule {
|
|
||||||
self.allow_external_word = true;
|
|
||||||
self
|
|
||||||
}
|
|
||||||
|
|
||||||
#[allow(unused)]
|
|
||||||
pub fn no_external_word(mut self) -> ExpansionRule {
|
|
||||||
self.allow_external_word = false;
|
|
||||||
self
|
|
||||||
}
|
|
||||||
|
|
||||||
#[allow(unused)]
|
|
||||||
pub fn treat_size_as_word(mut self) -> ExpansionRule {
|
|
||||||
self.treat_size_as_word = true;
|
|
||||||
self
|
|
||||||
}
|
|
||||||
|
|
||||||
#[allow(unused)]
|
|
||||||
pub fn separate_members(mut self) -> ExpansionRule {
|
|
||||||
self.separate_members = true;
|
|
||||||
self
|
|
||||||
}
|
|
||||||
|
|
||||||
#[allow(unused)]
|
|
||||||
pub fn no_separate_members(mut self) -> ExpansionRule {
|
|
||||||
self.separate_members = false;
|
|
||||||
self
|
|
||||||
}
|
|
||||||
|
|
||||||
#[allow(unused)]
|
|
||||||
pub fn commit_errors(mut self) -> ExpansionRule {
|
|
||||||
self.commit_errors = true;
|
|
||||||
self
|
|
||||||
}
|
|
||||||
|
|
||||||
#[allow(unused)]
|
|
||||||
pub fn allow_whitespace(mut self) -> ExpansionRule {
|
|
||||||
self.whitespace = WhitespaceHandling::AllowWhitespace;
|
|
||||||
self
|
|
||||||
}
|
|
||||||
|
|
||||||
#[allow(unused)]
|
|
||||||
pub fn reject_whitespace(mut self) -> ExpansionRule {
|
|
||||||
self.whitespace = WhitespaceHandling::RejectWhitespace;
|
|
||||||
self
|
|
||||||
}
|
|
||||||
|
|
||||||
#[allow(unused)]
|
|
||||||
pub fn allow_separator(mut self) -> ExpansionRule {
|
|
||||||
self.allow_separator = true;
|
|
||||||
self
|
|
||||||
}
|
|
||||||
|
|
||||||
#[allow(unused)]
|
|
||||||
pub fn reject_separator(mut self) -> ExpansionRule {
|
|
||||||
self.allow_separator = false;
|
|
||||||
self
|
|
||||||
}
|
|
||||||
|
|
||||||
#[allow(unused)]
|
|
||||||
pub fn allow_comments(mut self) -> ExpansionRule {
|
|
||||||
self.allow_comments = true;
|
|
||||||
self
|
|
||||||
}
|
|
||||||
|
|
||||||
#[allow(unused)]
|
|
||||||
pub fn reject_comments(mut self) -> ExpansionRule {
|
|
||||||
self.allow_comments = false;
|
|
||||||
self
|
|
||||||
}
}
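
// A small illustrative sketch (not part of the original commit): composing a
// custom rule from the builder methods above, somewhere between the strict
// `new()` default and the fully permissive mode. The function name is
// hypothetical.
fn word_and_whitespace_rule() -> ExpansionRule {
    // Start from the strict default, then opt in to the cases the caller wants
    // reported as atoms instead of errors.
    ExpansionRule::new()
        .allow_external_word()
        .allow_whitespace()
        .allow_comments()
}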

pub fn expand_atom<'me, 'content>(
token_nodes: &'me mut TokensIterator<'content>,
expected: &'static str,
context: &ExpandContext,
rule: ExpansionRule,
) -> Result<AtomicToken<'content>, ParseError> {
token_nodes.with_expand_tracer(|_, tracer| tracer.start("atom"));

let result = expand_atom_inner(token_nodes, expected, context, rule);

token_nodes.with_expand_tracer(|_, tracer| match &result {
Ok(result) => {
tracer.add_result(result.clone());
tracer.success();
}

Err(err) => tracer.failed(err),
});

result
}
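
// Illustrative sketch (not part of the original commit): how an
// error-correcting consumer such as the syntax highlighter might drive
// `expand_atom` with the permissive rule and collect flat shapes for coloring.
// The function name is hypothetical; `FlatShape` and `Spanned` are the types
// already used by `color_tokens` above.
fn color_next_atom(
    token_nodes: &mut TokensIterator<'_>,
    context: &ExpandContext,
    shapes: &mut Vec<Spanned<FlatShape>>,
) -> Result<(), ParseError> {
    // Permissive mode returns an atom for every input token, so highlighting
    // can keep making progress over input that would not otherwise parse.
    let atom = expand_atom(token_nodes, "any token", context, ExpansionRule::permissive())?;
    atom.color_tokens(shapes);
    Ok(())
}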

/// If the caller of expand_atom throws away the returned atomic token, it
/// must use a checkpoint to roll it back.
fn expand_atom_inner<'me, 'content>(
|
|
||||||
token_nodes: &'me mut TokensIterator<'content>,
|
|
||||||
expected: &'static str,
|
|
||||||
context: &ExpandContext,
|
|
||||||
rule: ExpansionRule,
|
|
||||||
) -> Result<AtomicToken<'content>, ParseError> {
|
|
||||||
if token_nodes.at_end() {
|
|
||||||
if rule.allow_eof {
|
|
||||||
return Ok(UnspannedAtomicToken::Eof {
|
|
||||||
span: Span::unknown(),
|
|
||||||
}
|
|
||||||
.into_atomic_token(Span::unknown()));
|
|
||||||
} else {
|
|
||||||
return Err(ParseError::unexpected_eof("anything", Span::unknown()));
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// First, we'll need to handle the situation where more than one token corresponds
|
|
||||||
// to a single atomic token
|
|
||||||
|
|
||||||
// If treat_size_as_word, don't try to parse the head of the token stream
|
|
||||||
// as a size.
|
|
||||||
if !rule.treat_size_as_word {
|
|
||||||
match expand_syntax(&UnitShape, token_nodes, context) {
|
|
||||||
// If the head of the stream isn't a valid unit, we'll try to parse
|
|
||||||
// it again next as a word
|
|
||||||
Err(_) => {}
|
|
||||||
|
|
||||||
// But if it was a valid unit, we're done here
|
|
||||||
Ok(UnitSyntax {
|
|
||||||
unit: (number, unit),
|
|
||||||
span,
|
|
||||||
}) => return Ok(UnspannedAtomicToken::Size { number, unit }.into_atomic_token(span)),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if rule.separate_members {
|
|
||||||
let mut next = token_nodes.peek_any();
|
|
||||||
|
|
||||||
match next.node {
|
|
||||||
Some(token) if token.is_word() => {
|
|
||||||
next.commit();
|
|
||||||
return Ok(UnspannedAtomicToken::Word { text: token.span() }
|
|
||||||
.into_atomic_token(token.span()));
|
|
||||||
}
|
|
||||||
|
|
||||||
Some(token) if token.is_int() => {
|
|
||||||
next.commit();
|
|
||||||
return Ok(UnspannedAtomicToken::Number {
|
|
||||||
number: RawNumber::Int(token.span()),
|
|
||||||
}
|
|
||||||
.into_atomic_token(token.span()));
|
|
||||||
}
|
|
||||||
|
|
||||||
_ => {}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Try to parse the head of the stream as a bare path. A bare path includes
|
|
||||||
// words as well as `.`s, connected together without whitespace.
|
|
||||||
match expand_syntax(&BarePathShape, token_nodes, context) {
|
|
||||||
// If we didn't find a bare path
|
|
||||||
Err(_) => {}
|
|
||||||
Ok(span) => {
|
|
||||||
let next = token_nodes.peek_any();
|
|
||||||
|
|
||||||
match next.node {
|
|
||||||
Some(token) if token.is_pattern() => {
|
|
||||||
// if the very next token is a pattern, we're looking at a glob, not a
|
|
||||||
// word, and we should try to parse it as a glob next
|
|
||||||
}
|
|
||||||
|
|
||||||
_ => return Ok(UnspannedAtomicToken::Word { text: span }.into_atomic_token(span)),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Try to parse the head of the stream as a pattern. A pattern includes
|
|
||||||
// words, words with `*` as well as `.`s, connected together without whitespace.
|
|
||||||
match expand_syntax(&BarePatternShape, token_nodes, context) {
|
|
||||||
// If we didn't find a bare pattern
Err(_) => {}
|
|
||||||
Ok(span) => {
|
|
||||||
return Ok(UnspannedAtomicToken::GlobPattern { pattern: span }.into_atomic_token(span))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// The next token corresponds to at most one atomic token
|
|
||||||
|
|
||||||
// We need to `peek` because `parse_single_node` doesn't cover all of the
|
|
||||||
// cases that `expand_atom` covers. We should probably collapse the two
|
|
||||||
// if possible.
|
|
||||||
let peeked = token_nodes.peek_any().not_eof(expected)?;
|
|
||||||
|
|
||||||
match peeked.node {
|
|
||||||
TokenNode::Token(_) => {
|
|
||||||
// handle this next
|
|
||||||
}
|
|
||||||
|
|
||||||
TokenNode::Error(error) => {
|
|
||||||
peeked.commit();
|
|
||||||
return Ok(UnspannedAtomicToken::Error {
|
|
||||||
error: error.clone(),
|
|
||||||
}
|
|
||||||
.into_atomic_token(error.span));
|
|
||||||
}
|
|
||||||
|
|
||||||
TokenNode::Separator(span) if rule.allow_separator => {
|
|
||||||
peeked.commit();
|
|
||||||
return Ok(UnspannedAtomicToken::Separator { text: *span }.into_atomic_token(span));
|
|
||||||
}
|
|
||||||
|
|
||||||
TokenNode::Comment(comment) if rule.allow_comments => {
|
|
||||||
peeked.commit();
|
|
||||||
return Ok(UnspannedAtomicToken::Comment { body: comment.text }
|
|
||||||
.into_atomic_token(comment.span()));
|
|
||||||
}
|
|
||||||
|
|
||||||
// [ ... ]
|
|
||||||
TokenNode::Delimited(Spanned {
|
|
||||||
item:
|
|
||||||
DelimitedNode {
|
|
||||||
delimiter: Delimiter::Square,
|
|
||||||
spans,
|
|
||||||
children,
|
|
||||||
},
|
|
||||||
span,
|
|
||||||
}) => {
|
|
||||||
peeked.commit();
|
|
||||||
let span = *span;
|
|
||||||
return Ok(UnspannedAtomicToken::SquareDelimited {
|
|
||||||
nodes: children,
|
|
||||||
spans: *spans,
|
|
||||||
}
|
|
||||||
.into_atomic_token(span));
|
|
||||||
}
|
|
||||||
|
|
||||||
TokenNode::Flag(Flag {
|
|
||||||
kind: FlagKind::Shorthand,
|
|
||||||
name,
|
|
||||||
span,
|
|
||||||
}) => {
|
|
||||||
peeked.commit();
|
|
||||||
return Ok(UnspannedAtomicToken::ShorthandFlag { name: *name }.into_atomic_token(*span));
|
|
||||||
}
|
|
||||||
|
|
||||||
TokenNode::Flag(Flag {
|
|
||||||
kind: FlagKind::Longhand,
|
|
||||||
name,
|
|
||||||
span,
|
|
||||||
}) => {
|
|
||||||
peeked.commit();
|
|
||||||
return Ok(UnspannedAtomicToken::ShorthandFlag { name: *name }.into_atomic_token(*span));
|
|
||||||
}
|
|
||||||
|
|
||||||
// If we see whitespace, process the whitespace according to the whitespace
|
|
||||||
// handling rules
|
|
||||||
TokenNode::Whitespace(span) => match rule.whitespace {
|
|
||||||
// if whitespace is allowed, return a whitespace token
|
|
||||||
WhitespaceHandling::AllowWhitespace => {
|
|
||||||
peeked.commit();
|
|
||||||
return Ok(
|
|
||||||
UnspannedAtomicToken::Whitespace { text: *span }.into_atomic_token(*span)
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
// if whitespace is disallowed, return an error
|
|
||||||
WhitespaceHandling::RejectWhitespace => {
|
|
||||||
return Err(ParseError::mismatch(expected, "whitespace".spanned(*span)))
|
|
||||||
}
|
|
||||||
},
|
|
||||||
|
|
||||||
other => {
|
|
||||||
let span = peeked.node.span();
|
|
||||||
|
|
||||||
peeked.commit();
|
|
||||||
return Ok(UnspannedAtomicToken::Error {
|
|
||||||
error: ShellError::type_error("token", other.type_name().spanned(span))
|
|
||||||
.spanned(span),
|
|
||||||
}
|
|
||||||
.into_atomic_token(span));
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
parse_single_node(token_nodes, expected, |token, token_span, err| {
|
|
||||||
Ok(match token {
|
|
||||||
// First, the error cases. Each error case corresponds to an expansion rule
// flag that can be used to allow the case
|
|
||||||
// rule.allow_cmp_operator
|
|
||||||
UnspannedToken::CompareOperator(_) if !rule.allow_cmp_operator => {
|
|
||||||
return Err(err.error())
|
|
||||||
}
|
|
||||||
|
|
||||||
// rule.allow_eval_operator
|
|
||||||
UnspannedToken::EvaluationOperator(_) if !rule.allow_eval_operator => {
|
|
||||||
return Err(err.error())
|
|
||||||
}
|
|
||||||
|
|
||||||
// rule.allow_external_command
|
|
||||||
UnspannedToken::ExternalCommand(_) if !rule.allow_external_command => {
|
|
||||||
return Err(ParseError::mismatch(
|
|
||||||
expected,
|
|
||||||
token.type_name().spanned(token_span),
|
|
||||||
))
|
|
||||||
}
|
|
||||||
// rule.allow_external_word
|
|
||||||
UnspannedToken::ExternalWord if !rule.allow_external_word => {
|
|
||||||
return Err(ParseError::mismatch(
|
|
||||||
expected,
|
|
||||||
"external word".spanned(token_span),
|
|
||||||
))
|
|
||||||
}
|
|
||||||
|
|
||||||
UnspannedToken::Number(number) => {
|
|
||||||
UnspannedAtomicToken::Number { number }.into_atomic_token(token_span)
|
|
||||||
}
|
|
||||||
UnspannedToken::CompareOperator(_) => {
|
|
||||||
UnspannedAtomicToken::CompareOperator { text: token_span }
|
|
||||||
.into_atomic_token(token_span)
|
|
||||||
}
|
|
||||||
UnspannedToken::EvaluationOperator(EvaluationOperator::Dot) => {
|
|
||||||
UnspannedAtomicToken::Dot { text: token_span }.into_atomic_token(token_span)
|
|
||||||
}
|
|
||||||
UnspannedToken::EvaluationOperator(EvaluationOperator::DotDot) => {
|
|
||||||
UnspannedAtomicToken::DotDot { text: token_span }.into_atomic_token(token_span)
|
|
||||||
}
|
|
||||||
UnspannedToken::String(body) => {
|
|
||||||
UnspannedAtomicToken::String { body }.into_atomic_token(token_span)
|
|
||||||
}
|
|
||||||
UnspannedToken::Variable(name) if name.slice(context.source) == "it" => {
|
|
||||||
UnspannedAtomicToken::ItVariable { name }.into_atomic_token(token_span)
|
|
||||||
}
|
|
||||||
UnspannedToken::Variable(name) => {
|
|
||||||
UnspannedAtomicToken::Variable { name }.into_atomic_token(token_span)
|
|
||||||
}
|
|
||||||
UnspannedToken::ExternalCommand(command) => {
|
|
||||||
UnspannedAtomicToken::ExternalCommand { command }.into_atomic_token(token_span)
|
|
||||||
}
|
|
||||||
UnspannedToken::ExternalWord => UnspannedAtomicToken::ExternalWord { text: token_span }
|
|
||||||
.into_atomic_token(token_span),
|
|
||||||
UnspannedToken::GlobPattern => UnspannedAtomicToken::GlobPattern {
|
|
||||||
pattern: token_span,
|
|
||||||
}
|
|
||||||
.into_atomic_token(token_span),
|
|
||||||
UnspannedToken::Bare => {
|
|
||||||
UnspannedAtomicToken::Word { text: token_span }.into_atomic_token(token_span)
|
|
||||||
}
|
|
||||||
})
|
|
||||||
})
}
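
// Illustrative sketch (not part of the original commit): per the note above
// `expand_atom_inner`, a caller that may discard the returned atom should run
// the expansion inside a transaction so the token stream is rolled back on
// failure. This assumes `TokensIterator::atomic_parse` behaves the way its
// uses in `RangeShape` and `ExpressionContinuationShape` below suggest: an
// `Err` from the closure undoes whatever it consumed. The function name is
// hypothetical.
fn try_expand_word<'me, 'content>(
    token_nodes: &'me mut TokensIterator<'content>,
    context: &ExpandContext,
) -> Result<AtomicToken<'content>, ParseError> {
    token_nodes.atomic_parse(|token_nodes| {
        let atom = expand_atom(token_nodes, "word", context, ExpansionRule::new())?;

        if let UnspannedAtomicToken::Word { .. } = atom.unspanned {
            Ok(atom)
        } else {
            // Rejecting the atom relies on the rollback described above.
            Err(ParseError::mismatch("word", atom.spanned_type_name()))
        }
    })
}
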
@@ -1,24 +0,0 @@
use crate::hir::syntax_shape::ExpandSyntax;
use crate::hir::SpannedExpression;
use crate::{hir, hir::TokensIterator};
use nu_errors::ParseError;

#[derive(Debug, Copy, Clone)]
pub struct DelimitedSquareShape;

impl ExpandSyntax for DelimitedSquareShape {
type Output = Result<SpannedExpression, ParseError>;

fn name(&self) -> &'static str {
"delimited square"
}

fn expand<'a, 'b>(
&self,
token_nodes: &'b mut TokensIterator<'a>,
) -> Result<SpannedExpression, ParseError> {
let exprs = token_nodes.square()?;

Ok(hir::Expression::list(exprs.item).into_expr(exprs.span))
}
}
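
// Illustrative sketch (not part of the original commit): shapes implementing
// `ExpandSyntax` are driven through the token iterator, so a square-delimited
// block is expanded roughly like this. The surrounding function is
// hypothetical; `expand_syntax` is the same driver used by the other shapes in
// this commit.
fn expand_square_block(
    token_nodes: &mut TokensIterator<'_>,
) -> Result<SpannedExpression, ParseError> {
    // `expand_syntax` runs the shape's `expand` method at the current position
    // of the token stream and returns its `Output`.
    token_nodes.expand_syntax(DelimitedSquareShape)
}
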
@@ -1,62 +0,0 @@
use crate::hir::syntax_shape::{
|
|
||||||
expression::expand_file_path, BarePathShape, DecimalShape, ExpandContext, ExpandSyntax,
|
|
||||||
FlatShape, IntShape, StringShape,
|
|
||||||
};
|
|
||||||
use crate::hir::{Expression, SpannedExpression, TokensIterator};
|
|
||||||
use crate::parse::token_tree::ExternalWordType;
|
|
||||||
use nu_errors::ParseError;
|
|
||||||
use nu_source::{HasSpan, Span};
|
|
||||||
|
|
||||||
#[derive(Debug, Copy, Clone)]
|
|
||||||
pub struct FilePathShape;
|
|
||||||
|
|
||||||
impl ExpandSyntax for FilePathShape {
|
|
||||||
type Output = Result<SpannedExpression, ParseError>;
|
|
||||||
|
|
||||||
fn name(&self) -> &'static str {
|
|
||||||
"file path"
|
|
||||||
}
|
|
||||||
|
|
||||||
fn expand<'a, 'b>(
|
|
||||||
&self,
|
|
||||||
token_nodes: &mut TokensIterator<'_>,
|
|
||||||
) -> Result<SpannedExpression, ParseError> {
|
|
||||||
token_nodes
|
|
||||||
.expand_syntax(BarePathShape)
|
|
||||||
.or_else(|_| token_nodes.expand_syntax(ExternalWordShape))
|
|
||||||
.map(|span| file_path(span, token_nodes.context()).into_expr(span))
|
|
||||||
.or_else(|_| {
|
|
||||||
token_nodes.expand_syntax(StringShape).map(|syntax| {
|
|
||||||
file_path(syntax.inner, token_nodes.context()).into_expr(syntax.span)
|
|
||||||
})
|
|
||||||
})
|
|
||||||
.or_else(|_| {
|
|
||||||
token_nodes
|
|
||||||
.expand_syntax(IntShape)
|
|
||||||
.or_else(|_| token_nodes.expand_syntax(DecimalShape))
|
|
||||||
.map(|number| {
|
|
||||||
file_path(number.span(), token_nodes.context()).into_expr(number.span())
|
|
||||||
})
|
|
||||||
})
|
|
||||||
.map_err(|_| token_nodes.err_next_token("file path"))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fn file_path(text: Span, context: &ExpandContext) -> Expression {
|
|
||||||
Expression::FilePath(expand_file_path(text.slice(context.source), context))
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug, Copy, Clone)]
|
|
||||||
pub struct ExternalWordShape;
|
|
||||||
|
|
||||||
impl ExpandSyntax for ExternalWordShape {
|
|
||||||
type Output = Result<Span, ParseError>;
|
|
||||||
|
|
||||||
fn name(&self) -> &'static str {
|
|
||||||
"external word"
|
|
||||||
}
|
|
||||||
|
|
||||||
fn expand<'a, 'b>(&self, token_nodes: &'b mut TokensIterator<'a>) -> Result<Span, ParseError> {
|
|
||||||
token_nodes.expand_token(ExternalWordType, |span| Ok((FlatShape::ExternalWord, span)))
|
|
||||||
}
|
|
||||||
}
@@ -1,170 +0,0 @@
use crate::hir::syntax_shape::flat_shape::FlatShape;
|
|
||||||
use crate::{
|
|
||||||
hir,
|
|
||||||
hir::syntax_shape::{AnyExpressionShape, ExpandSyntax, MaybeSpaceShape},
|
|
||||||
hir::TokensIterator,
|
|
||||||
};
|
|
||||||
use derive_new::new;
|
|
||||||
use nu_source::{b, DebugDocBuilder, HasSpan, PrettyDebugWithSource, Span, Spanned, SpannedItem};
|
|
||||||
|
|
||||||
#[derive(Debug, Clone)]
|
|
||||||
pub struct ExpressionListSyntax {
|
|
||||||
pub exprs: Spanned<Vec<hir::SpannedExpression>>,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl HasSpan for ExpressionListSyntax {
|
|
||||||
fn span(&self) -> Span {
|
|
||||||
self.exprs.span
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl PrettyDebugWithSource for ExpressionListSyntax {
|
|
||||||
fn pretty_debug(&self, source: &str) -> DebugDocBuilder {
|
|
||||||
b::intersperse(
|
|
||||||
self.exprs.iter().map(|e| e.pretty_debug(source)),
|
|
||||||
b::space(),
|
|
||||||
)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug, Copy, Clone)]
|
|
||||||
pub struct ExpressionListShape;
|
|
||||||
|
|
||||||
impl ExpandSyntax for ExpressionListShape {
|
|
||||||
type Output = ExpressionListSyntax;
|
|
||||||
|
|
||||||
fn name(&self) -> &'static str {
|
|
||||||
"expression list"
|
|
||||||
}
|
|
||||||
|
|
||||||
fn expand<'a, 'b>(&self, token_nodes: &mut TokensIterator<'_>) -> ExpressionListSyntax {
|
|
||||||
// We encountered a parsing error and will continue with simpler coloring ("backoff
|
|
||||||
// coloring mode")
|
|
||||||
let mut backoff = false;
|
|
||||||
|
|
||||||
let mut exprs = vec![];
|
|
||||||
|
|
||||||
let start = token_nodes.span_at_cursor();
|
|
||||||
|
|
||||||
token_nodes.expand_infallible(MaybeSpaceShape);
|
|
||||||
|
|
||||||
if token_nodes.at_end() {
|
|
||||||
return ExpressionListSyntax {
|
|
||||||
exprs: exprs.spanned(start),
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
let expr = token_nodes.expand_syntax(AnyExpressionShape);
|
|
||||||
|
|
||||||
match expr {
|
|
||||||
Ok(expr) => exprs.push(expr),
|
|
||||||
Err(_) => backoff = true,
|
|
||||||
}
|
|
||||||
|
|
||||||
loop {
|
|
||||||
if token_nodes.at_end() {
|
|
||||||
let end = token_nodes.span_at_cursor();
|
|
||||||
return ExpressionListSyntax {
|
|
||||||
exprs: exprs.spanned(start.until(end)),
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
if backoff {
|
|
||||||
let len = token_nodes.state().shapes().len();
|
|
||||||
|
|
||||||
// If we previously encountered a parsing error, use backoff coloring mode
|
|
||||||
token_nodes
|
|
||||||
.expand_infallible(SimplestExpression::new(vec!["expression".to_string()]));
|
|
||||||
|
|
||||||
if len == token_nodes.state().shapes().len() && !token_nodes.at_end() {
|
|
||||||
// This should never happen, but if it does, a panic is better than an infinite loop
|
|
||||||
panic!("Unexpected tokens left that couldn't be colored even with SimplestExpression")
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
let expr = token_nodes.atomic_parse(|token_nodes| {
|
|
||||||
token_nodes.expand_infallible(MaybeSpaceShape);
|
|
||||||
token_nodes.expand_syntax(AnyExpressionShape)
|
|
||||||
});
|
|
||||||
|
|
||||||
match expr {
|
|
||||||
Ok(expr) => exprs.push(expr),
|
|
||||||
Err(_) => {
|
|
||||||
backoff = true;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
// Otherwise, move on to the next expression
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}

/// BackoffColoringMode consumes all of the remaining tokens in an infallible way
#[derive(Debug, Clone, new)]
pub struct BackoffColoringMode {
allowed: Vec<String>,
}
|
|
||||||
impl ExpandSyntax for BackoffColoringMode {
|
|
||||||
type Output = Option<Span>;
|
|
||||||
|
|
||||||
fn name(&self) -> &'static str {
|
|
||||||
"BackoffColoringMode"
|
|
||||||
}
|
|
||||||
|
|
||||||
fn expand<'a, 'b>(&self, token_nodes: &'b mut TokensIterator<'a>) -> Option<Span> {
|
|
||||||
loop {
|
|
||||||
if token_nodes.at_end() {
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
|
|
||||||
let len = token_nodes.state().shapes().len();
|
|
||||||
token_nodes.expand_infallible(SimplestExpression::new(self.allowed.clone()));
|
|
||||||
|
|
||||||
if len == token_nodes.state().shapes().len() && !token_nodes.at_end() {
|
|
||||||
// This shouldn't happen, but if it does, a panic is better than an infinite loop
|
|
||||||
panic!("SimplestExpression failed to consume any tokens, but it's not at the end. This is unexpected\n== token nodes==\n{:#?}\n\n== shapes ==\n{:#?}", token_nodes, token_nodes.state().shapes());
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
None
|
|
||||||
}
|
|
||||||
}

/// The point of `SimplestExpression` is to serve as an infallible base case for coloring.
/// As a last ditch effort, if we can't find any way to parse the head of the stream as an
/// expression, fall back to simple coloring.
#[derive(Debug, Clone, new)]
pub struct SimplestExpression {
valid_shapes: Vec<String>,
}
|
|
||||||
impl ExpandSyntax for SimplestExpression {
|
|
||||||
type Output = Span;
|
|
||||||
|
|
||||||
fn name(&self) -> &'static str {
|
|
||||||
"SimplestExpression"
|
|
||||||
}
|
|
||||||
|
|
||||||
fn expand<'a, 'b>(&self, token_nodes: &'b mut TokensIterator<'a>) -> Span {
|
|
||||||
if token_nodes.at_end() {
|
|
||||||
return Span::unknown();
|
|
||||||
}
|
|
||||||
|
|
||||||
let source = token_nodes.source();
|
|
||||||
|
|
||||||
let peeked = token_nodes.peek();
|
|
||||||
|
|
||||||
match peeked.not_eof("simplest expression") {
|
|
||||||
Err(_) => token_nodes.span_at_cursor(),
|
|
||||||
Ok(peeked) => {
|
|
||||||
let token = peeked.commit();
|
|
||||||
|
|
||||||
for shape in FlatShape::shapes(token, &source) {
|
|
||||||
token_nodes.color_err(shape, self.valid_shapes.clone())
|
|
||||||
}
|
|
||||||
|
|
||||||
token.span()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
@@ -1,109 +0,0 @@
use crate::hir::syntax_shape::{ExpandSyntax, FlatShape};
|
|
||||||
use crate::hir::{Expression, SpannedExpression};
|
|
||||||
use crate::hir::{RawNumber, TokensIterator};
|
|
||||||
use crate::parse::token_tree::{DecimalType, IntType};
|
|
||||||
use nu_errors::ParseError;
|
|
||||||
use nu_source::HasSpan;
|
|
||||||
|
|
||||||
#[derive(Debug, Copy, Clone)]
|
|
||||||
pub struct NumberExpressionShape;
|
|
||||||
|
|
||||||
impl ExpandSyntax for NumberExpressionShape {
|
|
||||||
type Output = Result<SpannedExpression, ParseError>;
|
|
||||||
|
|
||||||
fn name(&self) -> &'static str {
|
|
||||||
"number"
|
|
||||||
}
|
|
||||||
|
|
||||||
fn expand<'a, 'b>(
|
|
||||||
&self,
|
|
||||||
token_nodes: &mut TokensIterator<'_>,
|
|
||||||
) -> Result<SpannedExpression, ParseError> {
|
|
||||||
let source = token_nodes.source();
|
|
||||||
|
|
||||||
token_nodes
|
|
||||||
.expand_syntax(NumberShape)
|
|
||||||
.map(|number| Expression::number(number.to_number(&source)).into_expr(number.span()))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug, Copy, Clone)]
|
|
||||||
pub struct IntExpressionShape;
|
|
||||||
|
|
||||||
impl ExpandSyntax for IntExpressionShape {
|
|
||||||
type Output = Result<SpannedExpression, ParseError>;
|
|
||||||
|
|
||||||
fn name(&self) -> &'static str {
|
|
||||||
"integer"
|
|
||||||
}
|
|
||||||
|
|
||||||
fn expand<'a, 'b>(
|
|
||||||
&self,
|
|
||||||
token_nodes: &mut TokensIterator<'_>,
|
|
||||||
) -> Result<SpannedExpression, ParseError> {
|
|
||||||
let source = token_nodes.source();
|
|
||||||
|
|
||||||
token_nodes.expand_token(IntType, |number| {
|
|
||||||
Ok((
|
|
||||||
FlatShape::Int,
|
|
||||||
Expression::number(number.to_number(&source)),
|
|
||||||
))
|
|
||||||
})
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug, Copy, Clone)]
|
|
||||||
pub struct IntShape;
|
|
||||||
|
|
||||||
impl ExpandSyntax for IntShape {
|
|
||||||
type Output = Result<RawNumber, ParseError>;
|
|
||||||
|
|
||||||
fn name(&self) -> &'static str {
|
|
||||||
"integer"
|
|
||||||
}
|
|
||||||
|
|
||||||
fn expand<'a, 'b>(
|
|
||||||
&self,
|
|
||||||
token_nodes: &'b mut TokensIterator<'a>,
|
|
||||||
) -> Result<RawNumber, ParseError> {
|
|
||||||
token_nodes.expand_token(IntType, |number| Ok((FlatShape::Int, number)))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug, Copy, Clone)]
|
|
||||||
pub struct DecimalShape;
|
|
||||||
|
|
||||||
impl ExpandSyntax for DecimalShape {
|
|
||||||
type Output = Result<RawNumber, ParseError>;
|
|
||||||
|
|
||||||
fn name(&self) -> &'static str {
|
|
||||||
"decimal"
|
|
||||||
}
|
|
||||||
|
|
||||||
fn expand<'a, 'b>(
|
|
||||||
&self,
|
|
||||||
token_nodes: &'b mut TokensIterator<'a>,
|
|
||||||
) -> Result<RawNumber, ParseError> {
|
|
||||||
token_nodes.expand_token(DecimalType, |number| Ok((FlatShape::Decimal, number)))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug, Copy, Clone)]
|
|
||||||
pub struct NumberShape;
|
|
||||||
|
|
||||||
impl ExpandSyntax for NumberShape {
|
|
||||||
type Output = Result<RawNumber, ParseError>;
|
|
||||||
|
|
||||||
fn name(&self) -> &'static str {
|
|
||||||
"decimal"
|
|
||||||
}
|
|
||||||
|
|
||||||
fn expand<'a, 'b>(
|
|
||||||
&self,
|
|
||||||
token_nodes: &'b mut TokensIterator<'a>,
|
|
||||||
) -> Result<RawNumber, ParseError> {
|
|
||||||
token_nodes
|
|
||||||
.expand_syntax(IntShape)
|
|
||||||
.or_else(|_| token_nodes.expand_syntax(DecimalShape))
|
|
||||||
}
|
|
||||||
}
@@ -1,86 +0,0 @@
use crate::hir::syntax_shape::{
|
|
||||||
expand_bare, expression::expand_file_path, BarePathShape, ExpandContext, ExpandSyntax,
|
|
||||||
ExternalWordShape, StringShape,
|
|
||||||
};
|
|
||||||
use crate::hir::{Expression, SpannedExpression};
|
|
||||||
use crate::parse::operator::EvaluationOperator;
|
|
||||||
use crate::{hir, hir::TokensIterator, Token};
|
|
||||||
use nu_errors::ParseError;
|
|
||||||
use nu_source::Span;
|
|
||||||
|
|
||||||
#[derive(Debug, Copy, Clone)]
|
|
||||||
pub struct PatternShape;
|
|
||||||
|
|
||||||
impl ExpandSyntax for PatternShape {
|
|
||||||
type Output = Result<SpannedExpression, ParseError>;
|
|
||||||
|
|
||||||
fn name(&self) -> &'static str {
|
|
||||||
"glob pattern"
|
|
||||||
}
|
|
||||||
|
|
||||||
fn expand<'a, 'b>(
|
|
||||||
&self,
|
|
||||||
token_nodes: &mut TokensIterator<'_>,
|
|
||||||
) -> Result<hir::SpannedExpression, ParseError> {
|
|
||||||
let (inner, outer) = token_nodes
|
|
||||||
.expand_syntax(BarePatternShape)
|
|
||||||
.or_else(|_| token_nodes.expand_syntax(BarePathShape))
|
|
||||||
.or_else(|_| token_nodes.expand_syntax(ExternalWordShape))
|
|
||||||
.map(|span| (span, span))
|
|
||||||
.or_else(|_| {
|
|
||||||
token_nodes
|
|
||||||
.expand_syntax(StringShape)
|
|
||||||
.map(|syntax| (syntax.inner, syntax.span))
|
|
||||||
})
|
|
||||||
.map_err(|_| token_nodes.err_next_token("glob pattern"))?;
|
|
||||||
|
|
||||||
Ok(file_pattern(inner, outer, token_nodes.context()))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fn file_pattern(body: Span, outer: Span, context: &ExpandContext) -> SpannedExpression {
|
|
||||||
let path = expand_file_path(body.slice(context.source), context);
|
|
||||||
Expression::pattern(path.to_string_lossy()).into_expr(outer)
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug, Copy, Clone)]
|
|
||||||
pub struct PatternExpressionShape;
|
|
||||||
|
|
||||||
impl ExpandSyntax for PatternExpressionShape {
|
|
||||||
type Output = Result<SpannedExpression, ParseError>;
|
|
||||||
|
|
||||||
fn name(&self) -> &'static str {
|
|
||||||
"pattern"
|
|
||||||
}
|
|
||||||
|
|
||||||
fn expand<'a, 'b>(
|
|
||||||
&self,
|
|
||||||
token_nodes: &'b mut TokensIterator<'a>,
|
|
||||||
) -> Result<SpannedExpression, ParseError> {
|
|
||||||
token_nodes.expand_syntax(BarePatternShape).map(|span| {
|
|
||||||
let path = expand_file_path(span.slice(&token_nodes.source()), token_nodes.context());
|
|
||||||
Expression::pattern(path.to_string_lossy()).into_expr(span)
|
|
||||||
})
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug, Copy, Clone)]
|
|
||||||
pub struct BarePatternShape;
|
|
||||||
|
|
||||||
impl ExpandSyntax for BarePatternShape {
|
|
||||||
type Output = Result<Span, ParseError>;
|
|
||||||
|
|
||||||
fn name(&self) -> &'static str {
|
|
||||||
"bare pattern"
|
|
||||||
}
|
|
||||||
|
|
||||||
fn expand<'a, 'b>(&self, token_nodes: &'b mut TokensIterator<'a>) -> Result<Span, ParseError> {
|
|
||||||
expand_bare(token_nodes, |token| match token.unspanned() {
|
|
||||||
Token::Bare
|
|
||||||
| Token::EvaluationOperator(EvaluationOperator::Dot)
|
|
||||||
| Token::GlobPattern => true,
|
|
||||||
|
|
||||||
_ => false,
|
|
||||||
})
|
|
||||||
}
|
|
||||||
}
@@ -1,47 +0,0 @@
use crate::hir::syntax_shape::{AnyExpressionStartShape, ExpandSyntax, FlatShape};
|
|
||||||
use crate::hir::TokensIterator;
|
|
||||||
use crate::hir::{Expression, SpannedExpression};
|
|
||||||
use crate::parse::token_tree::DotDotType;
|
|
||||||
use nu_errors::ParseError;
|
|
||||||
use nu_source::{HasSpan, Span};
|
|
||||||
|
|
||||||
#[derive(Debug, Copy, Clone)]
|
|
||||||
pub struct RangeShape;
|
|
||||||
|
|
||||||
impl ExpandSyntax for RangeShape {
|
|
||||||
type Output = Result<SpannedExpression, ParseError>;
|
|
||||||
|
|
||||||
fn name(&self) -> &'static str {
|
|
||||||
"range"
|
|
||||||
}
|
|
||||||
|
|
||||||
fn expand<'a, 'b>(
|
|
||||||
&self,
|
|
||||||
token_nodes: &'b mut TokensIterator<'a>,
|
|
||||||
) -> Result<SpannedExpression, ParseError> {
|
|
||||||
token_nodes.atomic_parse(|token_nodes| {
|
|
||||||
let left = token_nodes.expand_syntax(AnyExpressionStartShape)?;
|
|
||||||
let dotdot = token_nodes.expand_syntax(DotDotShape)?;
|
|
||||||
let right = token_nodes.expand_syntax(AnyExpressionStartShape)?;
|
|
||||||
|
|
||||||
let span = left.span.until(right.span);
|
|
||||||
|
|
||||||
Ok(Expression::range(left, dotdot, right).into_expr(span))
|
|
||||||
})
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug, Copy, Clone)]
|
|
||||||
struct DotDotShape;
|
|
||||||
|
|
||||||
impl ExpandSyntax for DotDotShape {
|
|
||||||
type Output = Result<Span, ParseError>;
|
|
||||||
|
|
||||||
fn name(&self) -> &'static str {
|
|
||||||
"dotdot"
|
|
||||||
}
|
|
||||||
|
|
||||||
fn expand<'a, 'b>(&self, token_nodes: &'b mut TokensIterator<'a>) -> Result<Span, ParseError> {
|
|
||||||
token_nodes.expand_token(DotDotType, |token| Ok((FlatShape::DotDot, token.span())))
|
|
||||||
}
|
|
||||||
}
@@ -1,103 +0,0 @@
use crate::hir::syntax_shape::{ExpandSyntax, FlatShape, NumberShape, VariableShape};
|
|
||||||
use crate::hir::TokensIterator;
|
|
||||||
use crate::hir::{Expression, SpannedExpression};
|
|
||||||
use crate::parse::token_tree::{BareType, StringType};
|
|
||||||
use nu_errors::ParseError;
|
|
||||||
use nu_source::{b, DebugDocBuilder, HasSpan, PrettyDebugWithSource, Span};
|
|
||||||
|
|
||||||
#[derive(Debug, Copy, Clone)]
|
|
||||||
pub struct CoerceStringShape;
|
|
||||||
|
|
||||||
impl ExpandSyntax for CoerceStringShape {
|
|
||||||
type Output = Result<SpannedExpression, ParseError>;
|
|
||||||
|
|
||||||
fn name(&self) -> &'static str {
|
|
||||||
"StringShape"
|
|
||||||
}
|
|
||||||
|
|
||||||
fn expand<'a, 'b>(
|
|
||||||
&self,
|
|
||||||
token_nodes: &'b mut TokensIterator<'a>,
|
|
||||||
) -> Result<SpannedExpression, ParseError> {
|
|
||||||
token_nodes
|
|
||||||
.expand_token(StringType, |(inner, outer)| {
|
|
||||||
Ok((
|
|
||||||
FlatShape::String,
|
|
||||||
Expression::string(inner).into_expr(outer),
|
|
||||||
))
|
|
||||||
})
|
|
||||||
.or_else(|_| {
|
|
||||||
token_nodes.expand_token(BareType, |span| {
|
|
||||||
Ok((FlatShape::String, Expression::string(span).into_expr(span)))
|
|
||||||
})
|
|
||||||
})
|
|
||||||
.or_else(|_| {
|
|
||||||
token_nodes
|
|
||||||
.expand_syntax(NumberShape)
|
|
||||||
.map(|number| Expression::string(number.span()).into_expr(number.span()))
|
|
||||||
})
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug, Copy, Clone)]
|
|
||||||
pub struct StringExpressionShape;
|
|
||||||
|
|
||||||
impl ExpandSyntax for StringExpressionShape {
|
|
||||||
type Output = Result<SpannedExpression, ParseError>;
|
|
||||||
|
|
||||||
fn name(&self) -> &'static str {
|
|
||||||
"string"
|
|
||||||
}
|
|
||||||
|
|
||||||
fn expand<'a, 'b>(
|
|
||||||
&self,
|
|
||||||
token_nodes: &'b mut TokensIterator<'a>,
|
|
||||||
) -> Result<SpannedExpression, ParseError> {
|
|
||||||
token_nodes.expand_syntax(VariableShape).or_else(|_| {
|
|
||||||
token_nodes.expand_token(StringType, |(inner, outer)| {
|
|
||||||
Ok((
|
|
||||||
FlatShape::String,
|
|
||||||
Expression::string(inner).into_expr(outer),
|
|
||||||
))
|
|
||||||
})
|
|
||||||
})
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug, Copy, Clone)]
|
|
||||||
pub struct StringSyntax {
|
|
||||||
pub inner: Span,
|
|
||||||
pub span: Span,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl HasSpan for StringSyntax {
|
|
||||||
fn span(&self) -> Span {
|
|
||||||
self.span
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl PrettyDebugWithSource for StringSyntax {
|
|
||||||
fn pretty_debug(&self, source: &str) -> DebugDocBuilder {
|
|
||||||
b::primitive(self.span.slice(source))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug, Copy, Clone)]
|
|
||||||
pub struct StringShape;
|
|
||||||
|
|
||||||
impl ExpandSyntax for StringShape {
|
|
||||||
type Output = Result<StringSyntax, ParseError>;
|
|
||||||
|
|
||||||
fn name(&self) -> &'static str {
|
|
||||||
"string"
|
|
||||||
}
|
|
||||||
|
|
||||||
fn expand<'a, 'b>(
|
|
||||||
&self,
|
|
||||||
token_nodes: &'b mut TokensIterator<'a>,
|
|
||||||
) -> Result<StringSyntax, ParseError> {
|
|
||||||
token_nodes.expand_token(StringType, |(inner, outer)| {
|
|
||||||
Ok((FlatShape::String, StringSyntax { inner, span: outer }))
|
|
||||||
})
|
|
||||||
}
|
|
||||||
}
@@ -1,156 +0,0 @@
use crate::hir::syntax_shape::flat_shape::FlatShape;
|
|
||||||
use crate::hir::syntax_shape::ExpandSyntax;
|
|
||||||
use crate::hir::TokensIterator;
|
|
||||||
use crate::hir::{Expression, SpannedExpression};
|
|
||||||
use crate::parse::number::RawNumber;
|
|
||||||
use crate::parse::token_tree::BareType;
|
|
||||||
use crate::parse::unit::Unit;
|
|
||||||
use nom::branch::alt;
|
|
||||||
use nom::bytes::complete::tag;
|
|
||||||
use nom::character::complete::digit1;
|
|
||||||
use nom::combinator::{all_consuming, opt, value};
|
|
||||||
use nom::IResult;
|
|
||||||
use nu_errors::ParseError;
|
|
||||||
use nu_source::{
|
|
||||||
b, DebugDocBuilder, HasSpan, PrettyDebugWithSource, Span, Spanned, SpannedItem, Text,
|
|
||||||
};
|
|
||||||
|
|
||||||
#[derive(Debug, Clone)]
|
|
||||||
pub struct UnitSyntax {
|
|
||||||
pub unit: (RawNumber, Spanned<Unit>),
|
|
||||||
pub span: Span,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl UnitSyntax {
|
|
||||||
pub fn into_expr(self, source: &Text) -> SpannedExpression {
|
|
||||||
let UnitSyntax {
|
|
||||||
unit: (number, unit),
|
|
||||||
span,
|
|
||||||
} = self;
|
|
||||||
|
|
||||||
Expression::size(number.to_number(source), *unit).into_expr(span)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl PrettyDebugWithSource for UnitSyntax {
|
|
||||||
fn pretty_debug(&self, source: &str) -> DebugDocBuilder {
|
|
||||||
b::typed(
|
|
||||||
"unit",
|
|
||||||
self.unit.0.pretty_debug(source) + b::space() + self.unit.1.pretty_debug(source),
|
|
||||||
)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl HasSpan for UnitSyntax {
|
|
||||||
fn span(&self) -> Span {
|
|
||||||
self.span
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug, Copy, Clone)]
|
|
||||||
pub struct UnitExpressionShape;
|
|
||||||
|
|
||||||
impl ExpandSyntax for UnitExpressionShape {
|
|
||||||
type Output = Result<SpannedExpression, ParseError>;
|
|
||||||
|
|
||||||
fn name(&self) -> &'static str {
|
|
||||||
"unit expression"
|
|
||||||
}
|
|
||||||
|
|
||||||
fn expand<'a, 'b>(
|
|
||||||
&self,
|
|
||||||
token_nodes: &'b mut TokensIterator<'a>,
|
|
||||||
) -> Result<SpannedExpression, ParseError> {
|
|
||||||
token_nodes
|
|
||||||
.expand_syntax(UnitShape)
|
|
||||||
.map(|unit| unit.into_expr(&token_nodes.source()))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug, Copy, Clone)]
|
|
||||||
pub struct UnitShape;
|
|
||||||
|
|
||||||
impl ExpandSyntax for UnitShape {
|
|
||||||
type Output = Result<UnitSyntax, ParseError>;
|
|
||||||
|
|
||||||
fn name(&self) -> &'static str {
|
|
||||||
"unit"
|
|
||||||
}
|
|
||||||
|
|
||||||
fn expand<'a, 'b>(
|
|
||||||
&self,
|
|
||||||
token_nodes: &'b mut TokensIterator<'a>,
|
|
||||||
) -> Result<UnitSyntax, ParseError> {
|
|
||||||
let source = token_nodes.source();
|
|
||||||
|
|
||||||
token_nodes.expand_token(BareType, |span| {
|
|
||||||
let unit = unit_size(span.slice(&source), span);
|
|
||||||
|
|
||||||
let (_, (number, unit)) = match unit {
|
|
||||||
Err(_) => return Err(ParseError::mismatch("unit", "word".spanned(span))),
|
|
||||||
Ok((number, unit)) => (number, unit),
|
|
||||||
};
|
|
||||||
|
|
||||||
Ok((
|
|
||||||
FlatShape::Size {
|
|
||||||
number: number.span(),
|
|
||||||
unit: unit.span,
|
|
||||||
},
|
|
||||||
UnitSyntax {
|
|
||||||
unit: (number, unit),
|
|
||||||
span,
|
|
||||||
},
|
|
||||||
))
|
|
||||||
})
|
|
||||||
}
|
|
||||||
}

fn unit_size(input: &str, bare_span: Span) -> IResult<&str, (RawNumber, Spanned<Unit>)> {
let (input, digits) = digit1(input)?;

let (input, dot) = opt(tag("."))(input)?;

let (input, number) = match dot {
Some(dot) => {
let (input, rest) = digit1(input)?;
(
input,
RawNumber::decimal(Span::new(
bare_span.start(),
bare_span.start() + digits.len() + dot.len() + rest.len(),
)),
)
}

None => (
input,
RawNumber::int(Span::new(
bare_span.start(),
bare_span.start() + digits.len(),
)),
),
};

let (input, unit) = all_consuming(alt((
value(Unit::Byte, alt((tag("B"), tag("b")))),
value(Unit::Kilobyte, alt((tag("KB"), tag("kb"), tag("Kb")))),
value(Unit::Megabyte, alt((tag("MB"), tag("mb"), tag("Mb")))),
value(Unit::Gigabyte, alt((tag("GB"), tag("gb"), tag("Gb")))),
value(Unit::Terabyte, alt((tag("TB"), tag("tb"), tag("Tb")))),
value(Unit::Petabyte, alt((tag("PB"), tag("pb"), tag("Pb")))),
value(Unit::Second, tag("s")),
value(Unit::Minute, tag("m")),
value(Unit::Hour, tag("h")),
value(Unit::Day, tag("d")),
value(Unit::Week, tag("w")),
value(Unit::Month, tag("M")),
value(Unit::Year, tag("y")),
)))(input)?;

let start_span = number.span().end();

Ok((
input,
(number, unit.spanned(Span::new(start_span, bare_span.end()))),
))
}
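
// Illustrative sketch (not part of the original commit): what `unit_size`
// above is expected to produce for a bare token such as "10kb" occupying
// bytes 0..4 of the source. The module and test names are hypothetical; the
// spans follow the arithmetic above (digits first, then the unit starting
// where the number ends).
#[cfg(test)]
mod unit_size_sketch {
    use super::*;

    #[test]
    fn splits_number_and_unit() {
        let (rest, (number, unit)) = unit_size("10kb", Span::new(0, 4)).unwrap();

        // The whole bare word is consumed.
        assert_eq!(rest, "");

        // "10" becomes an int raw number covering bytes 0..2.
        match number {
            RawNumber::Int(_) => {}
            _ => panic!("expected an int raw number for \"10\""),
        }
        assert_eq!(number.span().start(), 0);
        assert_eq!(number.span().end(), 2);

        // "kb" becomes the kilobyte unit, spanned over bytes 2..4.
        match unit.item {
            Unit::Kilobyte => {}
            _ => panic!("expected Unit::Kilobyte for \"kb\""),
        }
        assert_eq!(unit.span.start(), 2);
        assert_eq!(unit.span.end(), 4);
    }
}
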
@@ -1,632 +0,0 @@
use crate::hir::syntax_shape::{
|
|
||||||
AnyExpressionShape, BareShape, ExpandSyntax, FlatShape, IntShape, ParseError, StringShape,
|
|
||||||
WhitespaceShape,
|
|
||||||
};
|
|
||||||
use crate::hir::{Expression, SpannedExpression, TokensIterator};
|
|
||||||
use crate::parse::token_tree::{CompareOperatorType, DotDotType, DotType, ItVarType, VarType};
|
|
||||||
use crate::{hir, CompareOperator};
|
|
||||||
use nu_protocol::{PathMember, ShellTypeName};
|
|
||||||
use nu_source::{
|
|
||||||
b, DebugDocBuilder, HasSpan, PrettyDebug, PrettyDebugWithSource, Span, Spanned, SpannedItem,
|
|
||||||
Tag, Tagged, TaggedItem, Text,
|
|
||||||
};
|
|
||||||
use num_bigint::BigInt;
|
|
||||||
use serde::{Deserialize, Serialize};
|
|
||||||
use std::str::FromStr;
|
|
||||||
|
|
||||||
#[derive(Debug, Copy, Clone)]
|
|
||||||
pub struct VariablePathShape;
|
|
||||||
|
|
||||||
impl ExpandSyntax for VariablePathShape {
|
|
||||||
type Output = Result<SpannedExpression, ParseError>;
|
|
||||||
|
|
||||||
fn name(&self) -> &'static str {
|
|
||||||
"variable path"
|
|
||||||
}
|
|
||||||
|
|
||||||
fn expand<'a, 'b>(
|
|
||||||
&self,
|
|
||||||
token_nodes: &mut TokensIterator<'_>,
|
|
||||||
) -> Result<SpannedExpression, ParseError> {
|
|
||||||
// 1. let the head be the first token, expecting a variable
|
|
||||||
// 2. let the tail be an empty list of members
|
|
||||||
// 3. while the next token (excluding ws) is a dot:
// 1. consume the dot
|
|
||||||
// 2. consume the next token as a member and push it onto tail
|
|
||||||
|
|
||||||
let head = token_nodes.expand_syntax(VariableShape)?;
|
|
||||||
let start = head.span;
|
|
||||||
let mut end = start;
|
|
||||||
let mut tail: Vec<PathMember> = vec![];
|
|
||||||
|
|
||||||
loop {
|
|
||||||
if token_nodes.expand_syntax(DotShape).is_err() {
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
|
|
||||||
let member = token_nodes.expand_syntax(MemberShape)?;
|
|
||||||
let member = member.to_path_member(&token_nodes.source());
|
|
||||||
|
|
||||||
end = member.span;
|
|
||||||
tail.push(member);
|
|
||||||
}
|
|
||||||
|
|
||||||
Ok(Expression::path(head, tail).into_expr(start.until(end)))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug, Copy, Clone)]
|
|
||||||
pub struct PathTailShape;
|
|
||||||
|
|
||||||
#[derive(Debug, Clone)]
|
|
||||||
pub struct PathTailSyntax {
|
|
||||||
pub tail: Vec<PathMember>,
|
|
||||||
pub span: Span,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl HasSpan for PathTailSyntax {
|
|
||||||
fn span(&self) -> Span {
|
|
||||||
self.span
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl PrettyDebug for PathTailSyntax {
|
|
||||||
fn pretty(&self) -> DebugDocBuilder {
|
|
||||||
b::typed("tail", b::intersperse(self.tail.iter(), b::space()))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl ExpandSyntax for PathTailShape {
|
|
||||||
type Output = Result<PathTailSyntax, ParseError>;
|
|
||||||
|
|
||||||
fn name(&self) -> &'static str {
|
|
||||||
"path continuation"
|
|
||||||
}
|
|
||||||
|
|
||||||
fn expand<'a, 'b>(
|
|
||||||
&self,
|
|
||||||
token_nodes: &'b mut TokensIterator<'a>,
|
|
||||||
) -> Result<PathTailSyntax, ParseError> {
|
|
||||||
let mut end: Option<Span> = None;
|
|
||||||
let mut tail: Vec<PathMember> = vec![];
|
|
||||||
|
|
||||||
loop {
|
|
||||||
if token_nodes.expand_syntax(DotShape).is_err() {
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
|
|
||||||
let member = token_nodes.expand_syntax(MemberShape)?;
|
|
||||||
let member = member.to_path_member(&token_nodes.source());
|
|
||||||
end = Some(member.span);
|
|
||||||
tail.push(member);
|
|
||||||
}
|
|
||||||
|
|
||||||
match end {
|
|
||||||
None => Err(token_nodes.err_next_token("path continuation")),
|
|
||||||
|
|
||||||
Some(end) => Ok(PathTailSyntax { tail, span: end }),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug, Clone)]
|
|
||||||
pub struct ContinuationSyntax {
|
|
||||||
kind: ContinuationSyntaxKind,
|
|
||||||
span: Span,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl ContinuationSyntax {
|
|
||||||
pub fn append_to(self, expr: SpannedExpression) -> SpannedExpression {
|
|
||||||
match self.kind {
|
|
||||||
ContinuationSyntaxKind::Infix(op, right) => {
|
|
||||||
let span = expr.span.until(right.span);
|
|
||||||
Expression::infix(expr, op, right).into_expr(span)
|
|
||||||
}
|
|
||||||
ContinuationSyntaxKind::Dot(_, member) => {
|
|
||||||
let span = expr.span.until(member.span);
|
|
||||||
Expression::dot_member(expr, member).into_expr(span)
|
|
||||||
}
|
|
||||||
ContinuationSyntaxKind::DotDot(_, right) => {
|
|
||||||
let span = expr.span.until(right.span);
|
|
||||||
Expression::range(expr, span, right).into_expr(span)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl HasSpan for ContinuationSyntax {
|
|
||||||
fn span(&self) -> Span {
|
|
||||||
self.span
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl PrettyDebugWithSource for ContinuationSyntax {
|
|
||||||
fn pretty_debug(&self, source: &str) -> DebugDocBuilder {
|
|
||||||
b::typed("continuation", self.kind.pretty_debug(source))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug, Clone)]
|
|
||||||
pub enum ContinuationSyntaxKind {
|
|
||||||
Infix(Spanned<CompareOperator>, SpannedExpression),
|
|
||||||
Dot(Span, PathMember),
|
|
||||||
DotDot(Span, SpannedExpression),
|
|
||||||
}
|
|
||||||
|
|
||||||
impl PrettyDebugWithSource for ContinuationSyntaxKind {
|
|
||||||
fn pretty_debug(&self, source: &str) -> DebugDocBuilder {
|
|
||||||
match self {
|
|
||||||
ContinuationSyntaxKind::Infix(op, expr) => {
|
|
||||||
b::operator(op.span.slice(source)) + expr.pretty_debug(source)
|
|
||||||
}
|
|
||||||
ContinuationSyntaxKind::Dot(span, member) => {
|
|
||||||
b::operator(span.slice(source)) + member.pretty_debug(source)
|
|
||||||
}
|
|
||||||
ContinuationSyntaxKind::DotDot(span, expr) => {
|
|
||||||
b::operator(span.slice(source)) + expr.pretty_debug(source)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// An expression continuation
|
|
||||||
#[derive(Debug, Copy, Clone)]
|
|
||||||
pub struct ExpressionContinuationShape;
|
|
||||||
|
|
||||||
impl ExpandSyntax for ExpressionContinuationShape {
|
|
||||||
type Output = Result<ContinuationSyntax, ParseError>;
|
|
||||||
|
|
||||||
fn name(&self) -> &'static str {
|
|
||||||
"expression continuation"
|
|
||||||
}
|
|
||||||
|
|
||||||
fn expand<'a, 'b>(
|
|
||||||
&self,
|
|
||||||
token_nodes: &mut TokensIterator<'_>,
|
|
||||||
) -> Result<ContinuationSyntax, ParseError> {
|
|
||||||
token_nodes.atomic_parse(|token_nodes| {
|
|
||||||
// Try to expand a `.`
|
|
||||||
let dot = token_nodes.expand_syntax(DotShape);
|
|
||||||
|
|
||||||
if let Ok(dot) = dot {
|
|
||||||
// If a `.` was matched, it's a `Path`, and we expect a `Member` next
|
|
||||||
let syntax = token_nodes.expand_syntax(MemberShape)?;
|
|
||||||
let member = syntax.to_path_member(&token_nodes.source());
|
|
||||||
let member_span = member.span;
|
|
||||||
|
|
||||||
return Ok(ContinuationSyntax {
|
|
||||||
kind: ContinuationSyntaxKind::Dot(dot, member),
|
|
||||||
span: dot.until(member_span),
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
// Try to expand a `..`
|
|
||||||
let dot = token_nodes.expand_syntax(DotDotShape);
|
|
||||||
|
|
||||||
if let Ok(dotdot) = dot {
|
|
||||||
// If a `..` was matched, it's a `Range`, and we expect an `Expression` next
|
|
||||||
let expr = token_nodes.expand_syntax(AnyExpressionShape)?;
|
|
||||||
let expr_span = expr.span;
|
|
||||||
|
|
||||||
return Ok(ContinuationSyntax {
|
|
||||||
kind: ContinuationSyntaxKind::DotDot(dotdot, expr),
|
|
||||||
span: dotdot.until(expr_span),
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
// Otherwise, we expect an infix operator and an expression next
|
|
||||||
let (_, op, _) = token_nodes.expand_syntax(InfixShape)?.infix.item;
|
|
||||||
let next = token_nodes.expand_syntax(AnyExpressionShape)?;
|
|
||||||
let next_span = next.span;
|
|
||||||
|
|
||||||
Ok(ContinuationSyntax {
|
|
||||||
kind: ContinuationSyntaxKind::Infix(op.operator, next),
|
|
||||||
span: op.operator.span.until(next_span),
|
|
||||||
})
|
|
||||||
})
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug, Copy, Clone)]
pub struct VariableShape;

impl ExpandSyntax for VariableShape {
    type Output = Result<SpannedExpression, ParseError>;

    fn name(&self) -> &'static str {
        "variable"
    }

    fn expand<'a, 'b>(
        &self,
        token_nodes: &mut TokensIterator<'_>,
    ) -> Result<SpannedExpression, ParseError> {
        token_nodes
            .expand_token(ItVarType, |(inner, outer)| {
                Ok((
                    FlatShape::ItVariable,
                    Expression::it_variable(inner).into_expr(outer),
                ))
            })
            .or_else(|_| {
                token_nodes.expand_token(VarType, |(inner, outer)| {
                    Ok((
                        FlatShape::Variable,
                        Expression::variable(inner).into_expr(outer),
                    ))
                })
            })
    }
}
#[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)]
pub enum Member {
    String(/* outer */ Span, /* inner */ Span),
    Int(BigInt, Span),
    Bare(Span),
}

impl ShellTypeName for Member {
    fn type_name(&self) -> &'static str {
        match self {
            Member::String(_, _) => "string",
            Member::Int(_, _) => "integer",
            Member::Bare(_) => "word",
        }
    }
}

impl Member {
    pub fn int(span: Span, source: &Text) -> Member {
        if let Ok(big_int) = BigInt::from_str(span.slice(source)) {
            Member::Int(big_int, span)
        } else {
            unreachable!("Internal error: could not convert text to BigInt as expected")
        }
    }

    pub fn to_path_member(&self, source: &Text) -> PathMember {
        match self {
            Member::String(outer, inner) => PathMember::string(inner.slice(source), *outer),
            Member::Int(int, span) => PathMember::int(int.clone(), *span),
            Member::Bare(span) => PathMember::string(span.slice(source), *span),
        }
    }
}

impl PrettyDebugWithSource for Member {
    fn pretty_debug(&self, source: &str) -> DebugDocBuilder {
        match self {
            Member::String(outer, _) => b::value(outer.slice(source)),
            Member::Int(int, _) => b::value(format!("{}", int)),
            Member::Bare(span) => b::value(span.slice(source)),
        }
    }
}

impl HasSpan for Member {
    fn span(&self) -> Span {
        match self {
            Member::String(outer, ..) => *outer,
            Member::Int(_, int) => *int,
            Member::Bare(name) => *name,
        }
    }
}

impl Member {
    pub fn to_expr(&self) -> hir::SpannedExpression {
        match self {
            Member::String(outer, inner) => Expression::string(*inner).into_expr(outer),
            Member::Int(number, span) => Expression::number(number.clone()).into_expr(span),
            Member::Bare(span) => Expression::string(*span).into_expr(span),
        }
    }

    pub(crate) fn span(&self) -> Span {
        match self {
            Member::String(outer, _inner) => *outer,
            Member::Int(_, span) => *span,
            Member::Bare(span) => *span,
        }
    }
}
enum ColumnPathState {
    Initial,
    LeadingDot(Span),
    Dot(Span, Vec<Member>, Span),
    Member(Span, Vec<Member>),
    Error(ParseError),
}

impl ColumnPathState {
    pub fn dot(self, dot: Span) -> ColumnPathState {
        match self {
            ColumnPathState::Initial => ColumnPathState::LeadingDot(dot),
            ColumnPathState::LeadingDot(_) => {
                ColumnPathState::Error(ParseError::mismatch("column", "dot".spanned(dot)))
            }
            ColumnPathState::Dot(..) => {
                ColumnPathState::Error(ParseError::mismatch("column", "dot".spanned(dot)))
            }
            ColumnPathState::Member(tag, members) => ColumnPathState::Dot(tag, members, dot),
            ColumnPathState::Error(err) => ColumnPathState::Error(err),
        }
    }

    pub fn member(self, member: Member) -> ColumnPathState {
        match self {
            ColumnPathState::Initial => ColumnPathState::Member(member.span(), vec![member]),
            ColumnPathState::LeadingDot(tag) => {
                ColumnPathState::Member(tag.until(member.span()), vec![member])
            }

            ColumnPathState::Dot(tag, mut tags, _) => {
                ColumnPathState::Member(tag.until(member.span()), {
                    tags.push(member);
                    tags
                })
            }
            ColumnPathState::Member(..) => ColumnPathState::Error(ParseError::mismatch(
                "column",
                member.type_name().spanned(member.span()),
            )),
            ColumnPathState::Error(err) => ColumnPathState::Error(err),
        }
    }

    pub fn into_path(self, err: ParseError) -> Result<Tagged<Vec<Member>>, ParseError> {
        match self {
            ColumnPathState::Initial => Err(err),
            ColumnPathState::LeadingDot(dot) => {
                Err(ParseError::mismatch("column", "dot".spanned(dot)))
            }
            ColumnPathState::Dot(_tag, _members, dot) => {
                Err(ParseError::mismatch("column", "dot".spanned(dot)))
            }
            ColumnPathState::Member(tag, tags) => Ok(tags.tagged(tag)),
            ColumnPathState::Error(err) => Err(err),
        }
    }
}
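// A standalone sketch (not part of this commit) of the alternation that
// `ColumnPathState` enforces above: members and dots must alternate, a path must
// end in a member, and a lone leading dot is only rejected when the path is
// finalized. It uses only the standard library; all names are illustrative.
#[derive(Debug)]
enum PathState {
    Initial,
    LeadingDot,
    AfterDot(Vec<String>),
    AfterMember(Vec<String>),
    Error(String),
}

impl PathState {
    fn dot(self) -> PathState {
        match self {
            PathState::Initial => PathState::LeadingDot,
            // two dots in a row can never form a column path
            PathState::LeadingDot | PathState::AfterDot(_) => {
                PathState::Error("unexpected dot".into())
            }
            PathState::AfterMember(members) => PathState::AfterDot(members),
            err @ PathState::Error(_) => err,
        }
    }

    fn member(self, member: &str) -> PathState {
        match self {
            PathState::Initial | PathState::LeadingDot => {
                PathState::AfterMember(vec![member.into()])
            }
            PathState::AfterDot(mut members) => {
                members.push(member.into());
                PathState::AfterMember(members)
            }
            // two members in a row (no dot between them) is an error
            PathState::AfterMember(_) => PathState::Error("expected dot".into()),
            err @ PathState::Error(_) => err,
        }
    }

    fn into_path(self) -> Result<Vec<String>, String> {
        match self {
            PathState::AfterMember(members) => Ok(members),
            PathState::Initial | PathState::LeadingDot | PathState::AfterDot(_) => {
                Err("incomplete column path".into())
            }
            PathState::Error(err) => Err(err),
        }
    }
}

fn main() {
    // `foo.bar.baz` feeds the state machine member, dot, member, dot, member
    let state = PathState::Initial
        .member("foo")
        .dot()
        .member("bar")
        .dot()
        .member("baz");

    println!("{:?}", state.into_path()); // Ok(["foo", "bar", "baz"])
}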
#[derive(Debug, Copy, Clone)]
pub struct ColumnPathShape;

impl ExpandSyntax for ColumnPathShape {
    type Output = Result<ColumnPathSyntax, ParseError>;

    fn name(&self) -> &'static str {
        "column path"
    }

    fn expand<'a, 'b>(
        &self,
        token_nodes: &'b mut TokensIterator<'a>,
    ) -> Result<ColumnPathSyntax, ParseError> {
        let mut state = ColumnPathState::Initial;

        loop {
            let member = token_nodes.expand_syntax(MemberShape);

            match member {
                Err(_) => break,
                Ok(member) => state = state.member(member),
            }

            let dot = token_nodes.expand_syntax(DotShape);

            match dot {
                Err(_) => break,
                Ok(dot) => state = state.dot(dot),
            }
        }

        let path = state.into_path(token_nodes.err_next_token("column path"))?;

        Ok(ColumnPathSyntax {
            path: path.item,
            tag: path.tag,
        })
    }
}

#[derive(Debug, Clone)]
pub struct ColumnPathSyntax {
    pub path: Vec<Member>,
    pub tag: Tag,
}

impl HasSpan for ColumnPathSyntax {
    fn span(&self) -> Span {
        self.tag.span
    }
}

impl PrettyDebugWithSource for ColumnPathSyntax {
    fn pretty_debug(&self, source: &str) -> DebugDocBuilder {
        b::typed(
            "column path",
            b::intersperse(
                self.path.iter().map(|member| member.pretty_debug(source)),
                b::space(),
            ),
        )
    }
}
#[derive(Debug, Copy, Clone)]
pub struct MemberShape;

impl ExpandSyntax for MemberShape {
    type Output = Result<Member, ParseError>;

    fn name(&self) -> &'static str {
        "column"
    }

    fn expand<'a, 'b>(&self, token_nodes: &mut TokensIterator<'_>) -> Result<Member, ParseError> {
        if let Ok(int) = token_nodes.expand_syntax(IntMemberShape) {
            return Ok(int);
        }

        let bare = token_nodes.expand_syntax(BareShape);

        if let Ok(bare) = bare {
            return Ok(Member::Bare(bare.span()));
        }

        let string = token_nodes.expand_syntax(StringShape);

        if let Ok(syntax) = string {
            return Ok(Member::String(syntax.span, syntax.inner));
        }

        Err(token_nodes.peek().type_error("column"))
    }
}

#[derive(Debug, Copy, Clone)]
struct IntMemberShape;

impl ExpandSyntax for IntMemberShape {
    type Output = Result<Member, ParseError>;

    fn name(&self) -> &'static str {
        "integer member"
    }

    fn expand<'a, 'b>(
        &self,
        token_nodes: &'b mut TokensIterator<'a>,
    ) -> Result<Member, ParseError> {
        token_nodes
            .expand_syntax(IntShape)
            .map(|int| Member::int(int.span(), &token_nodes.source()))
            .or_else(|_| Err(token_nodes.err_next_token("integer member")))
    }
}
#[derive(Debug, Copy, Clone)]
pub struct DotShape;

#[derive(Debug, Copy, Clone)]
pub struct ColorableDotShape;

impl ExpandSyntax for DotShape {
    type Output = Result<Span, ParseError>;

    fn name(&self) -> &'static str {
        "dot"
    }

    fn expand<'a, 'b>(&self, token_nodes: &'b mut TokensIterator<'a>) -> Result<Span, ParseError> {
        token_nodes.expand_token(DotType, |token| Ok((FlatShape::Dot, token.span())))
    }
}

#[derive(Debug, Copy, Clone)]
struct DotDotShape;

impl ExpandSyntax for DotDotShape {
    type Output = Result<Span, ParseError>;

    fn name(&self) -> &'static str {
        "dotdot"
    }

    fn expand<'a, 'b>(&self, token_nodes: &'b mut TokensIterator<'a>) -> Result<Span, ParseError> {
        token_nodes.expand_token(DotDotType, |token| Ok((FlatShape::DotDot, token.span())))
    }
}
#[derive(Debug, Copy, Clone)]
pub struct InfixShape;

#[derive(Debug, Clone)]
pub struct InfixSyntax {
    infix: Spanned<(Span, InfixInnerSyntax, Span)>,
}

impl HasSpan for InfixSyntax {
    fn span(&self) -> Span {
        self.infix.span
    }
}

impl PrettyDebugWithSource for InfixSyntax {
    fn pretty_debug(&self, source: &str) -> DebugDocBuilder {
        self.infix.1.pretty_debug(source)
    }
}

impl ExpandSyntax for InfixShape {
    type Output = Result<InfixSyntax, ParseError>;

    fn name(&self) -> &'static str {
        "infix operator"
    }

    fn expand<'a, 'b>(
        &self,
        token_nodes: &'b mut TokensIterator<'a>,
    ) -> Result<InfixSyntax, ParseError> {
        token_nodes.atomic_parse(|token_nodes| {
            // An infix operator must be prefixed by whitespace
            let start = token_nodes.expand_syntax(WhitespaceShape)?;

            // Parse the next TokenNode after the whitespace
            let operator = token_nodes.expand_syntax(InfixInnerShape)?;

            // An infix operator must be followed by whitespace
            let end = token_nodes.expand_syntax(WhitespaceShape)?;

            Ok(InfixSyntax {
                infix: (start, operator, end).spanned(start.until(end)),
            })
        })
    }
}

#[derive(Debug, Clone)]
pub struct InfixInnerSyntax {
    pub operator: Spanned<CompareOperator>,
}

impl HasSpan for InfixInnerSyntax {
    fn span(&self) -> Span {
        self.operator.span
    }
}

impl PrettyDebug for InfixInnerSyntax {
    fn pretty(&self) -> DebugDocBuilder {
        self.operator.pretty()
    }
}

#[derive(Debug, Copy, Clone)]
pub struct InfixInnerShape;

impl ExpandSyntax for InfixInnerShape {
    type Output = Result<InfixInnerSyntax, ParseError>;

    fn name(&self) -> &'static str {
        "infix inner"
    }

    fn expand<'a, 'b>(
        &self,
        token_nodes: &'b mut TokensIterator<'a>,
    ) -> Result<InfixInnerSyntax, ParseError> {
        token_nodes.expand_token(CompareOperatorType, |(span, operator)| {
            Ok((
                FlatShape::CompareOperator,
                InfixInnerSyntax {
                    operator: operator.spanned(span),
                },
            ))
        })
    }
}
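// A standalone sketch (not part of this commit) of the span bookkeeping these
// shapes rely on: a continuation's span runs from the start of its first piece
// to the end of its last piece, which is what the `start.until(end)` calls above
// compute. `Span` here is a simplified stand-in, assuming `until` joins two
// spans walked left to right; it is not the nu_source type.
#[derive(Debug, Clone, Copy, PartialEq)]
struct Span {
    start: usize,
    end: usize,
}

impl Span {
    fn new(start: usize, end: usize) -> Span {
        Span { start, end }
    }

    // The smallest span covering `self` and `other`, assuming `other` ends at or
    // after `self` (the common case when consuming tokens left to right).
    fn until(self, other: Span) -> Span {
        Span::new(self.start, other.end)
    }
}

fn main() {
    let operator = Span::new(4, 6); // e.g. `==`
    let rhs = Span::new(7, 10); // e.g. `foo`
    assert_eq!(operator.until(rhs), Span::new(4, 10));
    println!("{:?}", operator.until(rhs));
}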
@ -1,193 +0,0 @@
use crate::parse::flag::{Flag, FlagKind};
use crate::parse::number::RawNumber;
use crate::parse::operator::EvaluationOperator;
use crate::parse::token_tree::{Delimiter, SpannedToken, Token};
use nu_protocol::ShellTypeName;
use nu_source::{DebugDocBuilder, HasSpan, PrettyDebug, Span, Spanned, SpannedItem, Text};

#[derive(Debug, Copy, Clone)]
pub enum FlatShape {
    OpenDelimiter(Delimiter),
    CloseDelimiter(Delimiter),
    Type,
    Identifier,
    ItVariable,
    Variable,
    CompareOperator,
    Dot,
    DotDot,
    InternalCommand,
    ExternalCommand,
    ExternalWord,
    BareMember,
    StringMember,
    String,
    Path,
    Word,
    Keyword,
    Pipe,
    GlobPattern,
    Flag,
    ShorthandFlag,
    Int,
    Decimal,
    Garbage,
    Whitespace,
    Separator,
    Comment,
    Size { number: Span, unit: Span },
}

#[derive(Debug, Clone)]
pub enum ShapeResult {
    Success(Spanned<FlatShape>),
    Fallback {
        shape: Spanned<FlatShape>,
        allowed: Vec<String>,
    },
}

impl HasSpan for ShapeResult {
    fn span(&self) -> Span {
        match self {
            ShapeResult::Success(shape) => shape.span,
            ShapeResult::Fallback { shape, .. } => shape.span,
        }
    }
}

impl PrettyDebug for FlatShape {
    fn pretty(&self) -> DebugDocBuilder {
        unimplemented!()
    }
}

#[derive(Debug, Copy, Clone)]
pub struct TraceShape {
    shape: FlatShape,
    span: Span,
}

impl ShellTypeName for TraceShape {
    fn type_name(&self) -> &'static str {
        self.shape.type_name()
    }
}

impl PrettyDebug for TraceShape {
    fn pretty(&self) -> DebugDocBuilder {
        self.shape.pretty()
    }
}

impl HasSpan for TraceShape {
    fn span(&self) -> Span {
        self.span
    }
}

impl ShellTypeName for FlatShape {
    fn type_name(&self) -> &'static str {
        match self {
            FlatShape::OpenDelimiter(Delimiter::Brace) => "open brace",
            FlatShape::OpenDelimiter(Delimiter::Paren) => "open paren",
            FlatShape::OpenDelimiter(Delimiter::Square) => "open square",
            FlatShape::CloseDelimiter(Delimiter::Brace) => "close brace",
            FlatShape::CloseDelimiter(Delimiter::Paren) => "close paren",
            FlatShape::CloseDelimiter(Delimiter::Square) => "close square",
            FlatShape::Type => "type",
            FlatShape::Identifier => "identifier",
            FlatShape::ItVariable => "$it",
            FlatShape::Variable => "variable",
            FlatShape::CompareOperator => "comparison",
            FlatShape::Dot => "dot",
            FlatShape::DotDot => "dotdot",
            FlatShape::InternalCommand => "internal command",
            FlatShape::ExternalCommand => "external command",
            FlatShape::ExternalWord => "external word",
            FlatShape::BareMember => "bare member",
            FlatShape::StringMember => "string member",
            FlatShape::String => "string",
            FlatShape::Path => "path",
            FlatShape::Word => "word",
            FlatShape::Keyword => "keyword",
            FlatShape::Pipe => "pipe",
            FlatShape::GlobPattern => "glob",
            FlatShape::Flag => "flag",
            FlatShape::ShorthandFlag => "shorthand flag",
            FlatShape::Int => "int",
            FlatShape::Decimal => "decimal",
            FlatShape::Garbage => "garbage",
            FlatShape::Whitespace => "whitespace",
            FlatShape::Separator => "separator",
            FlatShape::Comment => "comment",
            FlatShape::Size { .. } => "size",
        }
    }
}

impl FlatShape {
    pub fn into_trace_shape(self, span: Span) -> TraceShape {
        TraceShape { shape: self, span }
    }

    pub fn shapes(token: &SpannedToken, source: &Text) -> Vec<Spanned<FlatShape>> {
        let mut shapes = vec![];

        FlatShape::from(token, source, &mut shapes);
        shapes
    }

    fn from(token: &SpannedToken, source: &Text, shapes: &mut Vec<Spanned<FlatShape>>) {
        let span = token.span();

        match token.unspanned() {
            Token::Number(RawNumber::Int(_)) => shapes.push(FlatShape::Int.spanned(span)),
            Token::Number(RawNumber::Decimal(_)) => shapes.push(FlatShape::Decimal.spanned(span)),
            Token::EvaluationOperator(EvaluationOperator::Dot) => {
                shapes.push(FlatShape::Dot.spanned(span))
            }
            Token::EvaluationOperator(EvaluationOperator::DotDot) => {
                shapes.push(FlatShape::DotDot.spanned(span))
            }
            Token::CompareOperator(_) => shapes.push(FlatShape::CompareOperator.spanned(span)),
            Token::String(_) => shapes.push(FlatShape::String.spanned(span)),
            Token::Variable(v) if v.slice(source) == "it" => {
                shapes.push(FlatShape::ItVariable.spanned(span))
            }
            Token::Variable(_) => shapes.push(FlatShape::Variable.spanned(span)),
            Token::ItVariable(_) => shapes.push(FlatShape::ItVariable.spanned(span)),
            Token::ExternalCommand(_) => shapes.push(FlatShape::ExternalCommand.spanned(span)),
            Token::ExternalWord => shapes.push(FlatShape::ExternalWord.spanned(span)),
            Token::GlobPattern => shapes.push(FlatShape::GlobPattern.spanned(span)),
            Token::Bare => shapes.push(FlatShape::Word.spanned(span)),
            Token::Call(_) => unimplemented!(),
            Token::Delimited(v) => {
                shapes.push(FlatShape::OpenDelimiter(v.delimiter).spanned(v.spans.0));
                for token in &v.children {
                    FlatShape::from(token, source, shapes);
                }
                shapes.push(FlatShape::CloseDelimiter(v.delimiter).spanned(v.spans.1));
            }
            Token::Pipeline(pipeline) => {
                for part in &pipeline.parts {
                    if part.pipe.is_some() {
                        shapes.push(FlatShape::Pipe.spanned(part.span()));
                    }
                }
            }
            Token::Flag(Flag {
                kind: FlagKind::Longhand,
                ..
            }) => shapes.push(FlatShape::Flag.spanned(span)),
            Token::Flag(Flag {
                kind: FlagKind::Shorthand,
                ..
            }) => shapes.push(FlatShape::ShorthandFlag.spanned(span)),
            Token::Garbage => shapes.push(FlatShape::Garbage.spanned(span)),
            Token::Whitespace => shapes.push(FlatShape::Whitespace.spanned(span)),
            Token::Separator => shapes.push(FlatShape::Separator.spanned(span)),
            Token::Comment(_) => shapes.push(FlatShape::Comment.spanned(span)),
        }
    }
}
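// A standalone sketch (not part of this commit) of the flattening pass that
// `FlatShape::from` performs above: walk a nested token tree and emit a flat,
// ordered list of (shape, span) pairs, recursing into delimited nodes and
// bracketing their children with open/close delimiter shapes. It uses only the
// standard library; `Node` and `Shape` are illustrative stand-ins.
#[derive(Debug)]
enum Node {
    Word { start: usize, end: usize },
    Delimited { open: usize, close: usize, children: Vec<Node> },
}

#[derive(Debug)]
enum Shape {
    Word,
    OpenDelimiter,
    CloseDelimiter,
}

fn flatten(node: &Node, out: &mut Vec<(Shape, (usize, usize))>) {
    match node {
        Node::Word { start, end } => out.push((Shape::Word, (*start, *end))),
        Node::Delimited { open, close, children } => {
            out.push((Shape::OpenDelimiter, (*open, *open + 1)));
            for child in children {
                flatten(child, out);
            }
            out.push((Shape::CloseDelimiter, (*close, *close + 1)));
        }
    }
}

fn main() {
    // `[ab cd]` -> open delimiter, two words, close delimiter
    let tree = Node::Delimited {
        open: 0,
        close: 6,
        children: vec![Node::Word { start: 1, end: 3 }, Node::Word { start: 4, end: 6 }],
    };

    let mut shapes = vec![];
    flatten(&tree, &mut shapes);
    println!("{:?}", shapes);
}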
@ -1,602 +0,0 @@
|
|||||||
pub(crate) mod debug;
|
|
||||||
pub(crate) mod into_shapes;
|
|
||||||
pub(crate) mod pattern;
|
|
||||||
pub(crate) mod state;
|
|
||||||
|
|
||||||
use self::debug::ExpandTracer;
|
|
||||||
use self::into_shapes::IntoShapes;
|
|
||||||
use self::state::{Peeked, TokensIteratorState};
|
|
||||||
|
|
||||||
use crate::hir::syntax_shape::flat_shape::{FlatShape, ShapeResult};
|
|
||||||
use crate::hir::syntax_shape::{ExpandContext, ExpandSyntax, ExpressionListShape};
|
|
||||||
use crate::hir::SpannedExpression;
|
|
||||||
use crate::parse::token_tree::{BlockType, DelimitedNode, SpannedToken, SquareType, TokenType};
|
|
||||||
|
|
||||||
use getset::{Getters, MutGetters};
|
|
||||||
use nu_errors::ParseError;
|
|
||||||
use nu_protocol::SpannedTypeName;
|
|
||||||
use nu_source::{
|
|
||||||
HasFallibleSpan, HasSpan, IntoSpanned, PrettyDebugWithSource, Span, Spanned, SpannedItem, Text,
|
|
||||||
};
|
|
||||||
use std::borrow::Borrow;
|
|
||||||
use std::sync::Arc;
|
|
||||||
|
|
||||||
#[derive(Getters, MutGetters, Clone, Debug)]
|
|
||||||
pub struct TokensIterator<'content> {
|
|
||||||
#[get = "pub"]
|
|
||||||
#[get_mut = "pub"]
|
|
||||||
state: TokensIteratorState<'content>,
|
|
||||||
#[get = "pub"]
|
|
||||||
#[get_mut = "pub"]
|
|
||||||
expand_tracer: ExpandTracer<SpannedExpression>,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug)]
pub struct Checkpoint<'content, 'me> {
    pub(crate) iterator: &'me mut TokensIterator<'content>,
    index: usize,
    seen: indexmap::IndexSet<usize>,

    shape_start: usize,
    committed: bool,
}

impl<'content, 'me> Checkpoint<'content, 'me> {
    pub(crate) fn commit(mut self) {
        self.committed = true;
    }
}

impl<'content, 'me> std::ops::Drop for Checkpoint<'content, 'me> {
    fn drop(&mut self) {
        if !self.committed {
            let state = &mut self.iterator.state;

            state.index = self.index;
            state.seen = self.seen.clone();

            state.shapes.truncate(self.shape_start);
        }
    }
}
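// A standalone sketch (not part of this commit) of the drop-based rollback that
// `Checkpoint` implements above: snapshot the cursor, and if the checkpoint is
// dropped without being committed, restore the snapshot. It uses only the
// standard library; `State` and `Checkpoint` here are illustrative stand-ins.
struct State {
    index: usize,
}

struct Checkpoint<'a> {
    state: &'a mut State,
    saved_index: usize,
    committed: bool,
}

impl<'a> Checkpoint<'a> {
    fn new(state: &'a mut State) -> Checkpoint<'a> {
        let saved_index = state.index;
        Checkpoint { state, saved_index, committed: false }
    }

    fn commit(mut self) {
        self.committed = true;
    }
}

impl<'a> Drop for Checkpoint<'a> {
    fn drop(&mut self) {
        if !self.committed {
            // failed parse: rewind to where the checkpoint was taken
            self.state.index = self.saved_index;
        }
    }
}

fn main() {
    let mut state = State { index: 0 };

    {
        let checkpoint = Checkpoint::new(&mut state);
        checkpoint.state.index = 5; // pretend we consumed some tokens
        // not committed: dropping the checkpoint rolls the index back
    }
    assert_eq!(state.index, 0);

    {
        let checkpoint = Checkpoint::new(&mut state);
        checkpoint.state.index = 5;
        checkpoint.commit(); // committed: the new index sticks
    }
    assert_eq!(state.index, 5);
}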
|
|
||||||
// For parse_command
|
|
||||||
impl<'content> TokensIterator<'content> {
|
|
||||||
pub fn sort_shapes(&mut self) {
|
|
||||||
// This is pretty dubious, but it works. We should look into a better algorithm that doesn't end up requiring
|
|
||||||
// this solution.
|
|
||||||
|
|
||||||
self.state
|
|
||||||
.shapes
|
|
||||||
.sort_by(|a, b| a.span().start().cmp(&b.span().start()));
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Run a block of code, retrieving the shapes that were created during the block. This is
|
|
||||||
/// used by `parse_command` to associate shapes with a particular flag.
|
|
||||||
pub fn shapes_for<'me, T>(
|
|
||||||
&'me mut self,
|
|
||||||
block: impl FnOnce(&mut TokensIterator<'content>) -> Result<T, ParseError>,
|
|
||||||
) -> (Result<T, ParseError>, Vec<ShapeResult>) {
|
|
||||||
let index = self.state.index;
|
|
||||||
let mut shapes = vec![];
|
|
||||||
let mut errors = self.state.errors.clone();
|
|
||||||
|
|
||||||
let seen = self.state.seen.clone();
|
|
||||||
std::mem::swap(&mut self.state.shapes, &mut shapes);
|
|
||||||
std::mem::swap(&mut self.state.errors, &mut errors);
|
|
||||||
|
|
||||||
let checkpoint = Checkpoint {
|
|
||||||
iterator: self,
|
|
||||||
index,
|
|
||||||
seen,
|
|
||||||
committed: false,
|
|
||||||
shape_start: 0,
|
|
||||||
};
|
|
||||||
|
|
||||||
let value = block(checkpoint.iterator);
|
|
||||||
|
|
||||||
let value = match value {
|
|
||||||
Err(err) => {
|
|
||||||
drop(checkpoint);
|
|
||||||
std::mem::swap(&mut self.state.shapes, &mut shapes);
|
|
||||||
std::mem::swap(&mut self.state.errors, &mut errors);
|
|
||||||
return (Err(err), vec![]);
|
|
||||||
}
|
|
||||||
|
|
||||||
Ok(value) => value,
|
|
||||||
};
|
|
||||||
|
|
||||||
checkpoint.commit();
|
|
||||||
std::mem::swap(&mut self.state.shapes, &mut shapes);
|
|
||||||
|
|
||||||
(Ok(value), shapes)
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn extract<T>(&mut self, f: impl Fn(&SpannedToken) -> Option<T>) -> Option<(usize, T)> {
|
|
||||||
let state = &mut self.state;
|
|
||||||
|
|
||||||
for (i, item) in state.tokens.iter().enumerate() {
|
|
||||||
if state.seen.contains(&i) {
|
|
||||||
continue;
|
|
||||||
}
|
|
||||||
|
|
||||||
match f(item) {
|
|
||||||
None => {
|
|
||||||
continue;
|
|
||||||
}
|
|
||||||
Some(value) => {
|
|
||||||
state.seen.insert(i);
|
|
||||||
return Some((i, value));
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
self.move_to(0);
|
|
||||||
|
|
||||||
None
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn remove(&mut self, position: usize) {
|
|
||||||
self.state.seen.insert(position);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Delimited
|
|
||||||
impl<'content> TokensIterator<'content> {
|
|
||||||
pub fn block(&mut self) -> Result<Spanned<Vec<SpannedExpression>>, ParseError> {
|
|
||||||
self.expand_token_with_token_nodes(BlockType, |node, token_nodes| {
|
|
||||||
token_nodes.delimited(node)
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn square(&mut self) -> Result<Spanned<Vec<SpannedExpression>>, ParseError> {
|
|
||||||
self.expand_token_with_token_nodes(SquareType, |node, token_nodes| {
|
|
||||||
token_nodes.delimited(node)
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
fn delimited(
|
|
||||||
&mut self,
|
|
||||||
DelimitedNode {
|
|
||||||
delimiter,
|
|
||||||
spans,
|
|
||||||
children,
|
|
||||||
}: DelimitedNode,
|
|
||||||
) -> Result<(Vec<ShapeResult>, Spanned<Vec<SpannedExpression>>), ParseError> {
|
|
||||||
let span = spans.0.until(spans.1);
|
|
||||||
let (child_shapes, expr) = self.child(children[..].spanned(span), |token_nodes| {
|
|
||||||
token_nodes.expand_infallible(ExpressionListShape).exprs
|
|
||||||
});
|
|
||||||
|
|
||||||
let mut shapes = vec![ShapeResult::Success(
|
|
||||||
FlatShape::OpenDelimiter(delimiter).spanned(spans.0),
|
|
||||||
)];
|
|
||||||
shapes.extend(child_shapes);
|
|
||||||
shapes.push(ShapeResult::Success(
|
|
||||||
FlatShape::CloseDelimiter(delimiter).spanned(spans.1),
|
|
||||||
));
|
|
||||||
|
|
||||||
Ok((shapes, expr))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl<'content> TokensIterator<'content> {
|
|
||||||
pub fn new(
|
|
||||||
items: &'content [SpannedToken],
|
|
||||||
context: ExpandContext<'content>,
|
|
||||||
span: Span,
|
|
||||||
) -> TokensIterator<'content> {
|
|
||||||
let source = context.source();
|
|
||||||
|
|
||||||
TokensIterator {
|
|
||||||
state: TokensIteratorState {
|
|
||||||
tokens: items,
|
|
||||||
span,
|
|
||||||
index: 0,
|
|
||||||
seen: indexmap::IndexSet::new(),
|
|
||||||
shapes: vec![],
|
|
||||||
errors: indexmap::IndexMap::new(),
|
|
||||||
context: Arc::new(context),
|
|
||||||
},
|
|
||||||
expand_tracer: ExpandTracer::new("Expand Trace", source.clone()),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn len(&self) -> usize {
|
|
||||||
self.state.tokens.len()
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn is_empty(&self) -> bool {
|
|
||||||
self.state.tokens.is_empty()
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn source(&self) -> Text {
|
|
||||||
self.state.context.source().clone()
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn context(&self) -> &ExpandContext {
|
|
||||||
&self.state.context
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn color_result(&mut self, shape: ShapeResult) {
|
|
||||||
match shape {
|
|
||||||
ShapeResult::Success(shape) => self.color_shape(shape),
|
|
||||||
ShapeResult::Fallback { shape, allowed } => self.color_err(shape, allowed),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn color_shape(&mut self, shape: Spanned<FlatShape>) {
|
|
||||||
self.with_tracer(|_, tracer| tracer.add_shape(shape.into_trace_shape(shape.span)));
|
|
||||||
self.state.shapes.push(ShapeResult::Success(shape));
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn color_err(&mut self, shape: Spanned<FlatShape>, valid_shapes: Vec<String>) {
|
|
||||||
self.with_tracer(|_, tracer| tracer.add_err_shape(shape.into_trace_shape(shape.span)));
|
|
||||||
self.state.errors.insert(shape.span, valid_shapes.clone());
|
|
||||||
self.state.shapes.push(ShapeResult::Fallback {
|
|
||||||
shape,
|
|
||||||
allowed: valid_shapes,
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn color_shapes(&mut self, shapes: Vec<Spanned<FlatShape>>) {
|
|
||||||
self.with_tracer(|_, tracer| {
|
|
||||||
for shape in &shapes {
|
|
||||||
tracer.add_shape(shape.into_trace_shape(shape.span))
|
|
||||||
}
|
|
||||||
});
|
|
||||||
|
|
||||||
for shape in &shapes {
|
|
||||||
self.state.shapes.push(ShapeResult::Success(*shape));
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn child<'me, T>(
|
|
||||||
&'me mut self,
|
|
||||||
tokens: Spanned<&'me [SpannedToken]>,
|
|
||||||
block: impl FnOnce(&mut TokensIterator<'me>) -> T,
|
|
||||||
) -> (Vec<ShapeResult>, T) {
|
|
||||||
let mut shapes = vec![];
|
|
||||||
std::mem::swap(&mut shapes, &mut self.state.shapes);
|
|
||||||
|
|
||||||
let mut errors = self.state.errors.clone();
|
|
||||||
std::mem::swap(&mut errors, &mut self.state.errors);
|
|
||||||
|
|
||||||
let mut expand_tracer = ExpandTracer::new("Expand Trace", self.source());
|
|
||||||
std::mem::swap(&mut expand_tracer, &mut self.expand_tracer);
|
|
||||||
|
|
||||||
let mut iterator = TokensIterator {
|
|
||||||
state: TokensIteratorState {
|
|
||||||
tokens: tokens.item,
|
|
||||||
span: tokens.span,
|
|
||||||
index: 0,
|
|
||||||
seen: indexmap::IndexSet::new(),
|
|
||||||
shapes,
|
|
||||||
errors,
|
|
||||||
context: self.state.context.clone(),
|
|
||||||
},
|
|
||||||
expand_tracer,
|
|
||||||
};
|
|
||||||
|
|
||||||
let result = block(&mut iterator);
|
|
||||||
|
|
||||||
std::mem::swap(&mut iterator.state.shapes, &mut self.state.shapes);
|
|
||||||
std::mem::swap(&mut iterator.state.errors, &mut self.state.errors);
|
|
||||||
std::mem::swap(&mut iterator.expand_tracer, &mut self.expand_tracer);
|
|
||||||
|
|
||||||
(iterator.state.shapes, result)
|
|
||||||
}
|
|
||||||
|
|
||||||
fn with_tracer(
|
|
||||||
&mut self,
|
|
||||||
block: impl FnOnce(&mut TokensIteratorState, &mut ExpandTracer<SpannedExpression>),
|
|
||||||
) {
|
|
||||||
let state = &mut self.state;
|
|
||||||
let tracer = &mut self.expand_tracer;
|
|
||||||
|
|
||||||
block(state, tracer)
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn finish_tracer(&mut self) {
|
|
||||||
self.with_tracer(|_, tracer| tracer.finish())
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn atomic_parse<'me, T, E>(
|
|
||||||
&'me mut self,
|
|
||||||
block: impl FnOnce(&mut TokensIterator<'content>) -> Result<T, E>,
|
|
||||||
) -> Result<T, E> {
|
|
||||||
let state = &mut self.state;
|
|
||||||
|
|
||||||
let index = state.index;
|
|
||||||
|
|
||||||
let shape_start = state.shapes.len();
|
|
||||||
let seen = state.seen.clone();
|
|
||||||
|
|
||||||
let checkpoint = Checkpoint {
|
|
||||||
iterator: self,
|
|
||||||
index,
|
|
||||||
seen,
|
|
||||||
committed: false,
|
|
||||||
|
|
||||||
shape_start,
|
|
||||||
};
|
|
||||||
|
|
||||||
let value = block(checkpoint.iterator)?;
|
|
||||||
|
|
||||||
checkpoint.commit();
|
|
||||||
Ok(value)
|
|
||||||
}
|
|
||||||
|
|
||||||
fn eof_span(&self) -> Span {
|
|
||||||
Span::new(self.state.span.end(), self.state.span.end())
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn span_at_cursor(&mut self) -> Span {
|
|
||||||
let next = self.peek();
|
|
||||||
|
|
||||||
match next.node {
|
|
||||||
None => self.eof_span(),
|
|
||||||
Some(node) => node.span(),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn at_end(&self) -> bool {
|
|
||||||
next_index(&self.state).is_none()
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn move_to(&mut self, pos: usize) {
|
|
||||||
self.state.index = pos;
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Peek the next token in the token stream and return a `Peeked`.
|
|
||||||
///
|
|
||||||
/// # Example
|
|
||||||
///
|
|
||||||
/// ```ignore
|
|
||||||
/// let peeked = token_nodes.peek().not_eof();
|
|
||||||
/// let node = peeked.node;
|
|
||||||
/// match node.unspanned() {
|
|
||||||
/// Token::Whitespace => {
|
|
||||||
/// let node = peeked.commit();
|
|
||||||
/// return Ok(node.span)
|
|
||||||
/// }
|
|
||||||
/// other => return Err(ParseError::mismatch("whitespace", node.spanned_type_name()))
|
|
||||||
/// }
|
|
||||||
/// ```
|
|
||||||
pub fn peek<'me>(&'me mut self) -> Peeked<'content, 'me> {
|
|
||||||
let state = self.state();
|
|
||||||
let len = state.tokens.len();
|
|
||||||
let from = state.index;
|
|
||||||
|
|
||||||
let index = next_index(state);
|
|
||||||
|
|
||||||
let (node, to) = match index {
|
|
||||||
None => (None, len),
|
|
||||||
|
|
||||||
Some(to) => (Some(&state.tokens[to]), to + 1),
|
|
||||||
};
|
|
||||||
|
|
||||||
Peeked {
|
|
||||||
node,
|
|
||||||
iterator: self,
|
|
||||||
from,
|
|
||||||
to,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Produce an error corresponding to the next token.
|
|
||||||
///
|
|
||||||
/// If the next token is EOF, produce an `UnexpectedEof`. Otherwise, produce a `Mismatch`.
|
|
||||||
pub fn err_next_token(&mut self, expected: &'static str) -> ParseError {
|
|
||||||
match next_index(&self.state) {
|
|
||||||
None => ParseError::unexpected_eof(expected, self.eof_span()),
|
|
||||||
Some(index) => {
|
|
||||||
ParseError::mismatch(expected, self.state.tokens[index].spanned_type_name())
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fn expand_token_with_token_nodes<
|
|
||||||
'me,
|
|
||||||
T: 'me,
|
|
||||||
U: IntoSpanned<Output = V>,
|
|
||||||
V: HasFallibleSpan,
|
|
||||||
F: IntoShapes,
|
|
||||||
>(
|
|
||||||
&'me mut self,
|
|
||||||
expected: impl TokenType<Output = T>,
|
|
||||||
block: impl FnOnce(T, &mut Self) -> Result<(F, U), ParseError>,
|
|
||||||
) -> Result<V, ParseError> {
|
|
||||||
let desc = expected.desc();
|
|
||||||
|
|
||||||
let peeked = self.peek().not_eof(desc.borrow())?;
|
|
||||||
|
|
||||||
let (shapes, val) = {
|
|
||||||
let node = peeked.node;
|
|
||||||
let type_name = node.spanned_type_name();
|
|
||||||
|
|
||||||
let func = Box::new(|| Err(ParseError::mismatch(desc.clone().into_owned(), type_name)));
|
|
||||||
|
|
||||||
match expected.extract_token_value(node, &func) {
|
|
||||||
Err(err) => return Err(err),
|
|
||||||
Ok(value) => match block(value, peeked.iterator) {
|
|
||||||
Err(err) => return Err(err),
|
|
||||||
Ok((shape, val)) => {
|
|
||||||
let span = peeked.node.span();
|
|
||||||
peeked.commit();
|
|
||||||
(shape.into_shapes(span), val.into_spanned(span))
|
|
||||||
}
|
|
||||||
},
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
for shape in &shapes {
|
|
||||||
self.color_result(shape.clone());
|
|
||||||
}
|
|
||||||
|
|
||||||
Ok(val)
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Expand and color a single token. Takes an `impl TokenType` and produces
|
|
||||||
/// (() | FlatShape | Vec<Spanned<FlatShape>>, Output) (or an error).
|
|
||||||
///
|
|
||||||
/// If a single FlatShape is produced, it is annotated with the span of the
|
|
||||||
/// original token. Otherwise, each FlatShape in the list must already be
|
|
||||||
/// annotated.
|
|
||||||
pub fn expand_token<'me, T, U, V, F>(
|
|
||||||
&'me mut self,
|
|
||||||
expected: impl TokenType<Output = T>,
|
|
||||||
block: impl FnOnce(T) -> Result<(F, U), ParseError>,
|
|
||||||
) -> Result<V, ParseError>
|
|
||||||
where
|
|
||||||
T: 'me,
|
|
||||||
U: IntoSpanned<Output = V>,
|
|
||||||
V: HasFallibleSpan,
|
|
||||||
F: IntoShapes,
|
|
||||||
{
|
|
||||||
self.expand_token_with_token_nodes(expected, |value, _| block(value))
|
|
||||||
}
|
|
||||||
|
|
||||||
fn commit(&mut self, from: usize, to: usize) {
|
|
||||||
for index in from..to {
|
|
||||||
self.state.seen.insert(index);
|
|
||||||
}
|
|
||||||
|
|
||||||
self.state.index = to;
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn debug_remaining(&self) -> Vec<SpannedToken> {
|
|
||||||
let mut tokens: TokensIterator = self.clone();
|
|
||||||
tokens.move_to(0);
|
|
||||||
tokens.cloned().collect()
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Expand an `ExpandSyntax` whose output is a `Result`, producing either the shape's output
|
|
||||||
/// or a `ParseError`. If the token stream is at EOF, this method produces a ParseError
|
|
||||||
/// (`UnexpectedEof`).
|
|
||||||
///
|
|
||||||
/// You must use `expand_syntax` if the `Output` of the `ExpandSyntax` is a `Result`, but
|
|
||||||
/// it's difficult to model this in the Rust type system.
|
|
||||||
pub fn expand_syntax<U>(
|
|
||||||
&mut self,
|
|
||||||
shape: impl ExpandSyntax<Output = Result<U, ParseError>>,
|
|
||||||
) -> Result<U, ParseError>
|
|
||||||
where
|
|
||||||
U: std::fmt::Debug + HasFallibleSpan + PrettyDebugWithSource + Clone + 'static,
|
|
||||||
{
|
|
||||||
if self.at_end() {
|
|
||||||
self.with_tracer(|_, tracer| tracer.start(shape.name(), None));
|
|
||||||
self.with_tracer(|_, tracer| tracer.eof_frame());
|
|
||||||
return Err(ParseError::unexpected_eof(shape.name(), self.eof_span()));
|
|
||||||
}
|
|
||||||
|
|
||||||
let (result, added_shapes) = self.expand(shape);
|
|
||||||
|
|
||||||
match &result {
|
|
||||||
Ok(val) => self.finish_expand(val, added_shapes),
|
|
||||||
Err(err) => self.with_tracer(|_, tracer| tracer.failed(err)),
|
|
||||||
}
|
|
||||||
|
|
||||||
result
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Expand an `impl ExpandSyntax` and produce its Output. Use `expand_infallible` if the
|
|
||||||
/// `ExpandSyntax` cannot produce a `Result`. You must also use `expand_syntax` if EOF
|
|
||||||
/// is an error.
|
|
||||||
///
|
|
||||||
/// The purpose of `expand_infallible` is to clearly mark the infallible path through
|
|
||||||
/// an entire list of tokens that produces a fully colored version of the source.
|
|
||||||
///
|
|
||||||
/// If the `ExpandSyntax` can produce a `Result`, make sure to use `expand_syntax`,
|
|
||||||
/// which will correctly show the error in the trace.
|
|
||||||
pub fn expand_infallible<U>(&mut self, shape: impl ExpandSyntax<Output = U>) -> U
|
|
||||||
where
|
|
||||||
U: std::fmt::Debug + PrettyDebugWithSource + HasFallibleSpan + Clone + 'static,
|
|
||||||
{
|
|
||||||
let (result, added_shapes) = self.expand(shape);
|
|
||||||
|
|
||||||
self.finish_expand(&result, added_shapes);
|
|
||||||
|
|
||||||
result
|
|
||||||
}
|
|
||||||
|
|
||||||
fn finish_expand<V>(&mut self, val: &V, added_shapes: usize)
|
|
||||||
where
|
|
||||||
V: PrettyDebugWithSource + HasFallibleSpan + Clone,
|
|
||||||
{
|
|
||||||
self.with_tracer(|_, tracer| {
|
|
||||||
if val.maybe_span().is_some() || added_shapes > 0 {
|
|
||||||
tracer.add_result(val.clone());
|
|
||||||
}
|
|
||||||
|
|
||||||
tracer.success();
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn expand<U>(&mut self, shape: impl ExpandSyntax<Output = U>) -> (U, usize)
|
|
||||||
where
|
|
||||||
U: std::fmt::Debug + Clone + 'static,
|
|
||||||
{
|
|
||||||
let desc = shape.name();
|
|
||||||
self.with_tracer(|state, tracer| {
|
|
||||||
tracer.start(
|
|
||||||
desc,
|
|
||||||
next_index(state).map(|index| state.tokens[index].clone()),
|
|
||||||
)
|
|
||||||
});
|
|
||||||
|
|
||||||
let start_shapes = self.state.shapes.len();
|
|
||||||
let result = shape.expand(self);
|
|
||||||
let added_shapes = self.state.shapes.len() - start_shapes;
|
|
||||||
|
|
||||||
(result, added_shapes)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl<'content> Iterator for TokensIterator<'content> {
|
|
||||||
type Item = &'content SpannedToken;
|
|
||||||
|
|
||||||
fn next(&mut self) -> Option<Self::Item> {
|
|
||||||
next(self)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fn next_index(state: &TokensIteratorState) -> Option<usize> {
|
|
||||||
let mut to = state.index;
|
|
||||||
|
|
||||||
loop {
|
|
||||||
if to >= state.tokens.len() {
|
|
||||||
return None;
|
|
||||||
}
|
|
||||||
|
|
||||||
if state.seen.contains(&to) {
|
|
||||||
to += 1;
|
|
||||||
continue;
|
|
||||||
}
|
|
||||||
|
|
||||||
if to >= state.tokens.len() {
|
|
||||||
return None;
|
|
||||||
}
|
|
||||||
|
|
||||||
return Some(to);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fn next<'me, 'content>(
|
|
||||||
iterator: &'me mut TokensIterator<'content>,
|
|
||||||
) -> Option<&'content SpannedToken> {
|
|
||||||
let next = next_index(&iterator.state);
|
|
||||||
let len = iterator.len();
|
|
||||||
|
|
||||||
match next {
|
|
||||||
None => {
|
|
||||||
iterator.move_to(len);
|
|
||||||
None
|
|
||||||
}
|
|
||||||
|
|
||||||
Some(index) => {
|
|
||||||
iterator.move_to(index + 1);
|
|
||||||
Some(&iterator.state.tokens[index])
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
@ -1,36 +0,0 @@
#![allow(unused)]

pub(crate) mod color_trace;
pub(crate) mod expand_trace;

pub(crate) use self::color_trace::*;
pub(crate) use self::expand_trace::*;

use crate::hir::tokens_iterator::TokensIteratorState;
use nu_source::{PrettyDebug, PrettyDebugWithSource, Text};

#[derive(Debug)]
pub(crate) enum DebugIteratorToken {
    Seen(String),
    Unseen(String),
    Cursor,
}

pub(crate) fn debug_tokens(state: &TokensIteratorState, source: &str) -> Vec<DebugIteratorToken> {
    let mut out = vec![];

    for (i, token) in state.tokens.iter().enumerate() {
        if state.index == i {
            out.push(DebugIteratorToken::Cursor);
        }

        let msg = token.debug(source).to_string();
        if state.seen.contains(&i) {
            out.push(DebugIteratorToken::Seen(msg));
        } else {
            out.push(DebugIteratorToken::Unseen(msg));
        }
    }

    out
}
@ -1,363 +0,0 @@
|
|||||||
use crate::hir::syntax_shape::flat_shape::{FlatShape, ShapeResult};
|
|
||||||
use ansi_term::Color;
|
|
||||||
use log::trace;
|
|
||||||
use nu_errors::{ParseError, ShellError};
|
|
||||||
use nu_source::{Spanned, Text};
|
|
||||||
use ptree::*;
|
|
||||||
use std::borrow::Cow;
|
|
||||||
use std::io;
|
|
||||||
|
|
||||||
#[derive(Debug, Clone)]
|
|
||||||
pub enum FrameChild {
|
|
||||||
#[allow(unused)]
|
|
||||||
Shape(ShapeResult),
|
|
||||||
Frame(ColorFrame),
|
|
||||||
}
|
|
||||||
|
|
||||||
impl FrameChild {
|
|
||||||
fn colored_leaf_description(&self, text: &Text, f: &mut impl io::Write) -> io::Result<()> {
|
|
||||||
match self {
|
|
||||||
FrameChild::Shape(ShapeResult::Success(shape)) => write!(
|
|
||||||
f,
|
|
||||||
"{} {:?}",
|
|
||||||
Color::White
|
|
||||||
.bold()
|
|
||||||
.on(Color::Green)
|
|
||||||
.paint(format!("{:?}", shape.item)),
|
|
||||||
shape.span.slice(text)
|
|
||||||
),
|
|
||||||
|
|
||||||
FrameChild::Shape(ShapeResult::Fallback { shape, .. }) => write!(
|
|
||||||
f,
|
|
||||||
"{} {:?}",
|
|
||||||
Color::White
|
|
||||||
.bold()
|
|
||||||
.on(Color::Green)
|
|
||||||
.paint(format!("{:?}", shape.item)),
|
|
||||||
shape.span.slice(text)
|
|
||||||
),
|
|
||||||
|
|
||||||
FrameChild::Frame(frame) => frame.colored_leaf_description(f),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fn into_tree_child(self, text: &Text) -> TreeChild {
|
|
||||||
match self {
|
|
||||||
FrameChild::Shape(shape) => TreeChild::Shape(shape, text.clone()),
|
|
||||||
FrameChild::Frame(frame) => TreeChild::Frame(frame, text.clone()),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug, Clone)]
|
|
||||||
pub struct ColorFrame {
|
|
||||||
description: &'static str,
|
|
||||||
children: Vec<FrameChild>,
|
|
||||||
error: Option<ParseError>,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl ColorFrame {
|
|
||||||
fn colored_leaf_description(&self, f: &mut impl io::Write) -> io::Result<()> {
|
|
||||||
if self.has_only_error_descendents() {
|
|
||||||
if self.children.is_empty() {
|
|
||||||
write!(
|
|
||||||
f,
|
|
||||||
"{}",
|
|
||||||
Color::White.bold().on(Color::Red).paint(self.description)
|
|
||||||
)
|
|
||||||
} else {
|
|
||||||
write!(f, "{}", Color::Red.normal().paint(self.description))
|
|
||||||
}
|
|
||||||
} else if self.has_descendent_shapes() {
|
|
||||||
write!(f, "{}", Color::Green.normal().paint(self.description))
|
|
||||||
} else {
|
|
||||||
write!(f, "{}", Color::Yellow.bold().paint(self.description))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fn colored_description(&self, text: &Text, f: &mut impl io::Write) -> io::Result<()> {
|
|
||||||
if self.children.len() == 1 {
|
|
||||||
let child = &self.children[0];
|
|
||||||
|
|
||||||
self.colored_leaf_description(f)?;
|
|
||||||
write!(f, " -> ")?;
|
|
||||||
child.colored_leaf_description(text, f)
|
|
||||||
} else {
|
|
||||||
self.colored_leaf_description(f)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fn children_for_formatting(&self, text: &Text) -> Vec<TreeChild> {
|
|
||||||
if self.children.len() == 1 {
|
|
||||||
let child = &self.children[0];
|
|
||||||
|
|
||||||
match child {
|
|
||||||
FrameChild::Shape(_) => vec![],
|
|
||||||
FrameChild::Frame(frame) => frame.tree_children(text),
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
self.tree_children(text)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fn tree_children(&self, text: &Text) -> Vec<TreeChild> {
|
|
||||||
self.children
|
|
||||||
.clone()
|
|
||||||
.into_iter()
|
|
||||||
.map(|c| c.into_tree_child(text))
|
|
||||||
.collect()
|
|
||||||
}
|
|
||||||
|
|
||||||
fn add_shape(&mut self, shape: ShapeResult) {
|
|
||||||
self.children.push(FrameChild::Shape(shape))
|
|
||||||
}
|
|
||||||
|
|
||||||
fn has_child_shapes(&self) -> bool {
|
|
||||||
self.any_child_shape(|_| true)
|
|
||||||
}
|
|
||||||
|
|
||||||
fn any_child_shape(&self, predicate: impl Fn(&ShapeResult) -> bool) -> bool {
|
|
||||||
for item in &self.children {
|
|
||||||
if let FrameChild::Shape(shape) = item {
|
|
||||||
if predicate(shape) {
|
|
||||||
return true;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
false
|
|
||||||
}
|
|
||||||
|
|
||||||
fn any_child_frame(&self, predicate: impl Fn(&ColorFrame) -> bool) -> bool {
|
|
||||||
for item in &self.children {
|
|
||||||
if let FrameChild::Frame(frame) = item {
|
|
||||||
if predicate(frame) {
|
|
||||||
return true;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
false
|
|
||||||
}
|
|
||||||
|
|
||||||
fn has_descendent_shapes(&self) -> bool {
|
|
||||||
if self.has_child_shapes() {
|
|
||||||
true
|
|
||||||
} else {
|
|
||||||
self.any_child_frame(|frame| frame.has_descendent_shapes())
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fn has_only_error_descendents(&self) -> bool {
|
|
||||||
if self.children.is_empty() {
|
|
||||||
// if this frame has no children at all, it has only error descendents if this frame
|
|
||||||
// is an error
|
|
||||||
self.error.is_some()
|
|
||||||
} else {
|
|
||||||
// otherwise, it has only error descendents if all of its children terminate in an
|
|
||||||
// error (transitively)
|
|
||||||
|
|
||||||
let mut seen_error = false;
|
|
||||||
|
|
||||||
for child in &self.children {
|
|
||||||
match child {
|
|
||||||
// if this frame has at least one child shape, this frame has non-error descendents
|
|
||||||
FrameChild::Shape(_) => return false,
|
|
||||||
FrameChild::Frame(frame) => {
|
|
||||||
// if the child is a frame, it only counts as an error if all of its descendents are errors
|
|
||||||
if frame.has_only_error_descendents() {
|
|
||||||
seen_error = true;
|
|
||||||
} else {
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
seen_error
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug, Clone)]
|
|
||||||
pub enum TreeChild {
|
|
||||||
Shape(ShapeResult, Text),
|
|
||||||
Frame(ColorFrame, Text),
|
|
||||||
}
|
|
||||||
|
|
||||||
impl TreeChild {
|
|
||||||
fn colored_leaf_description(&self, f: &mut impl io::Write) -> io::Result<()> {
|
|
||||||
match self {
|
|
||||||
TreeChild::Shape(ShapeResult::Success(shape), text) => write!(
|
|
||||||
f,
|
|
||||||
"{} {:?}",
|
|
||||||
Color::White
|
|
||||||
.bold()
|
|
||||||
.on(Color::Green)
|
|
||||||
.paint(format!("{:?}", shape.item)),
|
|
||||||
shape.span.slice(text)
|
|
||||||
),
|
|
||||||
|
|
||||||
TreeChild::Shape(ShapeResult::Fallback { shape, .. }, text) => write!(
|
|
||||||
f,
|
|
||||||
"{} {:?}",
|
|
||||||
Color::White
|
|
||||||
.bold()
|
|
||||||
.on(Color::Green)
|
|
||||||
.paint(format!("{:?}", shape.item)),
|
|
||||||
shape.span.slice(text)
|
|
||||||
),
|
|
||||||
|
|
||||||
TreeChild::Frame(frame, _) => frame.colored_leaf_description(f),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl TreeItem for TreeChild {
|
|
||||||
type Child = TreeChild;
|
|
||||||
|
|
||||||
fn write_self<W: io::Write>(&self, f: &mut W, _style: &Style) -> io::Result<()> {
|
|
||||||
match self {
|
|
||||||
shape @ TreeChild::Shape(..) => shape.colored_leaf_description(f),
|
|
||||||
|
|
||||||
TreeChild::Frame(frame, text) => frame.colored_description(text, f),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fn children(&self) -> Cow<[Self::Child]> {
|
|
||||||
match self {
|
|
||||||
TreeChild::Shape(..) => Cow::Borrowed(&[]),
|
|
||||||
TreeChild::Frame(frame, text) => Cow::Owned(frame.children_for_formatting(text)),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug, Clone)]
|
|
||||||
pub struct ColorTracer {
|
|
||||||
frame_stack: Vec<ColorFrame>,
|
|
||||||
source: Text,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl ColorTracer {
|
|
||||||
pub fn print(self, source: Text) -> PrintTracer {
|
|
||||||
PrintTracer {
|
|
||||||
tracer: self,
|
|
||||||
source,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn new(source: Text) -> ColorTracer {
|
|
||||||
let root = ColorFrame {
|
|
||||||
description: "Trace",
|
|
||||||
children: vec![],
|
|
||||||
error: None,
|
|
||||||
};
|
|
||||||
|
|
||||||
ColorTracer {
|
|
||||||
frame_stack: vec![root],
|
|
||||||
source,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fn current_frame(&mut self) -> &mut ColorFrame {
|
|
||||||
let frames = &mut self.frame_stack;
|
|
||||||
let last = frames.len() - 1;
|
|
||||||
&mut frames[last]
|
|
||||||
}
|
|
||||||
|
|
||||||
fn pop_frame(&mut self) -> ColorFrame {
|
|
||||||
trace!(target: "nu::color_syntax", "Popping {:#?}", self);
|
|
||||||
|
|
||||||
let result = self.frame_stack.pop().expect("Can't pop root tracer frame");
|
|
||||||
|
|
||||||
if self.frame_stack.is_empty() {
|
|
||||||
panic!("Can't pop root tracer frame {:#?}", self);
|
|
||||||
}
|
|
||||||
|
|
||||||
self.debug();
|
|
||||||
|
|
||||||
result
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn start(&mut self, description: &'static str) {
|
|
||||||
let frame = ColorFrame {
|
|
||||||
description,
|
|
||||||
children: vec![],
|
|
||||||
error: None,
|
|
||||||
};
|
|
||||||
|
|
||||||
self.frame_stack.push(frame);
|
|
||||||
self.debug();
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn eof_frame(&mut self) {
|
|
||||||
let current = self.pop_frame();
|
|
||||||
self.current_frame()
|
|
||||||
.children
|
|
||||||
.push(FrameChild::Frame(current));
|
|
||||||
}
|
|
||||||
|
|
||||||
#[allow(unused)]
|
|
||||||
pub fn finish(&mut self) {
|
|
||||||
loop {
|
|
||||||
if self.frame_stack.len() == 1 {
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
|
|
||||||
let frame = self.pop_frame();
|
|
||||||
self.current_frame().children.push(FrameChild::Frame(frame));
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn add_shape(&mut self, shape: ShapeResult) {
|
|
||||||
self.current_frame().add_shape(shape);
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn success(&mut self) {
|
|
||||||
let current = self.pop_frame();
|
|
||||||
self.current_frame()
|
|
||||||
.children
|
|
||||||
.push(FrameChild::Frame(current));
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn failed(&mut self, error: &ParseError) {
|
|
||||||
let mut current = self.pop_frame();
|
|
||||||
current.error = Some(error.clone());
|
|
||||||
self.current_frame()
|
|
||||||
.children
|
|
||||||
.push(FrameChild::Frame(current));
|
|
||||||
}
|
|
||||||
|
|
||||||
fn debug(&self) {
|
|
||||||
trace!(target: "nu::color_syntax",
|
|
||||||
"frames = {:?}",
|
|
||||||
self.frame_stack
|
|
||||||
.iter()
|
|
||||||
.map(|f| f.description)
|
|
||||||
.collect::<Vec<_>>()
|
|
||||||
);
|
|
||||||
|
|
||||||
trace!(target: "nu::color_syntax", "{:#?}", self);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
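// A standalone sketch (not part of this commit) of the frame-stack pattern the
// tracers above use: `start` pushes a frame, and `success`/`failed` pop the
// current frame and attach it as a child of its parent, so a finished trace
// forms a tree of attempted parses. Only the standard library is used; all
// names here are illustrative, not nushell APIs.
#[derive(Debug)]
struct Frame {
    description: &'static str,
    children: Vec<Frame>,
    error: Option<String>,
}

#[derive(Debug)]
struct Tracer {
    stack: Vec<Frame>,
}

impl Tracer {
    fn new() -> Tracer {
        Tracer {
            stack: vec![Frame { description: "Trace", children: vec![], error: None }],
        }
    }

    fn start(&mut self, description: &'static str) {
        self.stack.push(Frame { description, children: vec![], error: None });
    }

    fn pop(&mut self) -> Frame {
        let frame = self.stack.pop().expect("can't pop the root frame");
        assert!(!self.stack.is_empty(), "can't pop the root frame");
        frame
    }

    fn success(&mut self) {
        let finished = self.pop();
        self.stack.last_mut().expect("root frame").children.push(finished);
    }

    fn failed(&mut self, error: &str) {
        let mut finished = self.pop();
        finished.error = Some(error.to_string());
        self.stack.last_mut().expect("root frame").children.push(finished);
    }
}

fn main() {
    let mut tracer = Tracer::new();
    tracer.start("column path");
    tracer.start("member");
    tracer.success(); // "member" becomes a child of "column path"
    tracer.failed("expected dot"); // "column path" becomes a failed child of the root
    println!("{:#?}", tracer.stack[0]);
}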
|
|
||||||
#[derive(Debug, Clone)]
|
|
||||||
pub struct PrintTracer {
|
|
||||||
tracer: ColorTracer,
|
|
||||||
source: Text,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl TreeItem for PrintTracer {
|
|
||||||
type Child = TreeChild;
|
|
||||||
|
|
||||||
fn write_self<W: io::Write>(&self, f: &mut W, style: &Style) -> io::Result<()> {
|
|
||||||
write!(f, "{}", style.paint("Color Trace"))
|
|
||||||
}
|
|
||||||
|
|
||||||
fn children(&self) -> Cow<[Self::Child]> {
|
|
||||||
Cow::Owned(vec![TreeChild::Frame(
|
|
||||||
self.tracer.frame_stack[0].clone(),
|
|
||||||
self.source.clone(),
|
|
||||||
)])
|
|
||||||
}
|
|
||||||
}
|
|
@ -1,494 +0,0 @@
|
|||||||
use crate::hir::syntax_shape::flat_shape::TraceShape;
|
|
||||||
use crate::hir::SpannedExpression;
|
|
||||||
use crate::parse::token_tree::SpannedToken;
|
|
||||||
use ansi_term::Color;
|
|
||||||
use log::trace;
use nu_errors::{ParseError, ParseErrorReason};
use nu_protocol::{ShellTypeName, SpannedTypeName};
use nu_source::{DebugDoc, PrettyDebug, PrettyDebugWithSource, Span, Spanned, Text};
use ptree::*;
use std::borrow::Cow;
use std::fmt::Debug;
use std::io;

#[derive(Debug, Clone)]
pub enum FrameChild<T: SpannedTypeName> {
    Expr(T),
    Shape(Result<TraceShape, TraceShape>),
    Frame(Box<ExprFrame<T>>),
    Result(DebugDoc),
}

fn err_desc(error: &ParseError) -> &'static str {
    match error.reason() {
        ParseErrorReason::ExtraTokens { .. } => "extra tokens",
        ParseErrorReason::Mismatch { .. } => "mismatch",
        ParseErrorReason::ArgumentError { .. } => "argument error",
        ParseErrorReason::Eof { .. } => "eof",
        ParseErrorReason::InternalError { .. } => "internal error",
    }
}

impl<T: SpannedTypeName> FrameChild<T> {
    fn get_error_leaf(&self) -> Option<(&'static str, &'static str)> {
        match self {
            FrameChild::Frame(frame) => {
                if let Some(error) = &frame.error {
                    if frame.children.is_empty() {
                        Some((frame.description, err_desc(error)))
                    } else {
                        None
                    }
                } else {
                    None
                }
            }
            _ => None,
        }
    }

    fn to_tree_child(&self, text: &Text) -> TreeChild {
        match self {
            FrameChild::Expr(expr) => TreeChild::OkExpr {
                source: expr.spanned_type_name().span,
                desc: expr.spanned_type_name().item,
                text: text.clone(),
            },
            FrameChild::Shape(Ok(shape)) => TreeChild::OkShape {
                source: shape.spanned_type_name().span,
                desc: shape.spanned_type_name().item,
                text: text.clone(),
                fallback: false,
            },
            FrameChild::Shape(Err(shape)) => TreeChild::OkShape {
                source: shape.spanned_type_name().span,
                desc: shape.spanned_type_name().item,
                text: text.clone(),
                fallback: true,
            },
            FrameChild::Result(result) => {
                let result = result.display();
                TreeChild::OkNonExpr(result)
            }
            FrameChild::Frame(frame) => {
                if let Some(err) = &frame.error {
                    if frame.children.is_empty() {
                        TreeChild::ErrorLeaf(
                            vec![(frame.description, err_desc(err))],
                            frame.token_desc(),
                        )
                    } else {
                        TreeChild::ErrorFrame(frame.to_tree_frame(text), text.clone())
                    }
                } else {
                    TreeChild::OkFrame(frame.to_tree_frame(text), text.clone())
                }
            }
        }
    }
}

#[derive(Debug, Clone)]
pub struct ExprFrame<T: SpannedTypeName> {
    description: &'static str,
    token: Option<SpannedToken>,
    children: Vec<FrameChild<T>>,
    error: Option<ParseError>,
}

impl<T: SpannedTypeName> ExprFrame<T> {
    fn token_desc(&self) -> &'static str {
        match &self.token {
            None => "EOF",
            Some(token) => token.type_name(),
        }
    }

    fn to_tree_frame(&self, text: &Text) -> TreeFrame {
        let mut children = vec![];
        let mut errors = vec![];

        for child in &self.children {
            if let Some(error_leaf) = child.get_error_leaf() {
                errors.push(error_leaf);
                continue;
            } else if !errors.is_empty() {
                children.push(TreeChild::ErrorLeaf(errors, self.token_desc()));
                errors = vec![];
            }

            children.push(child.to_tree_child(text));
        }

        if !errors.is_empty() {
            children.push(TreeChild::ErrorLeaf(errors, self.token_desc()));
        }

        TreeFrame {
            description: self.description,
            token_desc: self.token_desc(),
            children,
            error: self.error.clone(),
        }
    }

    fn add_return(&mut self, value: T) {
        self.children.push(FrameChild::Expr(value))
    }

    fn add_shape(&mut self, shape: TraceShape) {
        self.children.push(FrameChild::Shape(Ok(shape)))
    }

    fn add_err_shape(&mut self, shape: TraceShape) {
        self.children.push(FrameChild::Shape(Err(shape)))
    }

    fn add_result(&mut self, result: impl PrettyDebug) {
        self.children.push(FrameChild::Result(result.to_doc()))
    }
}

#[derive(Debug, Clone)]
pub struct TreeFrame {
    description: &'static str,
    token_desc: &'static str,
    children: Vec<TreeChild>,
    error: Option<ParseError>,
}

impl TreeFrame {
    fn leaf_description(&self, f: &mut impl io::Write) -> io::Result<()> {
        if self.children.len() == 1 {
            if self.error.is_some() {
                write!(f, "{}", Color::Red.normal().paint(self.description))?;
            } else if self.has_descendent_green() {
                write!(f, "{}", Color::Green.normal().paint(self.description))?;
            } else {
                write!(f, "{}", Color::Yellow.bold().paint(self.description))?;
            }

            write!(
                f,
                "{}",
                Color::White.bold().paint(&format!("({})", self.token_desc))
            )?;

            write!(f, " -> ")?;
            self.children[0].leaf_description(f)
        } else {
            if self.error.is_some() {
                if self.children.is_empty() {
                    write!(
                        f,
                        "{}",
                        Color::White.bold().on(Color::Red).paint(self.description)
                    )?
                } else {
                    write!(f, "{}", Color::Red.normal().paint(self.description))?
                }
            } else if self.has_descendent_green() {
                write!(f, "{}", Color::Green.normal().paint(self.description))?
            } else {
                write!(f, "{}", Color::Yellow.bold().paint(self.description))?
            }

            write!(
                f,
                "{}",
                Color::White.bold().paint(&format!("({})", self.token_desc))
            )
        }
    }

    fn has_child_green(&self) -> bool {
        self.children.iter().any(|item| match item {
            TreeChild::OkFrame(..) | TreeChild::ErrorFrame(..) | TreeChild::ErrorLeaf(..) => false,
            TreeChild::OkExpr { .. } | TreeChild::OkShape { .. } | TreeChild::OkNonExpr(..) => true,
        })
    }

    fn any_child_frame(&self, predicate: impl Fn(&TreeFrame) -> bool) -> bool {
        for item in &self.children {
            if let TreeChild::OkFrame(frame, ..) = item {
                if predicate(frame) {
                    return true;
                }
            }
        }

        false
    }

    fn has_descendent_green(&self) -> bool {
        if self.has_child_green() {
            true
        } else {
            self.any_child_frame(|frame| frame.has_child_green())
        }
    }

    fn children_for_formatting(&self, text: &Text) -> Vec<TreeChild> {
        if self.children.len() == 1 {
            let child: &TreeChild = &self.children[0];
            match child {
                TreeChild::OkExpr { .. }
                | TreeChild::OkShape { .. }
                | TreeChild::OkNonExpr(..)
                | TreeChild::ErrorLeaf(..) => vec![],
                TreeChild::OkFrame(frame, _) | TreeChild::ErrorFrame(frame, _) => {
                    frame.children_for_formatting(text)
                }
            }
        } else {
            self.children.clone()
        }
    }
}

#[derive(Debug, Clone)]
pub enum TreeChild {
    OkNonExpr(String),
    OkExpr {
        source: Span,
        desc: &'static str,
        text: Text,
    },
    OkShape {
        source: Span,
        desc: &'static str,
        text: Text,
        fallback: bool,
    },
    OkFrame(TreeFrame, Text),
    ErrorFrame(TreeFrame, Text),
    ErrorLeaf(Vec<(&'static str, &'static str)>, &'static str),
}

impl TreeChild {
    fn leaf_description(&self, f: &mut impl io::Write) -> io::Result<()> {
        match self {
            TreeChild::OkExpr { source, desc, text } => write!(
                f,
                "{} {} {}",
                Color::Cyan.normal().paint("returns"),
                Color::White.bold().on(Color::Green).paint(*desc),
                source.slice(text)
            ),

            TreeChild::OkShape {
                source,
                desc,
                text,
                fallback,
            } => write!(
                f,
                "{} {} {}",
                Color::Purple.normal().paint("paints"),
                Color::White.bold().on(Color::Green).paint(*desc),
                source.slice(text)
            ),

            TreeChild::OkNonExpr(result) => write!(
                f,
                "{} {}",
                Color::Cyan.normal().paint("returns"),
                Color::White
                    .bold()
                    .on(Color::Green)
                    .paint(result.to_string())
            ),

            TreeChild::ErrorLeaf(desc, token_desc) => {
                let last = desc.len() - 1;

                for (i, (desc, err_desc)) in desc.iter().enumerate() {
                    write!(f, "{}", Color::White.bold().on(Color::Red).paint(*desc))?;

                    write!(f, " {}", Color::White.bold().paint(*err_desc))?;

                    if i != last {
                        write!(f, "{}", Color::White.normal().paint(", "))?;
                    }
                }

                // write!(f, " {}", Color::Black.bold().paint(*token_desc))?;

                Ok(())
            }

            TreeChild::ErrorFrame(frame, _) | TreeChild::OkFrame(frame, _) => {
                frame.leaf_description(f)
            }
        }
    }
}

impl TreeItem for TreeChild {
    type Child = TreeChild;

    fn write_self<W: io::Write>(&self, f: &mut W, _style: &Style) -> io::Result<()> {
        self.leaf_description(f)
    }

    fn children(&self) -> Cow<[Self::Child]> {
        match self {
            TreeChild::OkExpr { .. }
            | TreeChild::OkShape { .. }
            | TreeChild::OkNonExpr(..)
            | TreeChild::ErrorLeaf(..) => Cow::Borrowed(&[]),
            TreeChild::OkFrame(frame, text) | TreeChild::ErrorFrame(frame, text) => {
                Cow::Owned(frame.children_for_formatting(text))
            }
        }
    }
}

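The tree rendering above hangs off ptree's TreeItem trait: write_self prints one node's label and children supplies the nodes drawn beneath it, which is exactly what the TreeChild impl (and, below, PrintTracer) provides. A minimal standalone sketch of the same hook, using a hypothetical Node type rather than the crate's types:

use ptree::{print_tree, Style, TreeItem};
use std::borrow::Cow;
use std::io;

// A hypothetical node type, mirroring how TreeChild/PrintTracer hook into ptree.
#[derive(Clone)]
struct Node {
    label: String,
    children: Vec<Node>,
}

impl TreeItem for Node {
    type Child = Node;

    // Render this node's own line; the style argument is ignored here,
    // just as TreeChild::write_self ignores it.
    fn write_self<W: io::Write>(&self, f: &mut W, _style: &Style) -> io::Result<()> {
        write!(f, "{}", self.label)
    }

    // Supply the nodes printed beneath this one.
    fn children(&self) -> Cow<[Self::Child]> {
        Cow::Borrowed(&self.children[..])
    }
}

fn main() {
    let tree = Node {
        label: "Trace".into(),
        children: vec![Node { label: "expression".into(), children: vec![] }],
    };
    let _ = print_tree(&tree);
}

print_tree walks children() recursively and handles the layout, so the tracer only has to describe its nodes, not how they are drawn.
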
#[derive(Debug, Clone)]
pub struct ExpandTracer<T: SpannedTypeName> {
    desc: &'static str,
    frame_stack: Vec<ExprFrame<T>>,
    source: Text,
}

impl<T: SpannedTypeName + Debug> ExpandTracer<T> {
    pub fn print(&self, source: Text) -> PrintTracer {
        let root = self.frame_stack[0].to_tree_frame(&source);

        PrintTracer {
            root,
            desc: self.desc,
            source,
        }
    }

    pub fn new(desc: &'static str, source: Text) -> ExpandTracer<T> {
        let root = ExprFrame {
            description: "Trace",
            children: vec![],
            token: None,
            error: None,
        };

        ExpandTracer {
            desc,
            frame_stack: vec![root],
            source,
        }
    }

    fn current_frame(&mut self) -> &mut ExprFrame<T> {
        let frames = &mut self.frame_stack;
        let last = frames.len() - 1;
        &mut frames[last]
    }

    fn pop_frame(&mut self) -> ExprFrame<T> {
        let result = self.frame_stack.pop().expect("Can't pop root tracer frame");

        if self.frame_stack.is_empty() {
            panic!("Can't pop root tracer frame");
        }

        self.debug();

        result
    }

    pub fn start(&mut self, description: &'static str, token: Option<SpannedToken>) {
        let frame = ExprFrame {
            description,
            children: vec![],
            token,
            error: None,
        };

        self.frame_stack.push(frame);
        self.debug();
    }

    pub fn add_return(&mut self, value: T) {
        self.current_frame().add_return(value);
    }

    pub fn add_shape(&mut self, shape: TraceShape) {
        self.current_frame().add_shape(shape);
    }

    pub fn add_err_shape(&mut self, shape: TraceShape) {
        self.current_frame().add_err_shape(shape);
    }

    pub fn finish(&mut self) {
        loop {
            if self.frame_stack.len() == 1 {
                break;
            }

            let frame = self.pop_frame();
            self.current_frame()
                .children
                .push(FrameChild::Frame(Box::new(frame)));
        }
    }

    pub fn eof_frame(&mut self) {
        let current = self.pop_frame();
        self.current_frame()
            .children
            .push(FrameChild::Frame(Box::new(current)));
    }

    pub fn add_result(&mut self, result: impl PrettyDebugWithSource) {
        let source = self.source.clone();
        self.current_frame().add_result(result.debuggable(source));
    }

    pub fn success(&mut self) {
        trace!(target: "parser::expand_syntax", "success {:#?}", self);

        let current = self.pop_frame();
        self.current_frame()
            .children
            .push(FrameChild::Frame(Box::new(current)));
    }

    pub fn failed(&mut self, error: &ParseError) {
        let mut current = self.pop_frame();
        current.error = Some(error.clone());
        self.current_frame()
            .children
            .push(FrameChild::Frame(Box::new(current)));
    }

    fn debug(&self) {
        trace!(target: "nu::parser::expand",
            "frames = {:?}",
            self.frame_stack
                .iter()
                .map(|f| f.description)
                .collect::<Vec<_>>()
        );

        trace!(target: "nu::parser::expand", "{:#?}", self);
    }
}

#[derive(Debug, Clone)]
pub struct PrintTracer {
    desc: &'static str,
    root: TreeFrame,
    source: Text,
}

impl TreeItem for PrintTracer {
    type Child = TreeChild;

    fn write_self<W: io::Write>(&self, f: &mut W, style: &Style) -> io::Result<()> {
        write!(f, "{}", style.paint(self.desc))
    }

    fn children(&self) -> Cow<[Self::Child]> {
        Cow::Borrowed(&self.root.children)
    }
}

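Putting the pieces together, ExpandTracer is a stack of ExprFrames: start pushes a frame before a shape is expanded, success or failed pops it back into its parent (failed also records the error), finish folds any leftover frames into the root, and print converts the root frame into the ptree-renderable PrintTracer. A rough sketch of that lifecycle, assuming the crate's SpannedToken, Text and ParseError types are in scope; the "expression" label is made up for illustration:

// Sketch only: how a parsing routine would typically drive the tracer.
fn trace_one_expansion(tracer: &mut ExpandTracer<SpannedToken>, source: Text, err: &ParseError) {
    tracer.start("expression", None); // push a frame for this expansion attempt
    // ... try to expand the shape here ...
    tracer.failed(err);               // or tracer.success() on the happy path

    tracer.finish();                  // fold any remaining frames back into the root
    let _ = ptree::print_tree(&tracer.clone().print(source));
}
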
@@ -1,56 +0,0 @@
use crate::hir::syntax_shape::flat_shape::{FlatShape, ShapeResult};
use nu_source::{Span, Spanned, SpannedItem};

pub struct FlatShapes {
    shapes: Vec<ShapeResult>,
}

impl<'a> IntoIterator for &'a FlatShapes {
    type Item = &'a ShapeResult;
    type IntoIter = std::slice::Iter<'a, ShapeResult>;

    fn into_iter(self) -> Self::IntoIter {
        self.shapes.iter()
    }
}

pub trait IntoShapes: 'static {
    fn into_shapes(self, span: Span) -> FlatShapes;
}

impl IntoShapes for FlatShape {
    fn into_shapes(self, span: Span) -> FlatShapes {
        FlatShapes {
            shapes: vec![ShapeResult::Success(self.spanned(span))],
        }
    }
}

impl IntoShapes for Vec<Spanned<FlatShape>> {
    fn into_shapes(self, _span: Span) -> FlatShapes {
        FlatShapes {
            shapes: self.into_iter().map(ShapeResult::Success).collect(),
        }
    }
}

impl IntoShapes for Vec<ShapeResult> {
    fn into_shapes(self, _span: Span) -> FlatShapes {
        FlatShapes { shapes: self }
    }
}

impl IntoShapes for () {
    fn into_shapes(self, _span: Span) -> FlatShapes {
        FlatShapes { shapes: vec![] }
    }
}

impl IntoShapes for Option<FlatShape> {
    fn into_shapes(self, span: Span) -> FlatShapes {
        match self {
            Option::None => ().into_shapes(span),
            Option::Some(shape) => shape.into_shapes(span),
        }
    }
}

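IntoShapes is a small conversion-trait pattern: each convenient input form (one shape, a vector of shapes, an option, or nothing at all) is normalized into a single FlatShapes value, so callers can hand back whatever is natural at their call site. A self-contained sketch of the same idea over plain std types (Item, Items and IntoItems are hypothetical stand-ins, not crate types):

// Normalize several convenient input forms into one canonical collection.
#[derive(Debug, PartialEq)]
struct Item(u32);

#[derive(Debug, PartialEq)]
struct Items {
    items: Vec<Item>,
}

trait IntoItems {
    fn into_items(self) -> Items;
}

impl IntoItems for Item {
    fn into_items(self) -> Items {
        Items { items: vec![self] }
    }
}

impl IntoItems for Vec<Item> {
    fn into_items(self) -> Items {
        Items { items: self }
    }
}

impl IntoItems for Option<Item> {
    fn into_items(self) -> Items {
        match self {
            None => Items { items: vec![] },
            Some(item) => item.into_items(),
        }
    }
}

fn main() {
    // All accepted forms produce the same canonical type.
    assert_eq!(Some(Item(1)).into_items(), Item(1).into_items());
    assert_eq!(None::<Item>.into_items(), Items { items: vec![] });
}

The trade-off is the usual one for conversion traits: call sites stay terse, at the cost of one impl per accepted input type.
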
@@ -1,30 +0,0 @@
use crate::parse::token_tree::{ParseErrorFn, SpannedToken, TokenType};
use nu_errors::ParseError;
use std::borrow::Cow;

pub struct Pattern<T> {
    parts: Vec<Box<dyn TokenType<Output = T>>>,
}

impl<T> TokenType for Pattern<T> {
    type Output = T;

    fn desc(&self) -> Cow<'static, str> {
        Cow::Borrowed("pattern")
    }

    fn extract_token_value(
        &self,
        token: &SpannedToken,
        err: ParseErrorFn<Self::Output>,
    ) -> Result<Self::Output, ParseError> {
        for part in &self.parts {
            match part.extract_token_value(token, err) {
                Err(_) => {}
                Ok(result) => return Ok(result),
            }
        }

        err()
    }
}

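Pattern<T> simply tries each alternative token type in order and returns the first Ok, falling back to the supplied error constructor when nothing matches. The same control flow in a self-contained sketch, with plain functions standing in for TokenType implementations (first_ok, parse_int and length are hypothetical):

// Try a list of alternatives in order; the first success wins,
// otherwise fall back to a single "no match" error.
fn first_ok<T>(alternatives: &[fn(&str) -> Result<T, String>], input: &str) -> Result<T, String> {
    for alt in alternatives {
        if let Ok(value) = alt(input) {
            return Ok(value);
        }
    }
    Err(format!("no alternative matched {:?}", input))
}

fn parse_int(s: &str) -> Result<i64, String> {
    s.parse::<i64>().map_err(|e| e.to_string())
}

fn length(s: &str) -> Result<i64, String> {
    Ok(s.len() as i64)
}

fn main() {
    // "42" parses as an integer; "abc" falls through to the length alternative.
    assert_eq!(first_ok(&[parse_int, length], "42"), Ok(42));
    assert_eq!(first_ok(&[parse_int, length], "abc"), Ok(3));
}
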
@@ -1,105 +0,0 @@
use crate::hir::syntax_shape::flat_shape::ShapeResult;
use crate::hir::syntax_shape::ExpandContext;
use crate::hir::tokens_iterator::TokensIterator;
use crate::parse::token_tree::SpannedToken;

use getset::Getters;
use nu_errors::ParseError;
use nu_protocol::SpannedTypeName;
use nu_source::Span;
use std::sync::Arc;

#[derive(Getters, Debug, Clone)]
pub struct TokensIteratorState<'content> {
    pub(crate) tokens: &'content [SpannedToken],
    pub(crate) span: Span,
    pub(crate) index: usize,
    pub(crate) seen: indexmap::IndexSet<usize>,
    #[get = "pub"]
    pub(crate) shapes: Vec<ShapeResult>,
    pub(crate) errors: indexmap::IndexMap<Span, Vec<String>>,
    pub(crate) context: Arc<ExpandContext<'content>>,
}

#[derive(Debug)]
pub struct Peeked<'content, 'me> {
    pub(crate) node: Option<&'content SpannedToken>,
    pub(crate) iterator: &'me mut TokensIterator<'content>,
    pub(crate) from: usize,
    pub(crate) to: usize,
}

impl<'content, 'me> Peeked<'content, 'me> {
    pub fn commit(&mut self) -> Option<&'content SpannedToken> {
        let Peeked {
            node,
            iterator,
            from,
            to,
        } = self;

        let node = (*node)?;
        iterator.commit(*from, *to);
        Some(node)
    }

    pub fn rollback(self) {}

    pub fn not_eof(self, expected: &str) -> Result<PeekedNode<'content, 'me>, ParseError> {
        match self.node {
            None => Err(ParseError::unexpected_eof(
                expected.to_string(),
                self.iterator.eof_span(),
            )),
            Some(node) => Ok(PeekedNode {
                node,
                iterator: self.iterator,
                from: self.from,
                to: self.to,
            }),
        }
    }

    pub fn type_error(&self, expected: &'static str) -> ParseError {
        peek_error(self.node, self.iterator.eof_span(), expected)
    }
}

#[derive(Debug)]
pub struct PeekedNode<'content, 'me> {
    pub(crate) node: &'content SpannedToken,
    pub(crate) iterator: &'me mut TokensIterator<'content>,
    from: usize,
    to: usize,
}

impl<'content, 'me> PeekedNode<'content, 'me> {
    pub fn commit(self) -> &'content SpannedToken {
        let PeekedNode {
            node,
            iterator,
            from,
            to,
        } = self;

        iterator.commit(from, to);
        node
    }

    pub fn rollback(self) {}

    pub fn type_error(&self, expected: &'static str) -> ParseError {
        peek_error(Some(self.node), self.iterator.eof_span(), expected)
    }
}

pub fn peek_error(
    node: Option<&SpannedToken>,
    eof_span: Span,
    expected: &'static str,
) -> ParseError {
    match node {
        None => ParseError::unexpected_eof(expected, eof_span),
        Some(node) => ParseError::mismatch(expected, node.spanned_type_name()),
    }
}

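Peeked and PeekedNode implement a small checkpoint protocol over TokensIterator: peeking records a from/to range without consuming anything, commit advances the iterator to that range, and rollback (or simply dropping the value) leaves it untouched, which is what lets shape expansion be tried speculatively. A self-contained sketch of the same commit-or-rollback idea over a plain index (Cursor and Peek are hypothetical):

// A cursor that can look ahead without consuming; consumption only happens on commit.
struct Cursor<'a> {
    items: &'a [&'a str],
    index: usize,
}

struct Peek<'a, 'c> {
    item: Option<&'a str>,
    cursor: &'c mut Cursor<'a>,
    to: usize,
}

impl<'a> Cursor<'a> {
    fn peek(&mut self) -> Peek<'a, '_> {
        let item = self.items.get(self.index).copied();
        let to = self.index + 1;
        Peek { item, cursor: self, to }
    }
}

impl<'a, 'c> Peek<'a, 'c> {
    // Accept the peeked item and advance the cursor past it.
    fn commit(self) -> Option<&'a str> {
        let item = self.item?;
        self.cursor.index = self.to;
        Some(item)
    }

    // Dropping the peek without committing acts as a rollback.
    fn rollback(self) {}
}

fn main() {
    let items = ["ls", "|", "sort-by"];
    let mut cursor = Cursor { items: &items, index: 0 };

    cursor.peek().rollback();                       // look ahead, change nothing
    assert_eq!(cursor.index, 0);

    assert_eq!(cursor.peek().commit(), Some("ls")); // look ahead and consume
    assert_eq!(cursor.index, 1);
}
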
@@ -1,95 +1,16 @@
-#[macro_use]
-pub mod macros;
-
-pub mod commands;
-pub mod hir;
-pub mod parse;
-pub mod parse_command;
-
-#[cfg(test)]
-pub mod test_support;
-
-pub use crate::commands::classified::{
-    external::ExternalCommand, internal::InternalCommand, ClassifiedCommand, ClassifiedPipeline,
-};
-pub use crate::hir::syntax_shape::flat_shape::{FlatShape, ShapeResult};
-pub use crate::hir::syntax_shape::{ExpandContext, ExpandSyntax, PipelineShape, SignatureRegistry};
-pub use crate::hir::tokens_iterator::TokensIterator;
-pub use crate::parse::files::Files;
-pub use crate::parse::flag::Flag;
-pub use crate::parse::operator::{CompareOperator, EvaluationOperator};
-pub use crate::parse::parser::Number;
-pub use crate::parse::parser::{module, pipeline};
-pub use crate::parse::token_tree::{Delimiter, SpannedToken, Token};
-pub use crate::parse::token_tree_builder::TokenTreeBuilder;
-
-pub mod utils {
-    pub use crate::parse::util::parse_line_with_separator;
-    pub use crate::parse::util::LineSeparatedShape;
-}
-
-use log::log_enabled;
-use nu_errors::ShellError;
-use nu_protocol::{errln, outln};
-use nu_source::{nom_input, HasSpan, Text};
-
-pub fn pipeline_shapes(line: &str, expand_context: ExpandContext) -> Vec<ShapeResult> {
-    let tokens = parse_pipeline(line);
-
-    match tokens {
-        Err(_) => vec![],
-        Ok(v) => {
-            let pipeline = match v.as_pipeline() {
-                Err(_) => return vec![],
-                Ok(v) => v,
-            };
-
-            let tokens = vec![Token::Pipeline(pipeline).into_spanned(v.span())];
-            let mut tokens = TokensIterator::new(&tokens[..], expand_context, v.span());
-
-            let shapes = {
-                // We just constructed a token list that only contains a pipeline, so it can't fail
-                let result = tokens.expand_infallible(PipelineShape);
-
-                if let Some(failure) = result.failed {
-                    errln!(
-                        "BUG: PipelineShape didn't find a pipeline :: {:#?}",
-                        failure
-                    );
-                }
-
-                tokens.finish_tracer();
-
-                tokens.state().shapes()
-            };
-
-            if log_enabled!(target: "nu::expand_syntax", log::Level::Debug) {
-                outln!("");
-                let _ = ptree::print_tree(&tokens.expand_tracer().clone().print(Text::from(line)));
-                outln!("");
-            }
-
-            shapes.clone()
-        }
-    }
-}
-
-pub fn parse_pipeline(input: &str) -> Result<SpannedToken, ShellError> {
-    let _ = pretty_env_logger::try_init();
-
-    match pipeline(nom_input(input)) {
-        Ok((_rest, val)) => Ok(val),
-        Err(err) => Err(ShellError::parse_error(err)),
-    }
-}
-
-pub use parse_pipeline as parse;
-
-pub fn parse_script(input: &str) -> Result<SpannedToken, ShellError> {
-    let _ = pretty_env_logger::try_init();
-
-    match module(nom_input(input)) {
-        Ok((_rest, val)) => Ok(val),
-        Err(err) => Err(ShellError::parse_error(err)),
-    }
-}
+mod files;
+pub mod hir;
+mod lite_parse;
+mod parse;
+mod shapes;
+mod signature;
+
+pub use crate::files::Files;
+pub use crate::lite_parse::{lite_parse, LitePipeline};
+pub use crate::parse::{
+    classify_pipeline, garbage, ClassifiedCommand, ClassifiedPipeline, Commands, InternalCommand,
+};
+pub use crate::shapes::shapes;
+pub use crate::signature::{
+    ExternalArg, ExternalArgs, ExternalCommand, Signature, SignatureRegistry,
+};

crates/nu-parser/src/lite_parse.rs (new file, 214 lines)
@@ -0,0 +1,214 @@
use std::iter::Peekable;
use std::str::CharIndices;

use nu_errors::ParseError;
use nu_source::{Span, Spanned, SpannedItem};

type Input<'t> = Peekable<CharIndices<'t>>;

#[derive(Debug, Clone)]
pub struct LiteCommand {
    pub name: Spanned<String>,
    pub args: Vec<Spanned<String>>,
}

impl LiteCommand {
    fn new(name: Spanned<String>) -> LiteCommand {
        LiteCommand { name, args: vec![] }
    }

    pub(crate) fn span(&self) -> Span {
        let start = self.name.span.start();
        let end = if let Some(x) = self.args.last() {
            x.span.end()
        } else {
            self.name.span.end()
        };

        Span::new(start, end)
    }
}

#[derive(Debug)]
pub struct LitePipeline {
    pub commands: Vec<LiteCommand>,
}

fn skip_whitespace(src: &mut Input) {
    while let Some((_, x)) = src.peek() {
        if x.is_whitespace() {
            let _ = src.next();
        } else {
            break;
        }
    }
}

fn bare(src: &mut Input, span_offset: usize) -> Result<Spanned<String>, ParseError> {
    skip_whitespace(src);

    let mut bare = String::new();
    let start_offset = if let Some((pos, _)) = src.peek() {
        *pos
    } else {
        0
    };

    let mut delimiter = ' ';
    let mut inside_quote = false;
    let mut block_level = vec![];

    for (_, c) in src {
        if inside_quote {
            if c == delimiter {
                inside_quote = false;
            }
        } else if c == '\'' || c == '"' {
            inside_quote = true;
            delimiter = c;
        } else if c == '[' {
            block_level.push(c);
        } else if c == ']' {
            if let Some('[') = block_level.last() {
                let _ = block_level.pop();
            }
        } else if c == '{' {
            block_level.push(c);
        } else if c == '}' {
            if let Some('{') = block_level.last() {
                let _ = block_level.pop();
            }
        } else if block_level.is_empty() && c.is_whitespace() {
            break;
        }
        bare.push(c);
    }

    let span = Span::new(
        start_offset + span_offset,
        start_offset + span_offset + bare.len(),
    );
    Ok(bare.spanned(span))
}

fn quoted(
    src: &mut Input,
    delimiter: char,
    span_offset: usize,
) -> Result<Spanned<String>, ParseError> {
    skip_whitespace(src);

    let mut quoted_string = String::new();
    let start_offset = if let Some((pos, _)) = src.peek() {
        *pos
    } else {
        0
    };

    let _ = src.next();

    let mut found_end = false;

    for (_, c) in src {
        if c != delimiter {
            quoted_string.push(c);
        } else {
            found_end = true;
            break;
        }
    }

    quoted_string.insert(0, delimiter);
    if found_end {
        quoted_string.push(delimiter);
    }

    let span = Span::new(
        start_offset + span_offset,
        start_offset + span_offset + quoted_string.len(),
    );
    Ok(quoted_string.spanned(span))
}

fn command(src: &mut Input, span_offset: usize) -> Result<LiteCommand, ParseError> {
    let command = bare(src, span_offset)?;
    if command.item.is_empty() {
        Err(ParseError::unexpected_eof(
            "unexpected end of input",
            command.span,
        ))
    } else {
        Ok(LiteCommand::new(command))
    }
}

fn pipeline(src: &mut Input, span_offset: usize) -> Result<LitePipeline, ParseError> {
    let mut commands = vec![];

    skip_whitespace(src);

    while src.peek().is_some() {
        // If there is content there, let's parse it

        let mut cmd = match command(src, span_offset) {
            Ok(cmd) => cmd,
            Err(e) => return Err(e),
        };

        loop {
            skip_whitespace(src);

            if let Some((_, c)) = src.peek() {
                // The first character tells us a lot about each argument
                match c {
                    '|' => {
                        // this is the end of this command
                        let _ = src.next();
                        break;
                    }
                    '"' | '\'' => {
                        let c = *c;
                        // quoted string
                        let arg = quoted(src, c, span_offset)?;
                        cmd.args.push(arg);
                    }
                    _ => {
                        // basic argument
                        let arg = bare(src, span_offset)?;
                        cmd.args.push(arg);
                    }
                }
            } else {
                break;
            }
        }
        commands.push(cmd);
        skip_whitespace(src);
    }

    Ok(LitePipeline { commands })
}

pub fn lite_parse(src: &str, span_offset: usize) -> Result<LitePipeline, ParseError> {
    pipeline(&mut src.char_indices().peekable(), span_offset)
}

#[test]
fn lite_simple_1() -> Result<(), ParseError> {
    let result = lite_parse("foo", 0)?;
    assert_eq!(result.commands.len(), 1);
    assert_eq!(result.commands[0].name.span.start(), 0);
    assert_eq!(result.commands[0].name.span.end(), 3);

    Ok(())
}

#[test]
fn lite_simple_offset() -> Result<(), ParseError> {
    let result = lite_parse("foo", 10)?;
    assert_eq!(result.commands.len(), 1);
    assert_eq!(result.commands[0].name.span.start(), 10);
    assert_eq!(result.commands[0].name.span.end(), 13);

    Ok(())
}

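lite_parse deliberately does very little: it splits the source into commands and whitespace-separated arguments, tracking quotes and []/{} nesting so quoted strings and blocks survive as single arguments, and it splits commands on unquoted pipes; the typed classification happens later against each command's signature. A small usage sketch in the style of the tests above (the pipeline text is made up):

#[test]
fn lite_quoted_and_piped() -> Result<(), ParseError> {
    // Two commands split on the unquoted pipe; the quoted argument keeps its spaces.
    let result = lite_parse("echo 'hello world' | trim", 0)?;

    assert_eq!(result.commands.len(), 2);
    assert_eq!(result.commands[0].name.item, "echo");
    assert_eq!(result.commands[0].args.len(), 1);
    assert_eq!(result.commands[0].args[0].item, "'hello world'");
    assert_eq!(result.commands[1].name.item, "trim");

    Ok(())
}
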
@@ -1,55 +0,0 @@
#[macro_export]
macro_rules! return_ok {
    ($expr:expr) => {
        match $expr {
            Ok(val) => return Ok(val),
            Err(_) => {}
        }
    };
}

#[cfg(test)]
macro_rules! equal_tokens {
    ($source:tt -> $tokens:expr) => {
        let result = apply(pipeline, "pipeline", $source);
        let (expected_tree, expected_source) = TokenTreeBuilder::build($tokens);

        if result != expected_tree {
            let debug_result = format!("{}", result.debug($source));
            let debug_expected = format!("{}", expected_tree.debug(&expected_source));

            if debug_result == debug_expected {
                assert_eq!(
                    result, expected_tree,
                    "NOTE: actual and expected had equivalent debug serializations, source={:?}, debug_expected={:?}",
                    $source,
                    debug_expected
                )
            } else {
                assert_eq!(debug_result, debug_expected)
            }
        }
    };

    (<$parser:tt> $source:tt -> $tokens:expr) => {
        let result = apply($parser, stringify!($parser), $source);

        let (expected_tree, expected_source) = TokenTreeBuilder::build($tokens);

        if result != expected_tree {
            let debug_result = format!("{}", result.debug($source));
            let debug_expected = format!("{}", expected_tree.debug(&expected_source));

            if debug_result == debug_expected {
                assert_eq!(
                    result, expected_tree,
                    "NOTE: actual and expected had equivalent debug serializations, source={:?}, debug_expected={:?}",
                    $source,
                    debug_expected
                )
            } else {
                assert_eq!(debug_result, debug_expected)
            }
        }
    };
}

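The removed return_ok! macro is an early-return shortcut: evaluate an expression and return immediately on Ok, otherwise fall through and try the next alternative. A self-contained sketch of the same macro shape, renamed to make clear it is an illustration (return_if_ok and parse_int_or_float are hypothetical):

// Early-return on success, fall through on error, mirroring the removed return_ok! macro.
macro_rules! return_if_ok {
    ($expr:expr) => {
        match $expr {
            Ok(val) => return Ok(val),
            Err(_) => {}
        }
    };
}

fn parse_int_or_float(input: &str) -> Result<i64, String> {
    return_if_ok!(input.parse::<i64>().map_err(|e| e.to_string()));
    return_if_ok!(input.parse::<f64>().map(|f| f as i64).map_err(|e| e.to_string()));
    Err(format!("not a number: {:?}", input))
}

fn main() {
    assert_eq!(parse_int_or_float("7"), Ok(7));
    assert_eq!(parse_int_or_float("7.9"), Ok(7));
    assert!(parse_int_or_float("abc").is_err());
}
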
@ -1,12 +1,925 @@
|
|||||||
pub(crate) mod call_node;
|
use std::path::Path;
|
||||||
pub(crate) mod comment;
|
|
||||||
pub(crate) mod files;
|
use nu_errors::{ArgumentError, ParseError};
|
||||||
pub(crate) mod flag;
|
//use crate::hir::*;
|
||||||
pub(crate) mod number;
|
use crate::hir::{
|
||||||
pub(crate) mod operator;
|
Binary, CompareOperator, Expression, Flag, FlagKind, Member, NamedArguments, SpannedExpression,
|
||||||
pub(crate) mod parser;
|
Unit,
|
||||||
pub(crate) mod pipeline;
|
};
|
||||||
pub(crate) mod token_tree;
|
use crate::lite_parse::{lite_parse, LiteCommand, LitePipeline};
|
||||||
pub(crate) mod token_tree_builder;
|
use crate::signature::SignatureRegistry;
|
||||||
pub(crate) mod unit;
|
use crate::{ExternalArg, ExternalArgs, ExternalCommand};
|
||||||
pub(crate) mod util;
|
use nu_protocol::{NamedType, PositionalType, Signature, SyntaxShape, UnspannedPathMember};
|
||||||
|
use nu_source::{Span, Spanned, SpannedItem, Tag};
|
||||||
|
use num_bigint::BigInt;
|
||||||
|
|
||||||
|
#[derive(Debug, Clone)]
|
||||||
|
pub struct InternalCommand {
|
||||||
|
pub name: String,
|
||||||
|
pub name_span: Span,
|
||||||
|
pub args: crate::hir::Call,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl InternalCommand {
|
||||||
|
pub fn new(name: String, name_span: Span, full_span: Span) -> InternalCommand {
|
||||||
|
InternalCommand {
|
||||||
|
name: name.clone(),
|
||||||
|
name_span,
|
||||||
|
args: crate::hir::Call::new(
|
||||||
|
Box::new(SpannedExpression::new(Expression::string(name), name_span)),
|
||||||
|
full_span,
|
||||||
|
),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Clone)]
|
||||||
|
pub enum ClassifiedCommand {
|
||||||
|
#[allow(unused)]
|
||||||
|
Comparison(
|
||||||
|
Box<SpannedExpression>,
|
||||||
|
Box<SpannedExpression>,
|
||||||
|
Box<SpannedExpression>,
|
||||||
|
),
|
||||||
|
#[allow(unused)]
|
||||||
|
Dynamic(crate::hir::Call),
|
||||||
|
Internal(InternalCommand),
|
||||||
|
External(crate::ExternalCommand),
|
||||||
|
Error(ParseError),
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Clone)]
|
||||||
|
pub struct Commands {
|
||||||
|
pub list: Vec<ClassifiedCommand>,
|
||||||
|
pub span: Span,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Commands {
|
||||||
|
pub fn new(span: Span) -> Commands {
|
||||||
|
Commands { list: vec![], span }
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn push(&mut self, command: ClassifiedCommand) {
|
||||||
|
self.list.push(command);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Clone)]
|
||||||
|
pub struct ClassifiedPipeline {
|
||||||
|
pub commands: Commands,
|
||||||
|
// this is not a Result to make it crystal clear that these shapes
|
||||||
|
// aren't intended to be used directly with `?`
|
||||||
|
pub failed: Option<ParseError>,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl ClassifiedPipeline {
|
||||||
|
pub fn new(commands: Commands, failed: Option<ParseError>) -> ClassifiedPipeline {
|
||||||
|
ClassifiedPipeline { commands, failed }
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Parses a simple column path, one without a variable (implied or explicit) at the head
|
||||||
|
fn parse_simple_column_path(lite_arg: &Spanned<String>) -> (SpannedExpression, Option<ParseError>) {
|
||||||
|
let mut delimiter = '.';
|
||||||
|
let mut inside_delimiter = false;
|
||||||
|
let mut output = vec![];
|
||||||
|
let mut current_part = String::new();
|
||||||
|
let mut start_index = 0;
|
||||||
|
let mut last_index = 0;
|
||||||
|
|
||||||
|
for (idx, c) in lite_arg.item.char_indices() {
|
||||||
|
last_index = idx;
|
||||||
|
if inside_delimiter {
|
||||||
|
if c == delimiter {
|
||||||
|
inside_delimiter = false;
|
||||||
|
}
|
||||||
|
} else if c == '\'' || c == '"' {
|
||||||
|
inside_delimiter = true;
|
||||||
|
delimiter = c;
|
||||||
|
} else if c == '.' {
|
||||||
|
let part_span = Span::new(
|
||||||
|
lite_arg.span.start() + start_index,
|
||||||
|
lite_arg.span.start() + idx,
|
||||||
|
);
|
||||||
|
|
||||||
|
if let Ok(row_number) = current_part.parse::<u64>() {
|
||||||
|
output.push(Member::Int(BigInt::from(row_number), part_span));
|
||||||
|
} else {
|
||||||
|
let trimmed = trim_quotes(¤t_part);
|
||||||
|
output.push(Member::Bare(trimmed.clone().spanned(part_span)));
|
||||||
|
}
|
||||||
|
current_part.clear();
|
||||||
|
// Note: I believe this is safe because of the delimiter we're using, but if we get fancy with
|
||||||
|
// unicode we'll need to change this
|
||||||
|
start_index = idx + '.'.len_utf8();
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
current_part.push(c);
|
||||||
|
}
|
||||||
|
|
||||||
|
if !current_part.is_empty() {
|
||||||
|
let part_span = Span::new(
|
||||||
|
lite_arg.span.start() + start_index,
|
||||||
|
lite_arg.span.start() + last_index + 1,
|
||||||
|
);
|
||||||
|
if let Ok(row_number) = current_part.parse::<u64>() {
|
||||||
|
output.push(Member::Int(BigInt::from(row_number), part_span));
|
||||||
|
} else {
|
||||||
|
let current_part = trim_quotes(¤t_part);
|
||||||
|
output.push(Member::Bare(current_part.spanned(part_span)));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
(
|
||||||
|
SpannedExpression::new(Expression::simple_column_path(output), lite_arg.span),
|
||||||
|
None,
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Parses a column path, adding in the preceding reference to $it if it's elided
|
||||||
|
fn parse_full_column_path(lite_arg: &Spanned<String>) -> (SpannedExpression, Option<ParseError>) {
|
||||||
|
let mut delimiter = '.';
|
||||||
|
let mut inside_delimiter = false;
|
||||||
|
let mut output = vec![];
|
||||||
|
let mut current_part = String::new();
|
||||||
|
let mut start_index = 0;
|
||||||
|
let mut last_index = 0;
|
||||||
|
|
||||||
|
let mut head = None;
|
||||||
|
|
||||||
|
for (idx, c) in lite_arg.item.char_indices() {
|
||||||
|
last_index = idx;
|
||||||
|
if inside_delimiter {
|
||||||
|
if c == delimiter {
|
||||||
|
inside_delimiter = false;
|
||||||
|
}
|
||||||
|
} else if c == '\'' || c == '"' {
|
||||||
|
inside_delimiter = true;
|
||||||
|
delimiter = c;
|
||||||
|
} else if c == '.' {
|
||||||
|
let part_span = Span::new(
|
||||||
|
lite_arg.span.start() + start_index,
|
||||||
|
lite_arg.span.start() + idx,
|
||||||
|
);
|
||||||
|
|
||||||
|
if head.is_none() && current_part.clone().starts_with('$') {
|
||||||
|
// We have the variable head
|
||||||
|
head = Some(Expression::variable(current_part.clone(), part_span))
|
||||||
|
} else if let Ok(row_number) = current_part.parse::<u64>() {
|
||||||
|
output.push(
|
||||||
|
UnspannedPathMember::Int(BigInt::from(row_number)).into_path_member(part_span),
|
||||||
|
);
|
||||||
|
} else {
|
||||||
|
let current_part = trim_quotes(¤t_part);
|
||||||
|
output.push(
|
||||||
|
UnspannedPathMember::String(current_part.clone()).into_path_member(part_span),
|
||||||
|
);
|
||||||
|
}
|
||||||
|
current_part.clear();
|
||||||
|
// Note: I believe this is safe because of the delimiter we're using, but if we get fancy with
|
||||||
|
// unicode we'll need to change this
|
||||||
|
start_index = idx + '.'.len_utf8();
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
current_part.push(c);
|
||||||
|
}
|
||||||
|
|
||||||
|
if !current_part.is_empty() {
|
||||||
|
let part_span = Span::new(
|
||||||
|
lite_arg.span.start() + start_index,
|
||||||
|
lite_arg.span.start() + last_index + 1,
|
||||||
|
);
|
||||||
|
|
||||||
|
if head.is_none() {
|
||||||
|
if current_part.starts_with('$') {
|
||||||
|
head = Some(Expression::variable(current_part, lite_arg.span));
|
||||||
|
} else if let Ok(row_number) = current_part.parse::<u64>() {
|
||||||
|
output.push(
|
||||||
|
UnspannedPathMember::Int(BigInt::from(row_number)).into_path_member(part_span),
|
||||||
|
);
|
||||||
|
} else {
|
||||||
|
let current_part = trim_quotes(¤t_part);
|
||||||
|
output.push(UnspannedPathMember::String(current_part).into_path_member(part_span));
|
||||||
|
}
|
||||||
|
} else if let Ok(row_number) = current_part.parse::<u64>() {
|
||||||
|
output.push(
|
||||||
|
UnspannedPathMember::Int(BigInt::from(row_number)).into_path_member(part_span),
|
||||||
|
);
|
||||||
|
} else {
|
||||||
|
let current_part = trim_quotes(¤t_part);
|
||||||
|
output.push(UnspannedPathMember::String(current_part).into_path_member(part_span));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if let Some(head) = head {
|
||||||
|
(
|
||||||
|
SpannedExpression::new(
|
||||||
|
Expression::path(SpannedExpression::new(head, lite_arg.span), output),
|
||||||
|
lite_arg.span,
|
||||||
|
),
|
||||||
|
None,
|
||||||
|
)
|
||||||
|
} else {
|
||||||
|
(
|
||||||
|
SpannedExpression::new(
|
||||||
|
Expression::path(
|
||||||
|
SpannedExpression::new(
|
||||||
|
Expression::variable("$it".into(), lite_arg.span),
|
||||||
|
lite_arg.span,
|
||||||
|
),
|
||||||
|
output,
|
||||||
|
),
|
||||||
|
lite_arg.span,
|
||||||
|
),
|
||||||
|
None,
|
||||||
|
)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn trim_quotes(input: &str) -> String {
|
||||||
|
let mut chars = input.chars();
|
||||||
|
|
||||||
|
match (chars.next(), chars.next_back()) {
|
||||||
|
(Some('\''), Some('\'')) => chars.collect(),
|
||||||
|
(Some('"'), Some('"')) => chars.collect(),
|
||||||
|
_ => input.to_string(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn parse_range(lite_arg: &Spanned<String>) -> (SpannedExpression, Option<ParseError>) {
|
||||||
|
let numbers: Vec<_> = lite_arg.item.split("..").collect();
|
||||||
|
|
||||||
|
if numbers.len() != 2 {
|
||||||
|
(
|
||||||
|
garbage(lite_arg.span),
|
||||||
|
Some(ParseError::mismatch("range", lite_arg.clone())),
|
||||||
|
)
|
||||||
|
} else if let Ok(lhs) = numbers[0].parse::<i64>() {
|
||||||
|
if let Ok(rhs) = numbers[1].parse::<i64>() {
|
||||||
|
(
|
||||||
|
SpannedExpression::new(
|
||||||
|
Expression::range(
|
||||||
|
SpannedExpression::new(Expression::integer(lhs), lite_arg.span),
|
||||||
|
lite_arg.span,
|
||||||
|
SpannedExpression::new(Expression::integer(rhs), lite_arg.span),
|
||||||
|
),
|
||||||
|
lite_arg.span,
|
||||||
|
),
|
||||||
|
None,
|
||||||
|
)
|
||||||
|
} else {
|
||||||
|
(
|
||||||
|
garbage(lite_arg.span),
|
||||||
|
Some(ParseError::mismatch("range", lite_arg.clone())),
|
||||||
|
)
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
(
|
||||||
|
garbage(lite_arg.span),
|
||||||
|
Some(ParseError::mismatch("range", lite_arg.clone())),
|
||||||
|
)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn parse_operator(lite_arg: &Spanned<String>) -> (SpannedExpression, Option<ParseError>) {
|
||||||
|
let operator = if lite_arg.item == "==" {
|
||||||
|
CompareOperator::Equal
|
||||||
|
} else if lite_arg.item == "!=" {
|
||||||
|
CompareOperator::NotEqual
|
||||||
|
} else if lite_arg.item == "<" {
|
||||||
|
CompareOperator::LessThan
|
||||||
|
} else if lite_arg.item == "<=" {
|
||||||
|
CompareOperator::LessThanOrEqual
|
||||||
|
} else if lite_arg.item == ">" {
|
||||||
|
CompareOperator::GreaterThan
|
||||||
|
} else if lite_arg.item == ">=" {
|
||||||
|
CompareOperator::GreaterThanOrEqual
|
||||||
|
} else if lite_arg.item == "=~" {
|
||||||
|
CompareOperator::Contains
|
||||||
|
} else if lite_arg.item == "!~" {
|
||||||
|
CompareOperator::NotContains
|
||||||
|
} else {
|
||||||
|
return (
|
||||||
|
garbage(lite_arg.span),
|
||||||
|
Some(ParseError::mismatch(
|
||||||
|
"comparison operator",
|
||||||
|
lite_arg.clone(),
|
||||||
|
)),
|
||||||
|
);
|
||||||
|
};
|
||||||
|
|
||||||
|
(
|
||||||
|
SpannedExpression::new(Expression::operator(operator), lite_arg.span),
|
||||||
|
None,
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
fn parse_unit(lite_arg: &Spanned<String>) -> (SpannedExpression, Option<ParseError>) {
|
||||||
|
let unit_groups = [
|
||||||
|
(Unit::Byte, true, vec!["b", "B"]),
|
||||||
|
(Unit::Kilobyte, true, vec!["kb", "KB", "Kb"]),
|
||||||
|
(Unit::Megabyte, true, vec!["mb", "MB", "Mb"]),
|
||||||
|
(Unit::Gigabyte, true, vec!["gb", "GB", "Gb"]),
|
||||||
|
(Unit::Terabyte, true, vec!["tb", "TB", "Tb"]),
|
||||||
|
(Unit::Petabyte, true, vec!["pb", "PB", "Pb"]),
|
||||||
|
(Unit::Second, false, vec!["s"]),
|
||||||
|
(Unit::Minute, false, vec!["m"]),
|
||||||
|
(Unit::Hour, false, vec!["h"]),
|
||||||
|
(Unit::Day, false, vec!["d"]),
|
||||||
|
(Unit::Week, false, vec!["w"]),
|
||||||
|
(Unit::Month, false, vec!["M"]),
|
||||||
|
(Unit::Year, false, vec!["y"]),
|
||||||
|
];
|
||||||
|
|
||||||
|
for unit_group in unit_groups.iter() {
|
||||||
|
for unit in unit_group.2.iter() {
|
||||||
|
if lite_arg.item.ends_with(unit) {
|
||||||
|
let mut lhs = lite_arg.item.clone();
|
||||||
|
|
||||||
|
for _ in 0..unit.len() {
|
||||||
|
lhs.pop();
|
||||||
|
}
|
||||||
|
|
||||||
|
if unit_group.1 {
|
||||||
|
// these units are allowed to signed
|
||||||
|
if let Ok(x) = lhs.parse::<i64>() {
|
||||||
|
let lhs_span =
|
||||||
|
Span::new(lite_arg.span.start(), lite_arg.span.start() + lhs.len());
|
||||||
|
let unit_span =
|
||||||
|
Span::new(lite_arg.span.start() + lhs.len(), lite_arg.span.end());
|
||||||
|
return (
|
||||||
|
SpannedExpression::new(
|
||||||
|
Expression::unit(
|
||||||
|
x.spanned(lhs_span),
|
||||||
|
unit_group.0.spanned(unit_span),
|
||||||
|
),
|
||||||
|
lite_arg.span,
|
||||||
|
),
|
||||||
|
None,
|
||||||
|
);
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
// these units are unsigned
|
||||||
|
if let Ok(x) = lhs.parse::<u64>() {
|
||||||
|
let lhs_span =
|
||||||
|
Span::new(lite_arg.span.start(), lite_arg.span.start() + lhs.len());
|
||||||
|
let unit_span =
|
||||||
|
Span::new(lite_arg.span.start() + lhs.len(), lite_arg.span.end());
|
||||||
|
return (
|
||||||
|
SpannedExpression::new(
|
||||||
|
Expression::unit(
|
||||||
|
(x as i64).spanned(lhs_span),
|
||||||
|
unit_group.0.spanned(unit_span),
|
||||||
|
),
|
||||||
|
lite_arg.span,
|
||||||
|
),
|
||||||
|
None,
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
(
|
||||||
|
garbage(lite_arg.span),
|
||||||
|
Some(ParseError::mismatch("unit", lite_arg.clone())),
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Parses the given argument using the shape as a guide for how to correctly parse the argument
|
||||||
|
fn parse_arg(
|
||||||
|
expected_type: SyntaxShape,
|
||||||
|
registry: &dyn SignatureRegistry,
|
||||||
|
lite_arg: &Spanned<String>,
|
||||||
|
) -> (SpannedExpression, Option<ParseError>) {
|
||||||
|
if lite_arg.item.starts_with('$') {
|
||||||
|
return parse_full_column_path(&lite_arg);
|
||||||
|
}
|
||||||
|
|
||||||
|
match expected_type {
|
||||||
|
SyntaxShape::Number => {
|
||||||
|
if let Ok(x) = lite_arg.item.parse::<i64>() {
|
||||||
|
(
|
||||||
|
SpannedExpression::new(Expression::integer(x), lite_arg.span),
|
||||||
|
None,
|
||||||
|
)
|
||||||
|
} else if let Ok(x) = lite_arg.item.parse::<f64>() {
|
||||||
|
(
|
||||||
|
SpannedExpression::new(Expression::decimal(x), lite_arg.span),
|
||||||
|
None,
|
||||||
|
)
|
||||||
|
} else {
|
||||||
|
(
|
||||||
|
garbage(lite_arg.span),
|
||||||
|
Some(ParseError::mismatch("number", lite_arg.clone())),
|
||||||
|
)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
SyntaxShape::Int => {
|
||||||
|
if let Ok(x) = lite_arg.item.parse::<i64>() {
|
||||||
|
(
|
||||||
|
SpannedExpression::new(Expression::integer(x), lite_arg.span),
|
||||||
|
None,
|
||||||
|
)
|
||||||
|
} else {
|
||||||
|
(
|
||||||
|
garbage(lite_arg.span),
|
||||||
|
Some(ParseError::mismatch("number", lite_arg.clone())),
|
||||||
|
)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
SyntaxShape::String => {
|
||||||
|
let trimmed = trim_quotes(&lite_arg.item);
|
||||||
|
(
|
||||||
|
SpannedExpression::new(Expression::string(trimmed), lite_arg.span),
|
||||||
|
None,
|
||||||
|
)
|
||||||
|
}
|
||||||
|
SyntaxShape::Pattern => {
|
||||||
|
let trimmed = trim_quotes(&lite_arg.item);
|
||||||
|
let expanded = shellexpand::tilde(&trimmed).to_string();
|
||||||
|
(
|
||||||
|
SpannedExpression::new(Expression::pattern(expanded), lite_arg.span),
|
||||||
|
None,
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
SyntaxShape::Range => parse_range(&lite_arg),
|
||||||
|
SyntaxShape::Operator => parse_operator(&lite_arg),
|
||||||
|
SyntaxShape::Unit => parse_unit(&lite_arg),
|
||||||
|
SyntaxShape::Path => {
|
||||||
|
let trimmed = trim_quotes(&lite_arg.item);
|
||||||
|
let expanded = shellexpand::tilde(&trimmed).to_string();
|
||||||
|
let path = Path::new(&expanded);
|
||||||
|
(
|
||||||
|
SpannedExpression::new(Expression::FilePath(path.to_path_buf()), lite_arg.span),
|
||||||
|
None,
|
||||||
|
)
|
||||||
|
}
|
||||||
|
SyntaxShape::ColumnPath => parse_simple_column_path(lite_arg),
|
||||||
|
SyntaxShape::FullColumnPath => parse_full_column_path(lite_arg),
|
||||||
|
SyntaxShape::Any => {
|
||||||
|
let shapes = vec![
|
||||||
|
SyntaxShape::Int,
|
||||||
|
SyntaxShape::Number,
|
||||||
|
SyntaxShape::Range,
|
||||||
|
SyntaxShape::Unit,
|
||||||
|
SyntaxShape::Block,
|
||||||
|
SyntaxShape::Table,
|
||||||
|
SyntaxShape::String,
|
||||||
|
];
|
||||||
|
for shape in shapes.iter() {
|
||||||
|
if let (s, None) = parse_arg(*shape, registry, lite_arg) {
|
||||||
|
return (s, None);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
(
|
||||||
|
garbage(lite_arg.span),
|
||||||
|
Some(ParseError::mismatch("any shape", lite_arg.clone())),
|
||||||
|
)
|
||||||
|
}
|
||||||
|
SyntaxShape::Table => {
|
||||||
|
let mut chars = lite_arg.item.chars();
|
||||||
|
|
||||||
|
match (chars.next(), chars.next_back()) {
|
||||||
|
(Some('['), Some(']')) => {
|
||||||
|
// We have a literal row
|
||||||
|
let string: String = chars.collect();
|
||||||
|
let mut error = None;
|
||||||
|
|
||||||
|
// We haven't done much with the inner string, so let's go ahead and work with it
|
||||||
|
let lite_pipeline = match lite_parse(&string, lite_arg.span.start() + 1) {
|
||||||
|
Ok(lp) => lp,
|
||||||
|
Err(e) => return (garbage(lite_arg.span), Some(e)),
|
||||||
|
};
|
||||||
|
|
||||||
|
let mut output = vec![];
|
||||||
|
for lite_inner in &lite_pipeline.commands {
|
||||||
|
let (arg, err) = parse_arg(SyntaxShape::Any, registry, &lite_inner.name);
|
||||||
|
|
||||||
|
output.push(arg);
|
||||||
|
if error.is_none() {
|
||||||
|
error = err;
|
||||||
|
}
|
||||||
|
|
||||||
|
for arg in &lite_inner.args {
|
||||||
|
let (arg, err) = parse_arg(SyntaxShape::Any, registry, &arg);
|
||||||
|
output.push(arg);
|
||||||
|
|
||||||
|
if error.is_none() {
|
||||||
|
error = err;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
(
|
||||||
|
SpannedExpression::new(Expression::List(output), lite_arg.span),
|
||||||
|
error,
|
||||||
|
)
|
||||||
|
}
|
||||||
|
_ => (
|
||||||
|
garbage(lite_arg.span),
|
||||||
|
Some(ParseError::mismatch("table", lite_arg.clone())),
|
||||||
|
),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
SyntaxShape::Block => {
|
||||||
|
// Blocks have one of two forms: the literal block and the implied block
|
||||||
|
// To parse a literal block, we need to detect that what we have is itself a block
|
||||||
|
let mut chars = lite_arg.item.chars();
|
||||||
|
|
||||||
|
match (chars.next(), chars.next_back()) {
|
||||||
|
(Some('{'), Some('}')) => {
|
||||||
|
// We have a literal block
|
||||||
|
let string: String = chars.collect();
|
||||||
|
let mut error = None;
|
||||||
|
|
||||||
|
// We haven't done much with the inner string, so let's go ahead and work with it
|
||||||
|
let lite_pipeline = match lite_parse(&string, lite_arg.span.start() + 1) {
|
||||||
|
Ok(lp) => lp,
|
||||||
|
Err(e) => return (garbage(lite_arg.span), Some(e)),
|
||||||
|
};
|
||||||
|
//let pipeline = parse(&lite_pipeline, definitions)?;
|
||||||
|
|
||||||
|
// For now, just take the first command
|
||||||
|
if let Some(lite_cmd) = lite_pipeline.commands.first() {
|
||||||
|
if lite_cmd.args.len() != 2 {
|
||||||
|
return (
|
||||||
|
garbage(lite_arg.span),
|
||||||
|
Some(ParseError::mismatch("block", lite_arg.clone())),
|
||||||
|
);
|
||||||
|
}
|
||||||
|
let (lhs, err) =
|
||||||
|
parse_arg(SyntaxShape::FullColumnPath, registry, &lite_cmd.name);
|
||||||
|
if error.is_none() {
|
||||||
|
error = err;
|
||||||
|
}
|
||||||
|
let (op, err) =
|
||||||
|
parse_arg(SyntaxShape::Operator, registry, &lite_cmd.args[0]);
|
||||||
|
if error.is_none() {
|
||||||
|
error = err;
|
||||||
|
}
|
||||||
|
let (rhs, err) = parse_arg(SyntaxShape::Any, registry, &lite_cmd.args[1]);
|
||||||
|
if error.is_none() {
|
||||||
|
error = err;
|
||||||
|
}
|
||||||
|
|
||||||
|
let span = Span::new(lhs.span.start(), rhs.span.end());
|
||||||
|
let binary = SpannedExpression::new(
|
||||||
|
Expression::Binary(Box::new(Binary::new(lhs, op, rhs))),
|
||||||
|
span,
|
||||||
|
);
|
||||||
|
(
|
||||||
|
SpannedExpression::new(Expression::Block(vec![binary]), span),
|
||||||
|
error,
|
||||||
|
)
|
||||||
|
} else {
|
||||||
|
(
|
||||||
|
garbage(lite_arg.span),
|
||||||
|
Some(ParseError::mismatch("block", lite_arg.clone())),
|
||||||
|
)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
_ => {
|
||||||
|
// We have an implied block, but we can't parse this here
|
||||||
|
// it needed to have been parsed up higher where we have control over more than one arg
|
||||||
|
(
|
||||||
|
garbage(lite_arg.span),
|
||||||
|
Some(ParseError::mismatch("block", lite_arg.clone())),
|
||||||
|
)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Match the available flags in a signature with what the user provided. This will check both long-form flags (--full) and shorthand flags (-f)
|
||||||
|
/// This also allows users to provide a group of shorthand flags (-af) that correspond to multiple shorthand flags at once.
|
||||||
|
fn get_flags_from_flag(
|
||||||
|
signature: &nu_protocol::Signature,
|
||||||
|
cmd: &Spanned<String>,
|
||||||
|
arg: &Spanned<String>,
|
||||||
|
) -> (Vec<(String, NamedType)>, Option<ParseError>) {
|
||||||
|
if arg.item.starts_with('-') {
|
||||||
|
// It's a flag (or set of flags)
|
||||||
|
let mut output = vec![];
|
||||||
|
let mut error = None;
|
||||||
|
|
||||||
|
let remainder: String = arg.item.chars().skip(1).collect();
|
||||||
|
|
||||||
|
if remainder.starts_with('-') {
|
||||||
|
// Long flag expected
|
||||||
|
let remainder: String = remainder.chars().skip(1).collect();
|
||||||
|
if let Some((named_type, _)) = signature.named.get(&remainder) {
|
||||||
|
output.push((remainder.clone(), named_type.clone()));
|
||||||
|
} else {
|
||||||
|
error = Some(ParseError::argument_error(
|
||||||
|
cmd.clone(),
|
||||||
|
                ArgumentError::UnexpectedFlag(arg.clone()),
            ));
        }
    } else {
        // Short flag(s) expected
        let mut starting_pos = arg.span.start() + 1;
        for c in remainder.chars() {
            let mut found = false;
            for (full_name, named_arg) in signature.named.iter() {
                if Some(c) == named_arg.0.get_short() {
                    found = true;
                    output.push((full_name.clone(), named_arg.0.clone()));
                    break;
                }
            }

            if !found {
                error = Some(ParseError::argument_error(
                    cmd.clone(),
                    ArgumentError::UnexpectedFlag(
                        arg.item
                            .clone()
                            .spanned(Span::new(starting_pos, starting_pos + c.len_utf8())),
                    ),
                ));
            }

            starting_pos += c.len_utf8();
        }
    }

    (output, error)
} else {
    // It's not a flag, so don't bother with it
    (vec![], None)
}
}

fn classify_positional_arg(
    idx: usize,
    lite_cmd: &LiteCommand,
    positional_type: &PositionalType,
    registry: &dyn SignatureRegistry,
) -> (usize, SpannedExpression, Option<ParseError>) {
    let mut idx = idx;
    let mut error = None;
    let arg = match positional_type {
        PositionalType::Mandatory(_, SyntaxShape::Block)
        | PositionalType::Optional(_, SyntaxShape::Block) => {
            // We may have an implied block, so let's try to parse it here
            // The only implied block format we currently support is <shorthand path> <operator> <any>, though
            // we may want to revisit this in the future

            // TODO: only do this step if it's not a literal block
            if (idx + 2) < lite_cmd.args.len() {
                let (lhs, err) =
                    parse_arg(SyntaxShape::FullColumnPath, registry, &lite_cmd.args[idx]);
                if error.is_none() {
                    error = err;
                }
                let (op, err) = parse_arg(SyntaxShape::Operator, registry, &lite_cmd.args[idx + 1]);
                if error.is_none() {
                    error = err;
                }
                let (rhs, err) = parse_arg(SyntaxShape::Any, registry, &lite_cmd.args[idx + 2]);
                if error.is_none() {
                    error = err;
                }
                idx += 2;
                let span = Span::new(lhs.span.start(), rhs.span.end());
                let binary = SpannedExpression::new(
                    Expression::Binary(Box::new(Binary::new(lhs, op, rhs))),
                    span,
                );
                SpannedExpression::new(Expression::Block(vec![binary]), span)
            } else {
                let (arg, err) = parse_arg(SyntaxShape::Block, registry, &lite_cmd.args[idx]);
                if error.is_none() {
                    error = err;
                }
                arg
            }
        }
        PositionalType::Mandatory(_, shape) => {
            let (arg, err) = parse_arg(*shape, registry, &lite_cmd.args[idx]);
            if error.is_none() {
                error = err;
            }
            arg
        }
        PositionalType::Optional(_, shape) => {
            let (arg, err) = parse_arg(*shape, registry, &lite_cmd.args[idx]);
            if error.is_none() {
                error = err;
            }
            arg
        }
    };

    (idx, arg, error)
}

fn classify_internal_command(
    lite_cmd: &LiteCommand,
    registry: &dyn SignatureRegistry,
    signature: &Signature,
) -> (InternalCommand, Option<ParseError>) {
    // This is a known internal command, so we need to work with the arguments and parse them according to the expected types
    let mut internal_command = InternalCommand::new(
        lite_cmd.name.item.clone(),
        lite_cmd.name.span,
        lite_cmd.span(),
    );
    internal_command.args.set_initial_flags(&signature);

    let mut idx = 0;
    let mut current_positional = 0;
    let mut named = NamedArguments::new();
    let mut positional = vec![];
    let mut error = None;

    while idx < lite_cmd.args.len() {
        if lite_cmd.args[idx].item.starts_with('-') && lite_cmd.args[idx].item.len() > 1 {
            let (named_types, err) =
                get_flags_from_flag(&signature, &lite_cmd.name, &lite_cmd.args[idx]);

            if err.is_none() {
                for (full_name, named_type) in &named_types {
                    match named_type {
                        NamedType::Mandatory(_, shape) | NamedType::Optional(_, shape) => {
                            if idx == lite_cmd.args.len() {
                                // Oops, we're missing the argument to our named argument
                                if error.is_none() {
                                    error = Some(ParseError::argument_error(
                                        lite_cmd.name.clone(),
                                        ArgumentError::MissingValueForName(format!("{:?}", shape)),
                                    ));
                                }
                            } else {
                                idx += 1;
                                if lite_cmd.args.len() > idx {
                                    let (arg, err) =
                                        parse_arg(*shape, registry, &lite_cmd.args[idx]);
                                    named.insert_mandatory(
                                        full_name.clone(),
                                        lite_cmd.args[idx - 1].span,
                                        arg,
                                    );

                                    if error.is_none() {
                                        error = err;
                                    }
                                } else if error.is_none() {
                                    error = Some(ParseError::argument_error(
                                        lite_cmd.name.clone(),
                                        ArgumentError::MissingValueForName(full_name.to_owned()),
                                    ));
                                }
                            }
                        }
                        NamedType::Switch(_) => {
                            named.insert_switch(
                                full_name.clone(),
                                Some(Flag::new(FlagKind::Longhand, lite_cmd.args[idx].span)),
                            );
                        }
                    }
                }
            } else {
                positional.push(garbage(lite_cmd.args[idx].span));

                if error.is_none() {
                    error = err;
                }
            }
        } else if signature.positional.len() > current_positional {
            let arg = {
                let (new_idx, expr, err) = classify_positional_arg(
                    idx,
                    &lite_cmd,
                    &signature.positional[current_positional].0,
                    registry,
                );
                idx = new_idx;
                if error.is_none() {
                    error = err;
                }
                expr
            };

            positional.push(arg);
            current_positional += 1;
        } else if let Some((rest_type, _)) = &signature.rest_positional {
            let (arg, err) = parse_arg(*rest_type, registry, &lite_cmd.args[idx]);
            if error.is_none() {
                error = err;
            }

            positional.push(arg);
            current_positional += 1;
        } else {
            positional.push(garbage(lite_cmd.args[idx].span));

            if error.is_none() {
                error = Some(ParseError::argument_error(
                    lite_cmd.name.clone(),
                    ArgumentError::UnexpectedArgument(lite_cmd.args[idx].clone()),
                ));
            }
        }

        idx += 1;
    }

    // Count the required positional arguments and ensure these have been met
    let mut required_arg_count = 0;
    for positional_arg in &signature.positional {
        if let PositionalType::Mandatory(_, _) = positional_arg.0 {
            required_arg_count += 1;
        }
    }
    if positional.len() < required_arg_count && error.is_none() {
        let (_, name) = &signature.positional[positional.len()];
        error = Some(ParseError::argument_error(
            lite_cmd.name.clone(),
            ArgumentError::MissingMandatoryPositional(name.to_owned()),
        ));
    }

    if !named.is_empty() {
        internal_command.args.named = Some(named);
    }

    if !positional.is_empty() {
        internal_command.args.positional = Some(positional);
    }

    (internal_command, error)
}

/// Convert a lite-ly parsed pipeline into a fully classified pipeline, ready to be evaluated.
/// This conversion does error-recovery, so the result is allowed to be lossy. A lossy unit is designated as garbage.
/// Errors are returned as part of a side-car error rather than a Result to allow both error and lossy result simultaneously.
pub fn classify_pipeline(
    lite_pipeline: &LitePipeline,
    registry: &dyn SignatureRegistry,
) -> ClassifiedPipeline {
    // FIXME: fake span
    let mut commands = Commands::new(Span::new(0, 0));
    let mut error = None;

    for lite_cmd in lite_pipeline.commands.iter() {
        if lite_cmd.name.item.starts_with('^') {
            let cmd_name: String = lite_cmd.name.item.chars().skip(1).collect();
            // This is an external command we should allow arguments to pass through with minimal parsing
            commands.push(ClassifiedCommand::External(ExternalCommand {
                name: cmd_name,
                name_tag: Tag::unknown_anchor(lite_cmd.name.span),
                args: ExternalArgs {
                    list: lite_cmd
                        .args
                        .iter()
                        .map(|x| ExternalArg {
                            arg: x.item.clone(),
                            tag: Tag::unknown_anchor(x.span),
                        })
                        .collect(),
                    span: Span::new(0, 0),
                },
            }))
        } else if let Some(signature) = registry.get(&lite_cmd.name.item) {
            let (internal_command, err) =
                classify_internal_command(&lite_cmd, registry, &signature);

            if error.is_none() {
                error = err;
            }
            commands.push(ClassifiedCommand::Internal(internal_command))
        } else {
            let trimmed = trim_quotes(&lite_cmd.name.item);
            let name = shellexpand::tilde(&trimmed).to_string();
            // This is an external command we should allow arguments to pass through with minimal parsing
            commands.push(ClassifiedCommand::External(ExternalCommand {
                name,
                name_tag: Tag::unknown_anchor(lite_cmd.name.span),
                args: ExternalArgs {
                    list: lite_cmd
                        .args
                        .iter()
                        .map(|x| ExternalArg {
                            arg: x.item.clone(),
                            tag: Tag::unknown_anchor(x.span),
                        })
                        .collect(),
                    span: Span::new(0, 0),
                },
            }))
        }
    }

    ClassifiedPipeline::new(commands, error)
}

/// Easy shorthand function to create a garbage expression at the given span
pub fn garbage(span: Span) -> SpannedExpression {
    SpannedExpression::new(Expression::Garbage, span)
}
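The doc comment on classify_pipeline above describes a "side-car error" convention: each step returns a usable (possibly garbage-filled) value together with an Option of the first error, instead of a Result that would discard the partial output. The following standalone sketch illustrates that shape under stated assumptions; MiniExpr, classify_word, and classify_all are hypothetical names for illustration only and are not part of nu-parser.

#[derive(Debug)]
enum MiniExpr {
    Word(String),
    Garbage,
}

// Each classification step returns a value plus an optional error, never a bare Result.
fn classify_word(input: &str) -> (MiniExpr, Option<String>) {
    if input.chars().all(|c| c.is_ascii_alphanumeric()) {
        (MiniExpr::Word(input.to_string()), None)
    } else {
        // Error recovery: keep a Garbage placeholder and report the problem on the side.
        (MiniExpr::Garbage, Some(format!("unexpected token: {:?}", input)))
    }
}

fn classify_all(inputs: &[&str]) -> (Vec<MiniExpr>, Option<String>) {
    let mut out = Vec::new();
    let mut error = None;
    for &input in inputs {
        let (expr, err) = classify_word(input);
        // As in classify_pipeline, only the first error encountered is kept.
        if error.is_none() {
            error = err;
        }
        out.push(expr);
    }
    (out, error)
}

fn main() {
    let (exprs, err) = classify_all(&["ls", "?!", "where"]);
    assert_eq!(exprs.len(), 3); // every input still yields an expression
    assert!(err.is_some()); // and the error travels alongside the lossy result
    println!("{:?} {:?}", exprs, err);
}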
@ -1,45 +0,0 @@
use crate::parse::token_tree::SpannedToken;
use getset::Getters;
use nu_source::{b, DebugDocBuilder, PrettyDebugWithSource};

#[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Getters)]
pub struct CallNode {
    #[get = "pub(crate)"]
    head: Box<SpannedToken>,
    #[get = "pub(crate)"]
    children: Option<Vec<SpannedToken>>,
}

impl PrettyDebugWithSource for CallNode {
    fn pretty_debug(&self, source: &str) -> DebugDocBuilder {
        b::typed(
            "call",
            self.head.pretty_debug(source)
                + b::preceded(
                    b::space(),
                    b::intersperse(
                        self.children.iter().flat_map(|children| {
                            children.iter().map(|child| child.pretty_debug(source))
                        }),
                        b::space(),
                    ),
                ),
        )
    }
}

impl CallNode {
    pub fn new(head: Box<SpannedToken>, children: Vec<SpannedToken>) -> CallNode {
        if children.is_empty() {
            CallNode {
                head,
                children: None,
            }
        } else {
            CallNode {
                head,
                children: Some(children),
            }
        }
    }
}
@ -1,34 +0,0 @@
use derive_new::new;
use getset::Getters;
use nu_source::{b, DebugDocBuilder, PrettyDebugWithSource, Span};
use serde::{Deserialize, Serialize};

#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Deserialize, Serialize)]
pub enum CommentKind {
    Line,
}

#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Getters, new)]
pub struct Comment {
    pub(crate) kind: CommentKind,
    pub(crate) text: Span,
}

impl Comment {
    pub fn line(text: impl Into<Span>) -> Comment {
        Comment {
            kind: CommentKind::Line,
            text: text.into(),
        }
    }
}

impl PrettyDebugWithSource for Comment {
    fn pretty_debug(&self, source: &str) -> DebugDocBuilder {
        let prefix = match self.kind {
            CommentKind::Line => b::description("#"),
        };

        prefix + b::description(self.text.slice(source))
    }
}
@ -1,38 +0,0 @@
use crate::hir::syntax_shape::flat_shape::FlatShape;
use derive_new::new;
use getset::Getters;
use nu_source::{b, DebugDocBuilder, PrettyDebugWithSource, Span, Spanned, SpannedItem};
use serde::{Deserialize, Serialize};

#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Deserialize, Serialize)]
pub enum FlagKind {
    Shorthand,
    Longhand,
}

#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Getters, new)]
#[get = "pub(crate)"]
pub struct Flag {
    pub(crate) kind: FlagKind,
    pub(crate) name: Span,
}

impl PrettyDebugWithSource for Flag {
    fn pretty_debug(&self, source: &str) -> DebugDocBuilder {
        let prefix = match self.kind {
            FlagKind::Longhand => b::description("--"),
            FlagKind::Shorthand => b::description("-"),
        };

        prefix + b::description(self.name.slice(source))
    }
}

impl Flag {
    pub fn color(&self, span: impl Into<Span>) -> Spanned<FlatShape> {
        match self.kind {
            FlagKind::Longhand => FlatShape::Flag.spanned(span.into()),
            FlagKind::Shorthand => FlatShape::ShorthandFlag.spanned(span.into()),
        }
    }
}
@ -1,70 +0,0 @@
use crate::hir::syntax_shape::FlatShape;
use crate::parse::parser::Number;
use bigdecimal::BigDecimal;
use nu_source::{b, DebugDocBuilder, HasSpan, PrettyDebugWithSource, Span, Text};
use num_bigint::BigInt;
use std::str::FromStr;

#[derive(Debug, Clone, Copy, Eq, PartialEq, Ord, PartialOrd, Hash)]
pub enum RawNumber {
    Int(Span),
    Decimal(Span),
}

impl HasSpan for RawNumber {
    fn span(&self) -> Span {
        match self {
            RawNumber::Int(span) => *span,
            RawNumber::Decimal(span) => *span,
        }
    }
}

impl PrettyDebugWithSource for RawNumber {
    fn pretty_debug(&self, source: &str) -> DebugDocBuilder {
        match self {
            RawNumber::Int(span) => b::primitive(span.slice(source)),
            RawNumber::Decimal(span) => b::primitive(span.slice(source)),
        }
    }
}

impl RawNumber {
    pub fn as_flat_shape(&self) -> FlatShape {
        match self {
            RawNumber::Int(_) => FlatShape::Int,
            RawNumber::Decimal(_) => FlatShape::Decimal,
        }
    }

    pub fn int(span: impl Into<Span>) -> RawNumber {
        let span = span.into();

        RawNumber::Int(span)
    }

    pub fn decimal(span: impl Into<Span>) -> RawNumber {
        let span = span.into();

        RawNumber::Decimal(span)
    }

    pub(crate) fn to_number(self, source: &Text) -> Number {
        match self {
            RawNumber::Int(tag) => {
                if let Ok(big_int) = BigInt::from_str(tag.slice(source)) {
                    Number::Int(big_int)
                } else {
                    unreachable!("Internal error: could not parse text as BigInt as expected")
                }
            }
            RawNumber::Decimal(tag) => {
                if let Ok(big_decimal) = BigDecimal::from_str(tag.slice(source)) {
                    Number::Decimal(big_decimal)
                } else {
                    unreachable!("Internal error: could not parse text as BigDecimal as expected")
                }
            }
        }
    }
}
@ -1,114 +0,0 @@
use nu_source::{b, DebugDocBuilder, PrettyDebug};
use serde::{Deserialize, Serialize};

use std::str::FromStr;

#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Deserialize, Serialize)]
pub enum CompareOperator {
    Equal,
    NotEqual,
    LessThan,
    GreaterThan,
    LessThanOrEqual,
    GreaterThanOrEqual,
    Contains,
    NotContains,
}

impl PrettyDebug for CompareOperator {
    fn pretty(&self) -> DebugDocBuilder {
        b::operator(self.as_str())
    }
}

impl CompareOperator {
    pub fn print(self) -> String {
        self.as_str().to_string()
    }

    pub fn as_str(self) -> &'static str {
        match self {
            CompareOperator::Equal => "==",
            CompareOperator::NotEqual => "!=",
            CompareOperator::LessThan => "<",
            CompareOperator::GreaterThan => ">",
            CompareOperator::LessThanOrEqual => "<=",
            CompareOperator::GreaterThanOrEqual => ">=",
            CompareOperator::Contains => "=~",
            CompareOperator::NotContains => "!~",
        }
    }
}

impl From<&str> for CompareOperator {
    fn from(input: &str) -> CompareOperator {
        if let Ok(output) = CompareOperator::from_str(input) {
            output
        } else {
            unreachable!("Internal error: CompareOperator from failed")
        }
    }
}

impl FromStr for CompareOperator {
    type Err = ();
    fn from_str(input: &str) -> Result<Self, <Self as std::str::FromStr>::Err> {
        match input {
            "==" => Ok(CompareOperator::Equal),
            "!=" => Ok(CompareOperator::NotEqual),
            "<" => Ok(CompareOperator::LessThan),
            ">" => Ok(CompareOperator::GreaterThan),
            "<=" => Ok(CompareOperator::LessThanOrEqual),
            ">=" => Ok(CompareOperator::GreaterThanOrEqual),
            "=~" => Ok(CompareOperator::Contains),
            "!~" => Ok(CompareOperator::NotContains),
            _ => Err(()),
        }
    }
}

#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Deserialize, Serialize)]
pub enum EvaluationOperator {
    Dot,
    DotDot,
}

impl PrettyDebug for EvaluationOperator {
    fn pretty(&self) -> DebugDocBuilder {
        b::operator(self.as_str())
    }
}

impl EvaluationOperator {
    pub fn print(self) -> String {
        self.as_str().to_string()
    }

    pub fn as_str(self) -> &'static str {
        match self {
            EvaluationOperator::Dot => ".",
            EvaluationOperator::DotDot => "..",
        }
    }
}

impl From<&str> for EvaluationOperator {
    fn from(input: &str) -> EvaluationOperator {
        if let Ok(output) = EvaluationOperator::from_str(input) {
            output
        } else {
            unreachable!("Internal error: EvaluationOperator 'from' failed")
        }
    }
}

impl FromStr for EvaluationOperator {
    type Err = ();
    fn from_str(input: &str) -> Result<Self, <Self as std::str::FromStr>::Err> {
        match input {
            "." => Ok(EvaluationOperator::Dot),
            ".." => Ok(EvaluationOperator::DotDot),
            _ => Err(()),
        }
    }
}
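The operator file above pairs as_str() with a FromStr implementation, so printing an operator and re-parsing the text yields the original value, while unknown text surfaces as Err rather than a panic. The standalone sketch below illustrates that round-trip contract; MiniOp is a hypothetical stand-in used only so the example compiles on its own, not a nu-parser type.

use std::str::FromStr;

#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum MiniOp {
    Equal,
    NotEqual,
}

impl MiniOp {
    fn as_str(self) -> &'static str {
        match self {
            MiniOp::Equal => "==",
            MiniOp::NotEqual => "!=",
        }
    }
}

impl FromStr for MiniOp {
    type Err = ();
    fn from_str(input: &str) -> Result<Self, ()> {
        match input {
            "==" => Ok(MiniOp::Equal),
            "!=" => Ok(MiniOp::NotEqual),
            _ => Err(()),
        }
    }
}

fn main() {
    for &op in &[MiniOp::Equal, MiniOp::NotEqual] {
        // Round-trip: printing an operator and re-parsing it yields the same value.
        assert_eq!(MiniOp::from_str(op.as_str()), Ok(op));
    }
    assert_eq!(MiniOp::from_str("=~"), Err(())); // unknown text is an Err, not a panic
    println!("round-trip ok");
}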
File diff suppressed because it is too large
@ -1,84 +0,0 @@
use crate::{SpannedToken, Token};
use derive_new::new;
use getset::Getters;
use nu_source::{
    b, DebugDocBuilder, HasSpan, IntoSpanned, PrettyDebugWithSource, Span, Spanned, SpannedItem,
};

#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Getters, new)]
pub struct Pipeline {
    #[get = "pub"]
    pub(crate) parts: Vec<PipelineElement>,
}

impl IntoSpanned for Pipeline {
    type Output = Spanned<Pipeline>;

    fn into_spanned(self, span: impl Into<Span>) -> Self::Output {
        self.spanned(span.into())
    }
}

#[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Getters, new)]
pub struct Tokens {
    pub(crate) tokens: Vec<SpannedToken>,
    pub(crate) span: Span,
}

impl Tokens {
    pub fn iter(&self) -> impl Iterator<Item = &SpannedToken> {
        self.tokens.iter()
    }
}

#[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Getters)]
pub struct PipelineElement {
    pub pipe: Option<Span>,
    pub tokens: Tokens,
}

impl HasSpan for PipelineElement {
    fn span(&self) -> Span {
        match self.pipe {
            Option::None => self.tokens.span,
            Option::Some(pipe) => pipe.until(self.tokens.span),
        }
    }
}

impl PipelineElement {
    pub fn new(pipe: Option<Span>, tokens: Spanned<Vec<SpannedToken>>) -> PipelineElement {
        PipelineElement {
            pipe,
            tokens: Tokens {
                tokens: tokens.item,
                span: tokens.span,
            },
        }
    }

    pub fn tokens(&self) -> &[SpannedToken] {
        &self.tokens.tokens
    }
}

impl PrettyDebugWithSource for Pipeline {
    fn pretty_debug(&self, source: &str) -> DebugDocBuilder {
        b::intersperse(
            self.parts.iter().map(|token| token.pretty_debug(source)),
            b::operator(" | "),
        )
    }
}

impl PrettyDebugWithSource for PipelineElement {
    fn pretty_debug(&self, source: &str) -> DebugDocBuilder {
        b::intersperse(
            self.tokens.iter().map(|token| match token.unspanned() {
                Token::Whitespace => b::blank(),
                _ => token.pretty_debug(source),
            }),
            b::space(),
        )
    }
}
@ -1,544 +0,0 @@
|
|||||||
#![allow(clippy::type_complexity)]
|
|
||||||
use crate::parse::{call_node::*, comment::*, flag::*, number::*, operator::*, pipeline::*};
|
|
||||||
use derive_new::new;
|
|
||||||
use getset::Getters;
|
|
||||||
use nu_errors::{ParseError, ShellError};
|
|
||||||
use nu_protocol::{ShellTypeName, SpannedTypeName};
|
|
||||||
use nu_source::{
|
|
||||||
b, DebugDocBuilder, HasSpan, PrettyDebugWithSource, Span, Spanned, SpannedItem, Text,
|
|
||||||
};
|
|
||||||
use std::borrow::Cow;
|
|
||||||
use std::ops::Deref;
|
|
||||||
|
|
||||||
#[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd)]
|
|
||||||
pub enum Token {
|
|
||||||
Number(RawNumber),
|
|
||||||
CompareOperator(CompareOperator),
|
|
||||||
EvaluationOperator(EvaluationOperator),
|
|
||||||
String(Span),
|
|
||||||
Variable(Span),
|
|
||||||
ItVariable(Span),
|
|
||||||
ExternalCommand(Span),
|
|
||||||
ExternalWord,
|
|
||||||
GlobPattern,
|
|
||||||
Bare,
|
|
||||||
Garbage,
|
|
||||||
|
|
||||||
Call(CallNode),
|
|
||||||
Delimited(DelimitedNode),
|
|
||||||
Pipeline(Pipeline),
|
|
||||||
Flag(Flag),
|
|
||||||
Comment(Comment),
|
|
||||||
Whitespace,
|
|
||||||
Separator,
|
|
||||||
}
|
|
||||||
|
|
||||||
macro_rules! token_type {
|
|
||||||
(struct $name:tt (desc: $desc:tt) -> $out:ty { |$span:ident, $pat:pat| => $do:expr }) => {
|
|
||||||
pub struct $name;
|
|
||||||
|
|
||||||
impl TokenType for $name {
|
|
||||||
type Output = $out;
|
|
||||||
|
|
||||||
fn desc(&self) -> Cow<'static, str> {
|
|
||||||
Cow::Borrowed($desc)
|
|
||||||
}
|
|
||||||
|
|
||||||
fn extract_token_value(
|
|
||||||
&self,
|
|
||||||
token: &SpannedToken,
|
|
||||||
err: ParseErrorFn<$out>,
|
|
||||||
) -> Result<$out, ParseError> {
|
|
||||||
let $span = token.span();
|
|
||||||
|
|
||||||
match *token.unspanned() {
|
|
||||||
$pat => Ok($do),
|
|
||||||
_ => err(),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
(struct $name:tt (desc: $desc:tt) -> $out:ty { $pat:pat => $do:expr }) => {
|
|
||||||
pub struct $name;
|
|
||||||
|
|
||||||
impl TokenType for $name {
|
|
||||||
type Output = $out;
|
|
||||||
|
|
||||||
fn desc(&self) -> Cow<'static, str> {
|
|
||||||
Cow::Borrowed($desc)
|
|
||||||
}
|
|
||||||
|
|
||||||
fn extract_token_value(
|
|
||||||
&self,
|
|
||||||
token: &SpannedToken,
|
|
||||||
err: ParseErrorFn<$out>,
|
|
||||||
) -> Result<$out, ParseError> {
|
|
||||||
match token.unspanned().clone() {
|
|
||||||
$pat => Ok($do),
|
|
||||||
_ => err(),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
pub type ParseErrorFn<'a, T> = &'a dyn Fn() -> Result<T, ParseError>;
|
|
||||||
|
|
||||||
token_type!(struct IntType (desc: "integer") -> RawNumber {
|
|
||||||
Token::Number(number @ RawNumber::Int(_)) => number
|
|
||||||
});
|
|
||||||
|
|
||||||
token_type!(struct DecimalType (desc: "decimal") -> RawNumber {
|
|
||||||
Token::Number(number @ RawNumber::Decimal(_)) => number
|
|
||||||
});
|
|
||||||
|
|
||||||
token_type!(struct StringType (desc: "string") -> (Span, Span) {
|
|
||||||
|outer, Token::String(inner)| => (inner, outer)
|
|
||||||
});
|
|
||||||
|
|
||||||
token_type!(struct BareType (desc: "word") -> Span {
|
|
||||||
|span, Token::Bare| => span
|
|
||||||
});
|
|
||||||
|
|
||||||
token_type!(struct DotType (desc: "dot") -> Span {
|
|
||||||
|span, Token::EvaluationOperator(EvaluationOperator::Dot)| => span
|
|
||||||
});
|
|
||||||
|
|
||||||
token_type!(struct DotDotType (desc: "dotdot") -> Span {
|
|
||||||
|span, Token::EvaluationOperator(EvaluationOperator::DotDot)| => span
|
|
||||||
});
|
|
||||||
|
|
||||||
token_type!(struct CompareOperatorType (desc: "compare operator") -> (Span, CompareOperator) {
|
|
||||||
|span, Token::CompareOperator(operator)| => (span, operator)
|
|
||||||
});
|
|
||||||
|
|
||||||
token_type!(struct ExternalWordType (desc: "external word") -> Span {
|
|
||||||
|span, Token::ExternalWord| => span
|
|
||||||
});
|
|
||||||
|
|
||||||
token_type!(struct ExternalCommandType (desc: "external command") -> (Span, Span) {
|
|
||||||
|outer, Token::ExternalCommand(inner)| => (inner, outer)
|
|
||||||
});
|
|
||||||
|
|
||||||
token_type!(struct CommentType (desc: "comment") -> (Comment, Span) {
|
|
||||||
|outer, Token::Comment(comment)| => (comment, outer)
|
|
||||||
});
|
|
||||||
|
|
||||||
token_type!(struct SeparatorType (desc: "separator") -> Span {
|
|
||||||
|span, Token::Separator| => span
|
|
||||||
});
|
|
||||||
|
|
||||||
token_type!(struct WhitespaceType (desc: "whitespace") -> Span {
|
|
||||||
|span, Token::Whitespace| => span
|
|
||||||
});
|
|
||||||
|
|
||||||
token_type!(struct WordType (desc: "word") -> Span {
|
|
||||||
|span, Token::Bare| => span
|
|
||||||
});
|
|
||||||
|
|
||||||
token_type!(struct ItVarType (desc: "$it") -> (Span, Span) {
|
|
||||||
|outer, Token::ItVariable(inner)| => (inner, outer)
|
|
||||||
});
|
|
||||||
|
|
||||||
token_type!(struct VarType (desc: "variable") -> (Span, Span) {
|
|
||||||
|outer, Token::Variable(inner)| => (inner, outer)
|
|
||||||
});
|
|
||||||
|
|
||||||
token_type!(struct PipelineType (desc: "pipeline") -> Pipeline {
|
|
||||||
Token::Pipeline(pipeline) => pipeline
|
|
||||||
});
|
|
||||||
|
|
||||||
token_type!(struct BlockType (desc: "block") -> DelimitedNode {
|
|
||||||
Token::Delimited(block @ DelimitedNode { delimiter: Delimiter::Brace, .. }) => block
|
|
||||||
});
|
|
||||||
|
|
||||||
token_type!(struct SquareType (desc: "square") -> DelimitedNode {
|
|
||||||
Token::Delimited(square @ DelimitedNode { delimiter: Delimiter::Square, .. }) => square
|
|
||||||
});
|
|
||||||
|
|
||||||
pub trait TokenType {
|
|
||||||
type Output;
|
|
||||||
|
|
||||||
fn desc(&self) -> Cow<'static, str>;
|
|
||||||
|
|
||||||
fn extract_token_value(
|
|
||||||
&self,
|
|
||||||
token: &SpannedToken,
|
|
||||||
err: ParseErrorFn<Self::Output>,
|
|
||||||
) -> Result<Self::Output, ParseError>;
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Token {
|
|
||||||
pub fn into_spanned(self, span: impl Into<Span>) -> SpannedToken {
|
|
||||||
SpannedToken {
|
|
||||||
unspanned: self,
|
|
||||||
span: span.into(),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Getters)]
|
|
||||||
pub struct SpannedToken {
|
|
||||||
#[get = "pub"]
|
|
||||||
unspanned: Token,
|
|
||||||
span: Span,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Deref for SpannedToken {
|
|
||||||
type Target = Token;
|
|
||||||
fn deref(&self) -> &Self::Target {
|
|
||||||
&self.unspanned
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl HasSpan for SpannedToken {
|
|
||||||
fn span(&self) -> Span {
|
|
||||||
self.span
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl ShellTypeName for SpannedToken {
|
|
||||||
fn type_name(&self) -> &'static str {
|
|
||||||
self.unspanned.type_name()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl PrettyDebugWithSource for SpannedToken {
|
|
||||||
fn pretty_debug(&self, source: &str) -> DebugDocBuilder {
|
|
||||||
match self.unspanned() {
|
|
||||||
Token::Number(number) => number.pretty_debug(source),
|
|
||||||
Token::CompareOperator(operator) => operator.pretty_debug(source),
|
|
||||||
Token::EvaluationOperator(operator) => operator.pretty_debug(source),
|
|
||||||
Token::String(_) | Token::GlobPattern | Token::Bare => {
|
|
||||||
b::primitive(self.span.slice(source))
|
|
||||||
}
|
|
||||||
Token::Variable(_) => b::var(self.span.slice(source)),
|
|
||||||
Token::ItVariable(_) => b::keyword(self.span.slice(source)),
|
|
||||||
Token::ExternalCommand(_) => b::description(self.span.slice(source)),
|
|
||||||
Token::ExternalWord => b::description(self.span.slice(source)),
|
|
||||||
Token::Call(call) => call.pretty_debug(source),
|
|
||||||
Token::Delimited(delimited) => delimited.pretty_debug(source),
|
|
||||||
Token::Pipeline(pipeline) => pipeline.pretty_debug(source),
|
|
||||||
Token::Flag(flag) => flag.pretty_debug(source),
|
|
||||||
Token::Garbage => b::error(self.span.slice(source)),
|
|
||||||
Token::Whitespace => b::typed(
|
|
||||||
"whitespace",
|
|
||||||
b::description(format!("{:?}", self.span.slice(source))),
|
|
||||||
),
|
|
||||||
Token::Separator => b::typed(
|
|
||||||
"separator",
|
|
||||||
b::description(format!("{:?}", self.span.slice(source))),
|
|
||||||
),
|
|
||||||
Token::Comment(comment) => {
|
|
||||||
b::typed("comment", b::description(comment.text.slice(source)))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl ShellTypeName for Token {
|
|
||||||
fn type_name(&self) -> &'static str {
|
|
||||||
match self {
|
|
||||||
Token::Number(_) => "number",
|
|
||||||
Token::CompareOperator(_) => "comparison operator",
|
|
||||||
Token::EvaluationOperator(EvaluationOperator::Dot) => "dot",
|
|
||||||
Token::EvaluationOperator(EvaluationOperator::DotDot) => "dot dot",
|
|
||||||
Token::String(_) => "string",
|
|
||||||
Token::Variable(_) => "variable",
|
|
||||||
Token::ItVariable(_) => "it variable",
|
|
||||||
Token::ExternalCommand(_) => "external command",
|
|
||||||
Token::ExternalWord => "external word",
|
|
||||||
Token::GlobPattern => "glob pattern",
|
|
||||||
Token::Bare => "word",
|
|
||||||
Token::Call(_) => "command",
|
|
||||||
Token::Delimited(d) => d.type_name(),
|
|
||||||
Token::Pipeline(_) => "pipeline",
|
|
||||||
Token::Flag(_) => "flag",
|
|
||||||
Token::Garbage => "garbage",
|
|
||||||
Token::Whitespace => "whitespace",
|
|
||||||
Token::Separator => "separator",
|
|
||||||
Token::Comment(_) => "comment",
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl From<&SpannedToken> for Span {
|
|
||||||
fn from(token: &SpannedToken) -> Span {
|
|
||||||
token.span
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl SpannedToken {
|
|
||||||
pub fn as_external_arg(&self, source: &Text) -> String {
|
|
||||||
self.span().slice(source).to_string()
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn source<'a>(&self, source: &'a Text) -> &'a str {
|
|
||||||
self.span().slice(source)
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn get_variable(&self) -> Result<(Span, Span), ShellError> {
|
|
||||||
match self.unspanned() {
|
|
||||||
Token::Variable(inner_span) => Ok((self.span(), *inner_span)),
|
|
||||||
_ => Err(ShellError::type_error("variable", self.spanned_type_name())),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn is_bare(&self) -> bool {
|
|
||||||
match self.unspanned() {
|
|
||||||
Token::Bare => true,
|
|
||||||
_ => false,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn is_string(&self) -> bool {
|
|
||||||
match self.unspanned() {
|
|
||||||
Token::String(_) => true,
|
|
||||||
_ => false,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn is_number(&self) -> bool {
|
|
||||||
match self.unspanned() {
|
|
||||||
Token::Number(_) => true,
|
|
||||||
_ => false,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn is_int(&self) -> bool {
|
|
||||||
match self.unspanned() {
|
|
||||||
Token::Number(RawNumber::Int(_)) => true,
|
|
||||||
_ => false,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn as_string(&self) -> Option<(Span, Span)> {
|
|
||||||
match self.unspanned() {
|
|
||||||
Token::String(inner_span) => Some((self.span(), *inner_span)),
|
|
||||||
_ => None,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn is_pattern(&self) -> bool {
|
|
||||||
match self.unspanned() {
|
|
||||||
Token::GlobPattern => true,
|
|
||||||
_ => false,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn is_word(&self) -> bool {
|
|
||||||
match self.unspanned() {
|
|
||||||
Token::Bare => true,
|
|
||||||
_ => false,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn is_dot(&self) -> bool {
|
|
||||||
match self.unspanned() {
|
|
||||||
Token::EvaluationOperator(EvaluationOperator::Dot) => true,
|
|
||||||
_ => false,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn is_separator(&self) -> bool {
|
|
||||||
match self.unspanned() {
|
|
||||||
Token::Separator => true,
|
|
||||||
_ => false,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn as_block(&self) -> Option<(Spanned<&[SpannedToken]>, (Span, Span))> {
|
|
||||||
match self.unspanned() {
|
|
||||||
Token::Delimited(DelimitedNode {
|
|
||||||
delimiter,
|
|
||||||
children,
|
|
||||||
spans,
|
|
||||||
}) if *delimiter == Delimiter::Brace => {
|
|
||||||
Some(((&children[..]).spanned(self.span()), *spans))
|
|
||||||
}
|
|
||||||
_ => None,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn is_external(&self) -> bool {
|
|
||||||
match self.unspanned() {
|
|
||||||
Token::ExternalCommand(..) => true,
|
|
||||||
_ => false,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub(crate) fn as_flag(&self, value: &str, short: Option<char>, source: &Text) -> Option<Flag> {
|
|
||||||
match self.unspanned() {
|
|
||||||
Token::Flag(flag) => {
|
|
||||||
let name = flag.name().slice(source);
|
|
||||||
|
|
||||||
match flag.kind {
|
|
||||||
FlagKind::Longhand if value == name => Some(*flag),
|
|
||||||
FlagKind::Shorthand => {
|
|
||||||
if let Some(short_hand) = short {
|
|
||||||
if short_hand.to_string() == name {
|
|
||||||
return Some(*flag);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
None
|
|
||||||
}
|
|
||||||
_ => None,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
_ => None,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn as_pipeline(&self) -> Result<Pipeline, ParseError> {
|
|
||||||
match self.unspanned() {
|
|
||||||
Token::Pipeline(pipeline) => Ok(pipeline.clone()),
|
|
||||||
_ => Err(ParseError::mismatch("pipeline", self.spanned_type_name())),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn is_whitespace(&self) -> bool {
|
|
||||||
match self.unspanned() {
|
|
||||||
Token::Whitespace => true,
|
|
||||||
_ => false,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Getters, new)]
|
|
||||||
#[get = "pub(crate)"]
|
|
||||||
pub struct DelimitedNode {
|
|
||||||
pub(crate) delimiter: Delimiter,
|
|
||||||
pub(crate) spans: (Span, Span),
|
|
||||||
pub(crate) children: Vec<SpannedToken>,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl HasSpan for DelimitedNode {
|
|
||||||
fn span(&self) -> Span {
|
|
||||||
self.spans.0.until(self.spans.1)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl PrettyDebugWithSource for DelimitedNode {
|
|
||||||
fn pretty_debug(&self, source: &str) -> DebugDocBuilder {
|
|
||||||
b::delimit(
|
|
||||||
self.delimiter.open(),
|
|
||||||
b::intersperse(
|
|
||||||
self.children.iter().map(|child| child.pretty_debug(source)),
|
|
||||||
b::space(),
|
|
||||||
),
|
|
||||||
self.delimiter.close(),
|
|
||||||
)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl DelimitedNode {
|
|
||||||
pub fn type_name(&self) -> &'static str {
|
|
||||||
match self.delimiter {
|
|
||||||
Delimiter::Brace => "braced expression",
|
|
||||||
Delimiter::Paren => "parenthesized expression",
|
|
||||||
Delimiter::Square => "array literal or index operator",
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug, Copy, Clone, Eq, PartialEq, Ord, PartialOrd)]
|
|
||||||
pub enum Delimiter {
|
|
||||||
Paren,
|
|
||||||
Brace,
|
|
||||||
Square,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Delimiter {
|
|
||||||
pub(crate) fn open(self) -> &'static str {
|
|
||||||
match self {
|
|
||||||
Delimiter::Paren => "(",
|
|
||||||
Delimiter::Brace => "{",
|
|
||||||
Delimiter::Square => "[",
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub(crate) fn close(self) -> &'static str {
|
|
||||||
match self {
|
|
||||||
Delimiter::Paren => ")",
|
|
||||||
Delimiter::Brace => "}",
|
|
||||||
Delimiter::Square => "]",
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Getters, new)]
|
|
||||||
#[get = "pub(crate)"]
|
|
||||||
pub struct PathNode {
|
|
||||||
head: Box<SpannedToken>,
|
|
||||||
tail: Vec<SpannedToken>,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[cfg(test)]
|
|
||||||
impl SpannedToken {
|
|
||||||
pub fn expect_external(&self) -> Span {
|
|
||||||
match self.unspanned() {
|
|
||||||
Token::ExternalCommand(span) => *span,
|
|
||||||
_ => panic!(
|
|
||||||
"Only call expect_external if you checked is_external first, found {:?}",
|
|
||||||
self
|
|
||||||
),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn expect_number(&self) -> RawNumber {
|
|
||||||
match self.unspanned() {
|
|
||||||
Token::Number(raw_number) => *raw_number,
|
|
||||||
other => panic!("Expected number, found {:?}", other),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn expect_string(&self) -> (Span, Span) {
|
|
||||||
match self.unspanned() {
|
|
||||||
Token::String(inner_span) => (self.span(), *inner_span),
|
|
||||||
other => panic!("Expected string, found {:?}", other),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn expect_list(&self) -> Spanned<Vec<SpannedToken>> {
|
|
||||||
match self.unspanned() {
|
|
||||||
Token::Pipeline(pipeline) => pipeline
|
|
||||||
.parts()
|
|
||||||
.iter()
|
|
||||||
.flat_map(|part| part.tokens())
|
|
||||||
.cloned()
|
|
||||||
.collect::<Vec<SpannedToken>>()
|
|
||||||
.spanned(self.span()),
|
|
||||||
_ => panic!("Expected list, found {:?}", self),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn expect_pattern(&self) -> Span {
|
|
||||||
match self.unspanned() {
|
|
||||||
Token::GlobPattern => self.span(),
|
|
||||||
_ => panic!("Expected pattern, found {:?}", self),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn expect_var(&self) -> (Span, Span) {
|
|
||||||
match self.unspanned() {
|
|
||||||
Token::Variable(inner_span) => (self.span(), *inner_span),
|
|
||||||
Token::ItVariable(inner_span) => (self.span(), *inner_span),
|
|
||||||
other => panic!("Expected var, found {:?}", other),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn expect_dot(&self) -> Span {
|
|
||||||
match self.unspanned() {
|
|
||||||
Token::EvaluationOperator(EvaluationOperator::Dot) => self.span(),
|
|
||||||
other => panic!("Expected dot, found {:?}", other),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn expect_bare(&self) -> Span {
|
|
||||||
match self.unspanned() {
|
|
||||||
Token::Bare => self.span(),
|
|
||||||
_ => panic!("Expected bare, found {:?}", self),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
@ -1,514 +0,0 @@
|
|||||||
use crate::parse::call_node::CallNode;
|
|
||||||
use crate::parse::comment::Comment;
|
|
||||||
use crate::parse::flag::{Flag, FlagKind};
|
|
||||||
use crate::parse::number::RawNumber;
|
|
||||||
use crate::parse::operator::{CompareOperator, EvaluationOperator};
|
|
||||||
use crate::parse::pipeline::{Pipeline, PipelineElement};
|
|
||||||
use crate::parse::token_tree::{DelimitedNode, Delimiter, SpannedToken, Token};
|
|
||||||
use bigdecimal::BigDecimal;
|
|
||||||
use nu_source::{Span, Spanned, SpannedItem};
|
|
||||||
use num_bigint::BigInt;
|
|
||||||
|
|
||||||
#[derive(Default)]
|
|
||||||
pub struct TokenTreeBuilder {
|
|
||||||
pos: usize,
|
|
||||||
output: String,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl TokenTreeBuilder {
|
|
||||||
pub fn new() -> Self {
|
|
||||||
Default::default()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub type CurriedToken = Box<dyn FnOnce(&mut TokenTreeBuilder) -> SpannedToken + 'static>;
|
|
||||||
pub type CurriedCall = Box<dyn FnOnce(&mut TokenTreeBuilder) -> Spanned<CallNode> + 'static>;
|
|
||||||
|
|
||||||
impl TokenTreeBuilder {
|
|
||||||
pub fn build(block: impl FnOnce(&mut Self) -> SpannedToken) -> (SpannedToken, String) {
|
|
||||||
let mut builder = TokenTreeBuilder::new();
|
|
||||||
let node = block(&mut builder);
|
|
||||||
(node, builder.output)
|
|
||||||
}
|
|
||||||
|
|
||||||
fn build_spanned<T>(
|
|
||||||
&mut self,
|
|
||||||
callback: impl FnOnce(&mut TokenTreeBuilder) -> T,
|
|
||||||
) -> Spanned<T> {
|
|
||||||
let start = self.pos;
|
|
||||||
let ret = callback(self);
|
|
||||||
let end = self.pos;
|
|
||||||
|
|
||||||
ret.spanned(Span::new(start, end))
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn pipeline(input: Vec<Vec<CurriedToken>>) -> CurriedToken {
|
|
||||||
Box::new(move |b| {
|
|
||||||
let start = b.pos;
|
|
||||||
|
|
||||||
let mut out: Vec<PipelineElement> = vec![];
|
|
||||||
|
|
||||||
let mut input = input.into_iter().peekable();
|
|
||||||
let head = input
|
|
||||||
.next()
|
|
||||||
.expect("A pipeline must contain at least one element");
|
|
||||||
|
|
||||||
let pipe = None;
|
|
||||||
let head = b.build_spanned(|b| head.into_iter().map(|node| node(b)).collect());
|
|
||||||
|
|
||||||
out.push(PipelineElement::new(pipe, head));
|
|
||||||
|
|
||||||
loop {
|
|
||||||
match input.next() {
|
|
||||||
None => break,
|
|
||||||
Some(node) => {
|
|
||||||
let pipe = Some(b.consume_span("|"));
|
|
||||||
let node =
|
|
||||||
b.build_spanned(|b| node.into_iter().map(|node| node(b)).collect());
|
|
||||||
|
|
||||||
out.push(PipelineElement::new(pipe, node));
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
let end = b.pos;
|
|
||||||
|
|
||||||
TokenTreeBuilder::spanned_pipeline(out, Span::new(start, end))
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn spanned_pipeline(input: Vec<PipelineElement>, span: impl Into<Span>) -> SpannedToken {
|
|
||||||
Token::Pipeline(Pipeline::new(input)).into_spanned(span)
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn token_list(input: Vec<CurriedToken>) -> CurriedToken {
|
|
||||||
Box::new(move |b| {
|
|
||||||
let start = b.pos;
|
|
||||||
let tokens = input.into_iter().map(|i| i(b)).collect();
|
|
||||||
let end = b.pos;
|
|
||||||
|
|
||||||
TokenTreeBuilder::spanned_token_list(tokens, Span::new(start, end))
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn spanned_token_list(input: Vec<SpannedToken>, span: impl Into<Span>) -> SpannedToken {
|
|
||||||
let span = span.into();
|
|
||||||
Token::Pipeline(Pipeline::new(vec![PipelineElement::new(
|
|
||||||
None,
|
|
||||||
input.spanned(span),
|
|
||||||
)]))
|
|
||||||
.into_spanned(span)
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn garbage(input: impl Into<String>) -> CurriedToken {
|
|
||||||
let input = input.into();
|
|
||||||
|
|
||||||
Box::new(move |b| {
|
|
||||||
let (start, end) = b.consume(&input);
|
|
||||||
b.pos = end;
|
|
||||||
|
|
||||||
TokenTreeBuilder::spanned_garbage(Span::new(start, end))
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn spanned_garbage(span: impl Into<Span>) -> SpannedToken {
|
|
||||||
Token::Garbage.into_spanned(span)
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn op(input: impl Into<CompareOperator>) -> CurriedToken {
|
|
||||||
let input = input.into();
|
|
||||||
|
|
||||||
Box::new(move |b| {
|
|
||||||
let (start, end) = b.consume(input.as_str());
|
|
||||||
|
|
||||||
b.pos = end;
|
|
||||||
|
|
||||||
TokenTreeBuilder::spanned_cmp_op(input, Span::new(start, end))
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn spanned_cmp_op(
|
|
||||||
input: impl Into<CompareOperator>,
|
|
||||||
span: impl Into<Span>,
|
|
||||||
) -> SpannedToken {
|
|
||||||
Token::CompareOperator(input.into()).into_spanned(span)
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn dot() -> CurriedToken {
|
|
||||||
Box::new(move |b| {
|
|
||||||
let (start, end) = b.consume(".");
|
|
||||||
|
|
||||||
b.pos = end;
|
|
||||||
|
|
||||||
TokenTreeBuilder::spanned_eval_op(".", Span::new(start, end))
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn dotdot() -> CurriedToken {
|
|
||||||
Box::new(move |b| {
|
|
||||||
let (start, end) = b.consume("..");
|
|
||||||
|
|
||||||
b.pos = end;
|
|
||||||
|
|
||||||
TokenTreeBuilder::spanned_eval_op("..", Span::new(start, end))
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn spanned_eval_op(
|
|
||||||
input: impl Into<EvaluationOperator>,
|
|
||||||
span: impl Into<Span>,
|
|
||||||
) -> SpannedToken {
|
|
||||||
Token::EvaluationOperator(input.into()).into_spanned(span)
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn string(input: impl Into<String>) -> CurriedToken {
|
|
||||||
let input = input.into();
|
|
||||||
|
|
||||||
Box::new(move |b| {
|
|
||||||
let (start, _) = b.consume("\"");
|
|
||||||
let (inner_start, inner_end) = b.consume(&input);
|
|
||||||
let (_, end) = b.consume("\"");
|
|
||||||
b.pos = end;
|
|
||||||
|
|
||||||
TokenTreeBuilder::spanned_string(
|
|
||||||
Span::new(inner_start, inner_end),
|
|
||||||
Span::new(start, end),
|
|
||||||
)
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn spanned_string(input: impl Into<Span>, span: impl Into<Span>) -> SpannedToken {
|
|
||||||
Token::String(input.into()).into_spanned(span)
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn bare(input: impl Into<String>) -> CurriedToken {
|
|
||||||
let input = input.into();
|
|
||||||
|
|
||||||
Box::new(move |b| {
|
|
||||||
let (start, end) = b.consume(&input);
|
|
||||||
b.pos = end;
|
|
||||||
|
|
||||||
TokenTreeBuilder::spanned_bare(Span::new(start, end))
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn spanned_bare(span: impl Into<Span>) -> SpannedToken {
|
|
||||||
Token::Bare.into_spanned(span)
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn pattern(input: impl Into<String>) -> CurriedToken {
|
|
||||||
let input = input.into();
|
|
||||||
|
|
||||||
Box::new(move |b| {
|
|
||||||
let (start, end) = b.consume(&input);
|
|
||||||
b.pos = end;
|
|
||||||
|
|
||||||
TokenTreeBuilder::spanned_pattern(Span::new(start, end))
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn spanned_pattern(input: impl Into<Span>) -> SpannedToken {
|
|
||||||
Token::GlobPattern.into_spanned(input)
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn external_word(input: impl Into<String>) -> CurriedToken {
|
|
||||||
let input = input.into();
|
|
||||||
|
|
||||||
Box::new(move |b| {
|
|
||||||
let (start, end) = b.consume(&input);
|
|
||||||
b.pos = end;
|
|
||||||
|
|
||||||
TokenTreeBuilder::spanned_external_word(Span::new(start, end))
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn spanned_external_word(input: impl Into<Span>) -> SpannedToken {
|
|
||||||
Token::ExternalWord.into_spanned(input)
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn external_command(input: impl Into<String>) -> CurriedToken {
|
|
||||||
let input = input.into();
|
|
||||||
|
|
||||||
Box::new(move |b| {
|
|
||||||
let (outer_start, _) = b.consume("^");
|
|
||||||
let (inner_start, end) = b.consume(&input);
|
|
||||||
b.pos = end;
|
|
||||||
|
|
||||||
TokenTreeBuilder::spanned_external_command(
|
|
||||||
Span::new(inner_start, end),
|
|
||||||
Span::new(outer_start, end),
|
|
||||||
)
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn spanned_external_command(
|
|
||||||
inner: impl Into<Span>,
|
|
||||||
outer: impl Into<Span>,
|
|
||||||
) -> SpannedToken {
|
|
||||||
Token::ExternalCommand(inner.into()).into_spanned(outer)
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn int(input: impl Into<BigInt>) -> CurriedToken {
|
|
||||||
let int = input.into();
|
|
||||||
|
|
||||||
Box::new(move |b| {
|
|
||||||
let (start, end) = b.consume(&int.to_string());
|
|
||||||
b.pos = end;
|
|
||||||
|
|
||||||
TokenTreeBuilder::spanned_number(
|
|
||||||
RawNumber::Int(Span::new(start, end)),
|
|
||||||
Span::new(start, end),
|
|
||||||
)
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn decimal(input: impl Into<BigDecimal>) -> CurriedToken {
|
|
||||||
let decimal = input.into();
|
|
||||||
|
|
||||||
Box::new(move |b| {
|
|
||||||
let (start, end) = b.consume(&decimal.to_string());
|
|
||||||
b.pos = end;
|
|
||||||
|
|
||||||
TokenTreeBuilder::spanned_number(
|
|
||||||
RawNumber::Decimal(Span::new(start, end)),
|
|
||||||
Span::new(start, end),
|
|
||||||
)
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn spanned_number(input: impl Into<RawNumber>, span: impl Into<Span>) -> SpannedToken {
|
|
||||||
Token::Number(input.into()).into_spanned(span)
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn var(input: impl Into<String>) -> CurriedToken {
|
|
||||||
let input = input.into();
|
|
||||||
|
|
||||||
Box::new(move |b| {
|
|
||||||
let (start, _) = b.consume("$");
|
|
||||||
let (inner_start, end) = b.consume(&input);
|
|
||||||
|
|
||||||
TokenTreeBuilder::spanned_var(Span::new(inner_start, end), Span::new(start, end))
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn spanned_var(input: impl Into<Span>, span: impl Into<Span>) -> SpannedToken {
|
|
||||||
Token::Variable(input.into()).into_spanned(span)
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn it_var() -> CurriedToken {
|
|
||||||
Box::new(move |b| {
|
|
||||||
let (start, _) = b.consume("$");
|
|
||||||
let (inner_start, end) = b.consume("it");
|
|
||||||
|
|
||||||
TokenTreeBuilder::spanned_it_var(Span::new(inner_start, end), Span::new(start, end))
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn spanned_it_var(input: impl Into<Span>, span: impl Into<Span>) -> SpannedToken {
|
|
||||||
Token::ItVariable(input.into()).into_spanned(span)
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn flag(input: impl Into<String>) -> CurriedToken {
|
|
||||||
let input = input.into();
|
|
||||||
|
|
||||||
Box::new(move |b| {
|
|
||||||
let (start, _) = b.consume("--");
|
|
||||||
let (inner_start, end) = b.consume(&input);
|
|
||||||
|
|
||||||
TokenTreeBuilder::spanned_flag(Span::new(inner_start, end), Span::new(start, end))
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn spanned_flag(input: impl Into<Span>, span: impl Into<Span>) -> SpannedToken {
|
|
||||||
let span = span.into();
|
|
||||||
Token::Flag(Flag::new(FlagKind::Longhand, input.into())).into_spanned(span)
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn shorthand(input: impl Into<String>) -> CurriedToken {
|
|
||||||
let input = input.into();
|
|
||||||
|
|
||||||
Box::new(move |b| {
|
|
||||||
let (start, _) = b.consume("-");
|
|
||||||
let (inner_start, end) = b.consume(&input);
|
|
||||||
|
|
||||||
TokenTreeBuilder::spanned_shorthand((inner_start, end), (start, end))
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn spanned_shorthand(input: impl Into<Span>, span: impl Into<Span>) -> SpannedToken {
|
|
||||||
let span = span.into();
|
|
||||||
|
|
||||||
Token::Flag(Flag::new(FlagKind::Shorthand, input.into())).into_spanned(span)
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn call(head: CurriedToken, input: Vec<CurriedToken>) -> CurriedCall {
|
|
||||||
Box::new(move |b| {
|
|
||||||
let start = b.pos;
|
|
||||||
|
|
||||||
let head_node = head(b);
|
|
||||||
|
|
||||||
let mut nodes = vec![head_node];
|
|
||||||
for item in input {
|
|
||||||
nodes.push(item(b));
|
|
||||||
}
|
|
||||||
|
|
||||||
let end = b.pos;
|
|
||||||
|
|
||||||
TokenTreeBuilder::spanned_call(nodes, Span::new(start, end))
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn spanned_call(input: Vec<SpannedToken>, span: impl Into<Span>) -> Spanned<CallNode> {
|
|
||||||
if input.is_empty() {
|
|
||||||
panic!("BUG: spanned call (TODO)")
|
|
||||||
}
|
|
||||||
|
|
||||||
let mut input = input.into_iter();
|
|
||||||
|
|
||||||
if let Some(head) = input.next() {
|
|
||||||
let tail = input.collect();
|
|
||||||
|
|
||||||
CallNode::new(Box::new(head), tail).spanned(span.into())
|
|
||||||
} else {
|
|
||||||
unreachable!("Internal error: spanned_call failed")
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fn consume_delimiter(
|
|
||||||
&mut self,
|
|
||||||
input: Vec<CurriedToken>,
|
|
||||||
_open: &str,
|
|
||||||
_close: &str,
|
|
||||||
) -> (Span, Span, Span, Vec<SpannedToken>) {
|
|
||||||
let (start_open_paren, end_open_paren) = self.consume("(");
|
|
||||||
let mut output = vec![];
|
|
||||||
for item in input {
|
|
||||||
output.push(item(self));
|
|
||||||
}
|
|
||||||
|
|
||||||
let (start_close_paren, end_close_paren) = self.consume(")");
|
|
||||||
|
|
||||||
let open = Span::new(start_open_paren, end_open_paren);
|
|
||||||
let close = Span::new(start_close_paren, end_close_paren);
|
|
||||||
let whole = Span::new(start_open_paren, end_close_paren);
|
|
||||||
|
|
||||||
(open, close, whole, output)
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn parens(input: Vec<CurriedToken>) -> CurriedToken {
|
|
||||||
Box::new(move |b| {
|
|
||||||
let (open, close, whole, output) = b.consume_delimiter(input, "(", ")");
|
|
||||||
|
|
||||||
TokenTreeBuilder::spanned_parens(output, (open, close), whole)
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn spanned_parens(
|
|
||||||
input: impl Into<Vec<SpannedToken>>,
|
|
||||||
spans: (Span, Span),
|
|
||||||
span: impl Into<Span>,
|
|
||||||
) -> SpannedToken {
|
|
||||||
Token::Delimited(DelimitedNode::new(Delimiter::Paren, spans, input.into()))
|
|
||||||
.into_spanned(span.into())
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn square(input: Vec<CurriedToken>) -> CurriedToken {
|
|
||||||
Box::new(move |b| {
|
|
||||||
let (open, close, whole, tokens) = b.consume_delimiter(input, "[", "]");
|
|
||||||
|
|
||||||
TokenTreeBuilder::spanned_square(tokens, (open, close), whole)
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn spanned_square(
|
|
||||||
input: impl Into<Vec<SpannedToken>>,
|
|
||||||
spans: (Span, Span),
|
|
||||||
span: impl Into<Span>,
|
|
||||||
) -> SpannedToken {
|
|
||||||
Token::Delimited(DelimitedNode::new(Delimiter::Square, spans, input.into()))
|
|
||||||
.into_spanned(span)
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn braced(input: Vec<CurriedToken>) -> CurriedToken {
|
|
||||||
Box::new(move |b| {
|
|
||||||
let (open, close, whole, tokens) = b.consume_delimiter(input, "{", "}");
|
|
||||||
|
|
||||||
TokenTreeBuilder::spanned_brace(tokens, (open, close), whole)
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn spanned_brace(
|
|
||||||
input: impl Into<Vec<SpannedToken>>,
|
|
||||||
spans: (Span, Span),
|
|
||||||
span: impl Into<Span>,
|
|
||||||
) -> SpannedToken {
|
|
||||||
Token::Delimited(DelimitedNode::new(Delimiter::Brace, spans, input.into()))
|
|
||||||
.into_spanned(span)
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn sp() -> CurriedToken {
|
|
||||||
Box::new(|b| {
|
|
||||||
let (start, end) = b.consume(" ");
|
|
||||||
Token::Whitespace.into_spanned((start, end))
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn ws(input: impl Into<String>) -> CurriedToken {
|
|
||||||
let input = input.into();
|
|
||||||
|
|
||||||
Box::new(move |b| {
|
|
||||||
let (start, end) = b.consume(&input);
|
|
||||||
TokenTreeBuilder::spanned_ws(Span::new(start, end))
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn spanned_ws(span: impl Into<Span>) -> SpannedToken {
|
|
||||||
Token::Whitespace.into_spanned(span)
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn sep(input: impl Into<String>) -> CurriedToken {
|
|
||||||
let input = input.into();
|
|
||||||
|
|
||||||
Box::new(move |b| {
|
|
||||||
let (start, end) = b.consume(&input);
|
|
||||||
TokenTreeBuilder::spanned_sep(Span::new(start, end))
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn spanned_sep(span: impl Into<Span>) -> SpannedToken {
|
|
||||||
Token::Separator.into_spanned(span)
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn comment(input: impl Into<String>) -> CurriedToken {
|
|
||||||
let input = input.into();
|
|
||||||
|
|
||||||
Box::new(move |b| {
|
|
||||||
let outer_start = b.pos;
|
|
||||||
b.consume("#");
|
|
||||||
let (start, end) = b.consume(&input);
|
|
||||||
let outer_end = b.pos;
|
|
||||||
|
|
||||||
TokenTreeBuilder::spanned_comment((start, end), (outer_start, outer_end))
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn spanned_comment(input: impl Into<Span>, span: impl Into<Span>) -> SpannedToken {
|
|
||||||
let span = span.into();
|
|
||||||
|
|
||||||
Token::Comment(Comment::line(input)).into_spanned(span)
|
|
||||||
}
|
|
||||||
|
|
||||||
fn consume(&mut self, input: &str) -> (usize, usize) {
|
|
||||||
let start = self.pos;
|
|
||||||
self.pos += input.len();
|
|
||||||
self.output.push_str(input);
|
|
||||||
(start, self.pos)
|
|
||||||
}
|
|
||||||
|
|
||||||
fn consume_span(&mut self, input: &str) -> Span {
|
|
||||||
let start = self.pos;
|
|
||||||
self.pos += input.len();
|
|
||||||
self.output.push_str(input);
|
|
||||||
Span::new(start, self.pos)
|
|
||||||
}
|
|
||||||
}
|
|
@ -1,127 +0,0 @@
|
|||||||
use crate::parse::parser::Number;
use nu_protocol::{Primitive, UntaggedValue};
use nu_source::{b, DebugDocBuilder, PrettyDebug};
use num_traits::ToPrimitive;
use serde::{Deserialize, Serialize};

use std::str::FromStr;

#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Deserialize, Serialize)]
pub enum Unit {
    // Filesize units
    Byte,
    Kilobyte,
    Megabyte,
    Gigabyte,
    Terabyte,
    Petabyte,

    // Duration units
    Second,
    Minute,
    Hour,
    Day,
    Week,
    Month,
    Year,
}

impl PrettyDebug for Unit {
    fn pretty(&self) -> DebugDocBuilder {
        b::keyword(self.as_str())
    }
}

fn convert_number_to_u64(number: &Number) -> u64 {
    match number {
        Number::Int(big_int) => {
            if let Some(x) = big_int.to_u64() {
                x
            } else {
                unreachable!("Internal error: convert_number_to_u64 given incompatible number")
            }
        }
        Number::Decimal(big_decimal) => {
            if let Some(x) = big_decimal.to_u64() {
                x
            } else {
                unreachable!("Internal error: convert_number_to_u64 given incompatible number")
            }
        }
    }
}

impl Unit {
    pub fn as_str(self) -> &'static str {
        match self {
            Unit::Byte => "B",
            Unit::Kilobyte => "KB",
            Unit::Megabyte => "MB",
            Unit::Gigabyte => "GB",
            Unit::Terabyte => "TB",
            Unit::Petabyte => "PB",
            Unit::Second => "s",
            Unit::Minute => "m",
            Unit::Hour => "h",
            Unit::Day => "d",
            Unit::Week => "w",
            Unit::Month => "M",
            Unit::Year => "y",
        }
    }

    pub fn compute(self, size: &Number) -> UntaggedValue {
        let size = size.clone();

        match self {
            Unit::Byte => number(size),
            Unit::Kilobyte => number(size * 1024),
            Unit::Megabyte => number(size * 1024 * 1024),
            Unit::Gigabyte => number(size * 1024 * 1024 * 1024),
            Unit::Terabyte => number(size * 1024 * 1024 * 1024 * 1024),
            Unit::Petabyte => number(size * 1024 * 1024 * 1024 * 1024 * 1024),
            Unit::Second => duration(convert_number_to_u64(&size)),
            Unit::Minute => duration(60 * convert_number_to_u64(&size)),
            Unit::Hour => duration(60 * 60 * convert_number_to_u64(&size)),
            Unit::Day => duration(24 * 60 * 60 * convert_number_to_u64(&size)),
            Unit::Week => duration(7 * 24 * 60 * 60 * convert_number_to_u64(&size)),
            Unit::Month => duration(30 * 24 * 60 * 60 * convert_number_to_u64(&size)),
            Unit::Year => duration(365 * 24 * 60 * 60 * convert_number_to_u64(&size)),
        }
    }
}

fn number(number: impl Into<Number>) -> UntaggedValue {
    let number = number.into();

    match number {
        Number::Int(int) => UntaggedValue::Primitive(Primitive::Int(int)),
        Number::Decimal(decimal) => UntaggedValue::Primitive(Primitive::Decimal(decimal)),
    }
}

pub fn duration(secs: u64) -> UntaggedValue {
    UntaggedValue::Primitive(Primitive::Duration(secs))
}

impl FromStr for Unit {
    type Err = ();
    fn from_str(input: &str) -> Result<Self, <Self as std::str::FromStr>::Err> {
        match input {
            "B" | "b" => Ok(Unit::Byte),
            "KB" | "kb" | "Kb" | "K" | "k" => Ok(Unit::Kilobyte),
            "MB" | "mb" | "Mb" => Ok(Unit::Megabyte),
            "GB" | "gb" | "Gb" => Ok(Unit::Gigabyte),
            "TB" | "tb" | "Tb" => Ok(Unit::Terabyte),
            "PB" | "pb" | "Pb" => Ok(Unit::Petabyte),
            "s" => Ok(Unit::Second),
            "m" => Ok(Unit::Minute),
            "h" => Ok(Unit::Hour),
            "d" => Ok(Unit::Day),
            "w" => Ok(Unit::Week),
            "M" => Ok(Unit::Month),
            "y" => Ok(Unit::Year),
            _ => Err(()),
        }
    }
}
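For reference, a hedged usage sketch (not part of this commit) of how the removed Unit type fits together: it exercises only the FromStr and compute items defined in the file above, and assumes num_bigint's From<i32> conversion for building a Number::Int plus the usual `use super::*` test-module pattern.

#[cfg(test)]
mod unit_usage_sketch {
    use super::*;

    #[test]
    fn parses_and_scales_kilobytes_and_minutes() {
        // "kb" (any casing listed in FromStr above) resolves to Unit::Kilobyte.
        let unit: Unit = "kb".parse().expect("recognized unit suffix");
        assert_eq!(unit.as_str(), "KB");

        // compute scales filesizes by 1024, so 2 KB becomes 2048 bytes in an UntaggedValue.
        match unit.compute(&Number::Int(2.into())) {
            UntaggedValue::Primitive(Primitive::Int(i)) => assert_eq!(i, 2048.into()),
            other => panic!("expected an int primitive, got {:?}", other),
        }

        // Duration units go through convert_number_to_u64: 10 minutes -> Duration(600).
        match Unit::Minute.compute(&Number::Int(10.into())) {
            UntaggedValue::Primitive(Primitive::Duration(secs)) => assert_eq!(secs, 600),
            other => panic!("expected a duration primitive, got {:?}", other),
        }
    }
}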
@@ -1,2 +0,0 @@
pub(crate) mod parser;
pub(crate) mod shape;
@@ -1,272 +0,0 @@
use crate::parse::number::RawNumber;
use crate::parse::parser::{is_boundary, to_list};
use crate::parse::token_tree::SpannedToken;
use crate::parse::token_tree_builder::TokenTreeBuilder;
use nu_source::{HasSpan, NomSpan, Span, Spanned, SpannedItem};

use nom::branch::alt;
use nom::bytes::complete::{escaped, tag};
use nom::character::complete::*;
use nom::combinator::*;
use nom::multi::*;
use nom::IResult;
use nom_tracable::tracable_parser;

#[tracable_parser]
pub fn parse_line_with_separator<'a, 'b>(
    separator: &'b str,
    input: NomSpan<'a>,
) -> IResult<NomSpan<'a>, Spanned<Vec<SpannedToken>>> {
    let start = input.offset;
    let mut nodes = vec![];
    let mut next_input = input;

    loop {
        let node_result = to_list(leaf(separator))(next_input);

        let (after_node_input, next_nodes) = match node_result {
            Err(_) => break,
            Ok((after_node_input, next_node)) => (after_node_input, next_node),
        };

        nodes.extend(next_nodes);

        match separated_by(separator)(after_node_input) {
            Err(_) => {
                next_input = after_node_input;
                break;
            }
            Ok((input, s)) => {
                nodes.push(s);
                next_input = input;
            }
        }
    }

    let end = next_input.offset;

    Ok((next_input, nodes.spanned(Span::new(start, end))))
}

#[tracable_parser]
pub fn fallback_number_without(c: char) -> impl Fn(NomSpan) -> IResult<NomSpan, SpannedToken> {
    move |input| {
        let (input, number) = fallback_raw_number_without(c)(input)?;

        Ok((
            input,
            TokenTreeBuilder::spanned_number(number, number.span()),
        ))
    }
}

#[tracable_parser]
pub fn fallback_raw_number_without(c: char) -> impl Fn(NomSpan) -> IResult<NomSpan, RawNumber> {
    move |input| {
        let _anchoral = input;
        let start = input.offset;
        let (input, _neg) = opt(tag("-"))(input)?;
        let (input, _head) = digit1(input)?;
        let after_int_head = input;

        match input.fragment.chars().next() {
            None => return Ok((input, RawNumber::int(Span::new(start, input.offset)))),
            Some('.') => (),
            other if is_boundary(other) || other == Some(c) => {
                return Ok((input, RawNumber::int(Span::new(start, input.offset))))
            }
            _ => {
                return Err(nom::Err::Error(nom::error::make_error(
                    input,
                    nom::error::ErrorKind::Tag,
                )))
            }
        }

        let dot: IResult<NomSpan, NomSpan, (NomSpan, nom::error::ErrorKind)> = tag(".")(input);

        let input = match dot {
            Ok((input, _dot)) => input,

            // it's just an integer
            Err(_) => return Ok((input, RawNumber::int(Span::new(start, input.offset)))),
        };

        let tail_digits_result: IResult<NomSpan, _> = digit1(input);

        let (input, _tail) = match tail_digits_result {
            Ok((input, tail)) => (input, tail),
            Err(_) => {
                return Ok((
                    after_int_head,
                    RawNumber::int((start, after_int_head.offset)),
                ))
            }
        };

        let end = input.offset;

        let next = input.fragment.chars().next();

        if is_boundary(next) || next == Some(c) {
            Ok((input, RawNumber::decimal(Span::new(start, end))))
        } else {
            Err(nom::Err::Error(nom::error::make_error(
                input,
                nom::error::ErrorKind::Tag,
            )))
        }
    }
}

#[tracable_parser]
pub fn leaf(c: &str) -> impl Fn(NomSpan) -> IResult<NomSpan, SpannedToken> + '_ {
    move |input| {
        let separator = c.chars().next().unwrap_or_else(|| ',');

        let (input, node) = alt((
            fallback_number_without(separator),
            string,
            fallback_string_without(c),
        ))(input)?;

        Ok((input, node))
    }
}

#[tracable_parser]
pub fn separated_by(c: &str) -> impl Fn(NomSpan) -> IResult<NomSpan, SpannedToken> + '_ {
    move |input| {
        let left = input.offset;
        let (input, _) = tag(c)(input)?;
        let right = input.offset;

        Ok((input, TokenTreeBuilder::spanned_sep(Span::new(left, right))))
    }
}

#[tracable_parser]
pub fn dq_string(input: NomSpan) -> IResult<NomSpan, SpannedToken> {
    let start = input.offset;
    let (input, _) = char('"')(input)?;
    let start1 = input.offset;
    let (input, _) = escaped(
        none_of(r#"\""#),
        '\\',
        nom::character::complete::one_of(r#"\"rnt"#),
    )(input)?;

    let end1 = input.offset;
    let (input, _) = char('"')(input)?;
    let end = input.offset;
    Ok((
        input,
        TokenTreeBuilder::spanned_string(Span::new(start1, end1), Span::new(start, end)),
    ))
}

#[tracable_parser]
pub fn sq_string(input: NomSpan) -> IResult<NomSpan, SpannedToken> {
    let start = input.offset;
    let (input, _) = char('\'')(input)?;
    let start1 = input.offset;
    let (input, _) = many0(none_of("\'"))(input)?;
    let end1 = input.offset;
    let (input, _) = char('\'')(input)?;
    let end = input.offset;

    Ok((
        input,
        TokenTreeBuilder::spanned_string(Span::new(start1, end1), Span::new(start, end)),
    ))
}

#[tracable_parser]
pub fn string(input: NomSpan) -> IResult<NomSpan, SpannedToken> {
    alt((sq_string, dq_string))(input)
}

#[tracable_parser]
pub fn fallback_string_without(c: &str) -> impl Fn(NomSpan) -> IResult<NomSpan, SpannedToken> + '_ {
    move |input| {
        let start = input.offset;
        let (input, _) = many0(none_of(c))(input)?;
        let end = input.offset;

        Ok((
            input,
            TokenTreeBuilder::spanned_string(Span::new(start, end), Span::new(start, end)),
        ))
    }
}

#[cfg(test)]
mod tests {
    use crate::parse::token_tree_builder::TokenTreeBuilder::{self, self as b};
    use crate::parse::util::parse_line_with_separator;
    use crate::test_support::apply;
    use nom::IResult;

    use crate::parse::pipeline::PipelineElement;
    use crate::parse::token_tree::SpannedToken;
    use nu_source::NomSpan;
    use nu_source::PrettyDebugWithSource;

    use pretty_assertions::assert_eq;

    pub fn nodes(input: NomSpan) -> IResult<NomSpan, SpannedToken> {
        let (input, tokens) = parse_line_with_separator(",", input)?;
        let span = tokens.span;

        Ok((
            input,
            TokenTreeBuilder::spanned_pipeline(vec![PipelineElement::new(None, tokens)], span),
        ))
    }

    #[test]
    fn separators() {
        equal_tokens! {
            <nodes>
            r#""name","lastname","age""# -> b::token_list(vec![
                b::string("name"),
                b::sep(","),
                b::string("lastname"),
                b::sep(","),
                b::string("age")
            ])
        }

        equal_tokens! {
            <nodes>
            r#""Andrés","Robalino",12"# -> b::token_list(vec![
                b::string("Andrés"),
                b::sep(","),
                b::string("Robalino"),
                b::sep(","),
                b::int(12)
            ])
        }
    }

    #[test]
    fn strings() {
        equal_tokens! {
            <nodes>
            r#""andres""# -> b::token_list(vec![b::string("andres")])
        }
    }

    #[test]
    fn numbers() {
        equal_tokens! {
            <nodes>
            "123" -> b::token_list(vec![b::int(123)])
        }

        equal_tokens! {
            <nodes>
            "-123" -> b::token_list(vec![b::int(-123)])
        }
    }
}
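A hedged summary (not part of this commit) of the grammar the removed parser implemented, written as comments because building a NomSpan by hand is outside the scope of this excerpt:

// parse_line_with_separator(sep, input) loops: parse one `leaf`, then optionally one
// `separated_by(sep)` token, collecting everything into a single spanned token list.
//
// leaf(sep) tries, in order:
//   1. fallback_number_without(sep): an integer or decimal that must end at a boundary
//      or at the separator character;
//   2. string: a single- or double-quoted string;
//   3. fallback_string_without(sep): a bare run of characters up to the separator.
//
// Per the tests above, r#""Andrés","Robalino",12"# therefore flattens to
// [String, Separator, String, Separator, Number(Int)].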
@@ -1,91 +0,0 @@
use crate::hir::{
    self, syntax_shape::ExpandSyntax, syntax_shape::FlatShape, syntax_shape::NumberExpressionShape,
    syntax_shape::StringShape,
};
use crate::hir::{Expression, TokensIterator};
use crate::parse::token_tree::SeparatorType;

use nu_errors::ParseError;
use nu_protocol::UntaggedValue;
use nu_source::Span;

#[derive(Debug, Copy, Clone)]
pub struct LineSeparatedShape;

impl ExpandSyntax for LineSeparatedShape {
    type Output = Result<Vec<UntaggedValue>, ParseError>;

    fn name(&self) -> &'static str {
        "any string line separated by"
    }

    fn expand<'a, 'b>(
        &self,
        token_nodes: &mut TokensIterator<'_>,
    ) -> Result<Vec<UntaggedValue>, ParseError> {
        let source = token_nodes.source();

        if token_nodes.at_end() {
            return Ok(vec![]);
        }

        let mut entries = vec![];

        loop {
            let field = {
                token_nodes
                    .expand_syntax(NumberExpressionShape)
                    .or_else(|_| {
                        token_nodes
                            .expand_syntax(StringShape)
                            .map(|syntax| Expression::string(syntax.inner).into_expr(syntax.span))
                    })
            };

            if let Ok(field) = field {
                match &field.expr {
                    Expression::Literal(hir::Literal::Number(crate::Number::Int(i))) => {
                        entries.push(UntaggedValue::int(i.clone()))
                    }
                    Expression::Literal(hir::Literal::Number(crate::Number::Decimal(d))) => {
                        entries.push(UntaggedValue::decimal(d.clone()))
                    }
                    Expression::Literal(hir::Literal::String(span)) => {
                        if span.is_closed() {
                            entries.push(UntaggedValue::nothing())
                        } else {
                            entries.push(UntaggedValue::string(span.slice(&source)))
                        }
                    }
                    _ => {}
                }
            }

            match token_nodes.expand_infallible(SeparatorShape) {
                Err(err) if !token_nodes.at_end() => return Err(err),
                _ => {}
            }

            if token_nodes.at_end() {
                break;
            }
        }

        Ok(entries)
    }
}

#[derive(Debug, Copy, Clone)]
pub struct SeparatorShape;

impl ExpandSyntax for SeparatorShape {
    type Output = Result<Span, ParseError>;

    fn name(&self) -> &'static str {
        "separated"
    }

    fn expand<'a, 'b>(&self, token_nodes: &'b mut TokensIterator<'a>) -> Result<Span, ParseError> {
        token_nodes.expand_token(SeparatorType, |span| Ok((FlatShape::Separator, span)))
    }
}
@@ -1,4 +0,0 @@
mod line_delimited_parser;

pub use line_delimited_parser::parser::parse_line_with_separator;
pub use line_delimited_parser::shape::LineSeparatedShape;
@@ -1,408 +0,0 @@
use crate::hir::syntax_shape::{
    BackoffColoringMode, ExpandSyntax, MaybeSpaceShape, MaybeWhitespaceEof,
};
use crate::hir::SpannedExpression;
use crate::{
    hir::{self, NamedArguments},
    Flag,
};
use crate::{Token, TokensIterator};
use log::trace;
use nu_errors::{ArgumentError, ParseError};
use nu_protocol::{NamedType, PositionalType, Signature, SyntaxShape};
use nu_source::{HasFallibleSpan, HasSpan, PrettyDebugWithSource, Span, Spanned, SpannedItem};

type OptionalHeadTail = (Option<Vec<hir::SpannedExpression>>, Option<NamedArguments>);

pub fn parse_command_tail(
    config: &Signature,
    tail: &mut TokensIterator,
    command_span: Span,
) -> Result<Option<OptionalHeadTail>, ParseError> {
    let mut named = NamedArguments::new();
    let mut found_error: Option<ParseError> = None;
    let mut rest_signature = config.clone();

    trace!(target: "nu::parse::trace_remaining", "");

    trace_remaining("nodes", &tail);

    for (name, kind) in &config.named {
        trace!(target: "nu::parse::trace_remaining", "looking for {} : {:?}", name, kind);

        match &kind.0 {
            NamedType::Switch(s) => {
                let switch = extract_switch(name, *s, tail);

                match switch {
                    None => named.insert_switch(name, None),
                    Some((pos, flag)) => {
                        named.insert_switch(name, Some(*flag));
                        rest_signature.remove_named(name);
                        tail.color_shape(flag.color(flag.span));
                        tail.move_to(pos);
                        tail.expand_infallible(MaybeSpaceShape);
                        tail.move_to(0);
                    }
                }
            }
            NamedType::Mandatory(s, syntax_type) => {
                match extract_mandatory(config, name, *s, tail, command_span) {
                    Err(err) => {
                        // remember this error, but continue coloring
                        found_error = Some(err);
                    }
                    Ok((pos, flag)) => {
                        let result = expand_flag(tail, *syntax_type, flag, pos);

                        tail.move_to(0);

                        match result {
                            Ok(expr) => {
                                named.insert_mandatory(name, expr);
                                rest_signature.remove_named(name);
                            }
                            Err(_) => {
                                found_error = Some(ParseError::argument_error(
                                    config.name.clone().spanned(flag.span),
                                    ArgumentError::MissingValueForName(name.to_string()),
                                ))
                            }
                        }
                    }
                }
            }
            NamedType::Optional(s, syntax_type) => {
                match extract_optional(name, *s, tail) {
                    Err(err) => {
                        // remember this error, but continue coloring
                        found_error = Some(err);
                    }
                    Ok(Some((pos, flag))) => {
                        let result = expand_flag(tail, *syntax_type, flag, pos);

                        tail.move_to(0);

                        match result {
                            Ok(expr) => {
                                named.insert_optional(name, Some(expr));
                                rest_signature.remove_named(name);
                            }
                            Err(_) => {
                                found_error = Some(ParseError::argument_error(
                                    config.name.clone().spanned(flag.span),
                                    ArgumentError::MissingValueForName(name.to_string()),
                                ))
                            }
                        }
                    }

                    Ok(None) => {
                        named.insert_optional(name, None);
                    }
                }
            }
        };
    }

    trace_remaining("after named", &tail);

    let mut positional = vec![];

    match continue_parsing_positionals(&config, tail, &mut rest_signature, command_span) {
        Ok(positionals) => {
            positional = positionals;
        }
        Err(reason) => {
            if found_error.is_none() && !named.switch_present("help") {
                found_error = Some(reason);
            }
        }
    }

    trace_remaining("after positional", &tail);

    if let Some((syntax_type, _)) = config.rest_positional {
        let mut out = vec![];

        loop {
            if found_error.is_some() {
                break;
            }

            tail.move_to(0);

            trace_remaining("start rest", &tail);
            eat_any_whitespace(tail);
            trace_remaining("after whitespace", &tail);

            if tail.at_end() {
                break;
            }

            match tail.expand_syntax(syntax_type) {
                Err(err) => found_error = Some(err),
                Ok(next) => out.push(next),
            };
        }

        positional.extend(out);
    }

    trace_remaining("after rest", &tail);

    if found_error.is_none() {
        if let Some(unexpected_argument_error) = find_unexpected_tokens(config, tail, command_span)
        {
            found_error = Some(unexpected_argument_error);
        }
    }

    eat_any_whitespace(tail);

    // Consume any remaining tokens with backoff coloring mode
    tail.expand_infallible(BackoffColoringMode::new(rest_signature.allowed()));

    // This is pretty dubious, but it works. We should look into a better algorithm that doesn't end up requiring
    // this solution.
    tail.sort_shapes();

    trace!(target: "nu::parse::trace_remaining", "Constructed positional={:?} named={:?}", positional, named);

    let positional = if positional.is_empty() {
        None
    } else {
        Some(positional)
    };

    let named = if named.named.is_empty() {
        None
    } else {
        Some(named)
    };

    trace!(target: "nu::parse::trace_remaining", "Normalized positional={:?} named={:?}", positional, named);

    if let Some(err) = found_error {
        return Err(err);
    }

    Ok(Some((positional, named)))
}

pub fn continue_parsing_positionals(
    config: &Signature,
    tail: &mut TokensIterator,
    rest_signature: &mut Signature,
    command_span: Span,
) -> Result<Vec<SpannedExpression>, ParseError> {
    let mut positional = vec![];

    eat_any_whitespace(tail);

    for arg in &config.positional {
        trace!(target: "nu::parse::trace_remaining", "Processing positional {:?}", arg);

        tail.move_to(0);

        let result = expand_spaced_expr(arg.0.syntax_type(), tail);

        match result {
            Err(_) => match &arg.0 {
                PositionalType::Mandatory(..) => {
                    return Err(ParseError::argument_error(
                        config.name.clone().spanned(command_span),
                        ArgumentError::MissingMandatoryPositional(arg.0.name().to_string()),
                    ))
                }
                PositionalType::Optional(..) => {
                    if tail.expand_syntax(MaybeWhitespaceEof).is_ok() {
                        break;
                    }
                }
            },
            Ok(result) => {
                rest_signature.shift_positional();
                positional.push(result);
            }
        }
    }

    Ok(positional)
}

fn eat_any_whitespace(tail: &mut TokensIterator) {
    loop {
        match tail.expand_infallible(MaybeSpaceShape) {
            None => break,
            Some(_) => continue,
        }
    }
}

fn expand_flag(
    token_nodes: &mut TokensIterator,
    syntax_type: SyntaxShape,
    flag: Spanned<Flag>,
    pos: usize,
) -> Result<SpannedExpression, ()> {
    token_nodes.color_shape(flag.color(flag.span));

    let result = token_nodes.atomic_parse(|token_nodes| {
        token_nodes.move_to(pos);

        if token_nodes.at_end() {
            return Err(ParseError::unexpected_eof("flag", Span::unknown()));
        }

        let expr = expand_spaced_expr(syntax_type, token_nodes)?;

        Ok(expr)
    });

    let expr = result.map_err(|_| ())?;
    Ok(expr)
}

fn expand_spaced_expr<
    T: HasFallibleSpan + PrettyDebugWithSource + Clone + std::fmt::Debug + 'static,
>(
    syntax: impl ExpandSyntax<Output = Result<T, ParseError>>,
    token_nodes: &mut TokensIterator,
) -> Result<T, ParseError> {
    token_nodes.atomic_parse(|token_nodes| {
        token_nodes.expand_infallible(MaybeSpaceShape);
        token_nodes.expand_syntax(syntax)
    })
}

fn extract_switch(
    name: &str,
    short: Option<char>,
    tokens: &mut hir::TokensIterator<'_>,
) -> Option<(usize, Spanned<Flag>)> {
    let source = tokens.source();
    let switch = tokens.extract(|t| {
        t.as_flag(name, short, &source)
            .map(|flag| flag.spanned(t.span()))
    });

    match switch {
        None => None,
        Some((pos, flag)) => {
            tokens.remove(pos);
            Some((pos, flag))
        }
    }
}

fn extract_mandatory(
    config: &Signature,
    name: &str,
    short: Option<char>,
    tokens: &mut hir::TokensIterator<'_>,
    span: Span,
) -> Result<(usize, Spanned<Flag>), ParseError> {
    let source = tokens.source();
    let flag = tokens.extract(|t| {
        t.as_flag(name, short, &source)
            .map(|flag| flag.spanned(t.span()))
    });

    match flag {
        None => Err(ParseError::argument_error(
            config.name.clone().spanned(span),
            ArgumentError::MissingMandatoryFlag(name.to_string()),
        )),

        Some((pos, flag)) => {
            tokens.remove(pos);
            Ok((pos, flag))
        }
    }
}

fn extract_optional(
    name: &str,
    short: Option<char>,
    tokens: &mut hir::TokensIterator<'_>,
) -> Result<Option<(usize, Spanned<Flag>)>, ParseError> {
    let source = tokens.source();
    let flag = tokens.extract(|t| {
        t.as_flag(name, short, &source)
            .map(|flag| flag.spanned(t.span()))
    });

    match flag {
        None => Ok(None),
        Some((pos, flag)) => {
            tokens.remove(pos);
            Ok(Some((pos, flag)))
        }
    }
}

fn find_unexpected_tokens(
    config: &Signature,
    tail: &hir::TokensIterator,
    command_span: Span,
) -> Option<ParseError> {
    let mut tokens = tail.clone();
    let source = tail.source();

    loop {
        tokens.move_to(0);

        if let Some(node) = tokens.peek().commit() {
            match &node.unspanned() {
                Token::Whitespace => {}
                Token::Flag { .. } => {
                    return Some(ParseError::argument_error(
                        config.name.clone().spanned(command_span),
                        ArgumentError::UnexpectedFlag(Spanned {
                            item: node.span().slice(&source).to_string(),
                            span: node.span(),
                        }),
                    ));
                }
                _ => {
                    return Some(ParseError::argument_error(
                        config.name.clone().spanned(command_span),
                        ArgumentError::UnexpectedArgument(Spanned {
                            item: node.span().slice(&source).to_string(),
                            span: node.span(),
                        }),
                    ));
                }
            }
        }

        if tokens.at_end() {
            break;
        }
    }
    None
}

pub fn trace_remaining(desc: &'static str, tail: &hir::TokensIterator<'_>) {
    let offset = tail.clone().span_at_cursor();
    let source = tail.source();

    trace!(
        target: "nu::parse::trace_remaining",
        "{} = {}",
        desc,
        itertools::join(
            tail.debug_remaining()
                .iter()
                .map(|val| {
                    if val.span().start() == offset.start() {
                        format!("<|> %{}%", val.debug(&source))
                    } else {
                        format!("%{}%", val.debug(&source))
                    }
                }),
            " "
        )
    );
}
crates/nu-parser/src/shapes.rs (new file, 114 lines)
@@ -0,0 +1,114 @@
use crate::hir::*;
use crate::parse::{ClassifiedCommand, Commands};
use nu_protocol::UnspannedPathMember;
use nu_source::{Spanned, SpannedItem};

/// Converts a SpannedExpression into a spanned shape(s) ready for color-highlighting
pub fn expression_to_flat_shape(e: &SpannedExpression) -> Vec<Spanned<FlatShape>> {
    match &e.expr {
        Expression::Block(exprs) => {
            let mut output = vec![];
            for expr in exprs.iter() {
                output.append(&mut expression_to_flat_shape(expr));
            }
            output
        }
        Expression::FilePath(_) => vec![FlatShape::Path.spanned(e.span)],
        Expression::Garbage => vec![FlatShape::Garbage.spanned(e.span)],
        Expression::List(exprs) => {
            let mut output = vec![];
            for expr in exprs.iter() {
                output.append(&mut expression_to_flat_shape(expr));
            }
            output
        }
        Expression::Path(exprs) => {
            let mut output = vec![];
            output.append(&mut expression_to_flat_shape(&exprs.head));
            for member in exprs.tail.iter() {
                if let UnspannedPathMember::String(_) = &member.unspanned {
                    output.push(FlatShape::StringMember.spanned(member.span));
                }
            }
            output
        }
        Expression::Command(command) => vec![FlatShape::InternalCommand.spanned(*command)],
        Expression::Literal(Literal::Bare) => vec![FlatShape::BareMember.spanned(e.span)],
        Expression::Literal(Literal::ColumnPath(_)) => vec![FlatShape::Path.spanned(e.span)],
        Expression::Literal(Literal::GlobPattern(_)) => {
            vec![FlatShape::GlobPattern.spanned(e.span)]
        }
        Expression::Literal(Literal::Number(_)) => vec![FlatShape::Int.spanned(e.span)],
        Expression::Literal(Literal::Operator(_)) => {
            vec![FlatShape::CompareOperator.spanned(e.span)]
        }
        Expression::Literal(Literal::Size(number, unit)) => vec![FlatShape::Size {
            number: number.span,
            unit: unit.span,
        }
        .spanned(e.span)],
        Expression::Literal(Literal::String(_)) => vec![FlatShape::String.spanned(e.span)],
        Expression::ExternalWord => vec![FlatShape::ExternalWord.spanned(e.span)],
        Expression::ExternalCommand(_) => vec![FlatShape::ExternalCommand.spanned(e.span)],
        Expression::Synthetic(_) => vec![FlatShape::BareMember.spanned(e.span)],
        Expression::Variable(_) => vec![FlatShape::Variable.spanned(e.span)],
        Expression::Binary(binary) => {
            let mut output = vec![];
            output.append(&mut expression_to_flat_shape(&binary.left));
            output.push(FlatShape::CompareOperator.spanned(binary.op.span));
            output.append(&mut expression_to_flat_shape(&binary.right));
            output
        }
        Expression::Range(range) => {
            let mut output = vec![];
            output.append(&mut expression_to_flat_shape(&range.left));
            output.push(FlatShape::DotDot.spanned(range.dotdot));
            output.append(&mut expression_to_flat_shape(&range.right));
            output
        }
        Expression::Boolean(_) => vec![FlatShape::Keyword.spanned(e.span)],
    }
}

/// Converts a series of commands into a vec of spanned shapes ready for color-highlighting
pub fn shapes(commands: &Commands) -> Vec<Spanned<FlatShape>> {
    let mut output = vec![];

    for command in &commands.list {
        match command {
            ClassifiedCommand::Internal(internal) => {
                output.append(&mut expression_to_flat_shape(&internal.args.head));

                if let Some(positionals) = &internal.args.positional {
                    for positional_arg in positionals {
                        output.append(&mut expression_to_flat_shape(positional_arg));
                    }
                }

                if let Some(named) = &internal.args.named {
                    for (_, named_arg) in named.iter() {
                        match named_arg {
                            NamedValue::PresentSwitch(span) => {
                                output.push(FlatShape::Flag.spanned(*span));
                            }
                            NamedValue::Value(span, expr) => {
                                output.push(FlatShape::Flag.spanned(*span));
                                output.append(&mut expression_to_flat_shape(expr));
                            }
                            _ => {}
                        }
                    }
                }
            }
            ClassifiedCommand::External(external) => {
                output.push(FlatShape::ExternalCommand.spanned(external.name_tag.span));
                for arg in external.args.iter() {
                    output.push(FlatShape::ExternalWord.spanned(arg.tag.span));
                }
            }
            _ => {}
        }
    }

    output
}
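A hedged note (not part of this commit) on the new shapes.rs above, expressed as comments:

// expression_to_flat_shape walks one SpannedExpression recursively and emits its leaf
// shapes in source order; a Binary, for example, yields the left-hand side's shapes,
// a CompareOperator span for the operator, then the right-hand side's shapes.
//
// shapes() flattens a whole classified pipeline for the highlighter: for an internal
// command it appends the head's shapes, each positional argument's shapes, and for every
// named argument a Flag span (plus the value's shapes for NamedValue::Value); for an
// external command it emits ExternalCommand for the name and one ExternalWord per argument.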