fix more merge conflicts

Tanishq Kancharla 2021-10-01 22:24:43 -04:00
commit e1a0ad2987
110 changed files with 11666 additions and 1153 deletions

Cargo.lock

@ -2,6 +2,30 @@
# It is not intended for manual editing.
version = 3
[[package]]
name = "addr2line"
version = "0.16.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3e61f2b7f93d2c7d2b08263acaa4a363b3e276806c68af6134c44f523bf1aacd"
dependencies = [
"gimli",
]
[[package]]
name = "adler"
version = "1.0.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f26201604c87b1e01bd3d98f8d5d9a8fcbb815e8cedb41ffccbeb4bf593a35fe"
[[package]]
name = "aho-corasick"
version = "0.7.18"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1e37cfd5e7657ada45f742d6e99ca5788580b5c529dc78faf11ece6dc702656f"
dependencies = [
"memchr",
]
[[package]]
name = "ansi_term"
version = "0.12.1"
@ -25,12 +49,38 @@ dependencies = [
"wait-timeout",
]
[[package]]
name = "atty"
version = "0.2.14"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d9b39be18770d11421cdb1b9947a45dd3f37e93092cbf377614828a319d5fee8"
dependencies = [
"hermit-abi",
"libc",
"winapi",
]
[[package]]
name = "autocfg"
version = "1.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cdb031dd78e28731d87d56cc8ffef4a8f36ca26c38fe2de700543e627f8a464a"
[[package]]
name = "backtrace"
version = "0.3.61"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e7a905d892734eea339e896738c14b9afce22b5318f64b951e70bf3844419b01"
dependencies = [
"addr2line",
"cc",
"cfg-if",
"libc",
"miniz_oxide",
"object",
"rustc-demangle",
]
[[package]]
name = "bitflags"
version = "1.3.2"
@ -48,6 +98,12 @@ dependencies = [
"regex-automata",
]
[[package]]
name = "cc"
version = "1.0.70"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d26a6ce4b6a484fa3edb70f7efa6fc430fd2b87285fe8b84304fd0936faa0dc0"
[[package]]
name = "cfg-if"
version = "1.0.0"
@ -68,13 +124,53 @@ dependencies = [
]
[[package]]
name = "codespan-reporting"
version = "0.11.1"
name = "core-foundation-sys"
version = "0.8.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3538270d33cc669650c4b093848450d380def10c331d38c768e34cac80576e6e"
checksum = "ea221b5284a47e40033bf9b66f35f984ec0ea2931eb03505246cd27a963f981b"
[[package]]
name = "crossbeam-channel"
version = "0.5.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "06ed27e177f16d65f0f0c22a213e17c696ace5dd64b14258b52f9417ccb52db4"
dependencies = [
"termcolor",
"unicode-width",
"cfg-if",
"crossbeam-utils",
]
[[package]]
name = "crossbeam-deque"
version = "0.8.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6455c0ca19f0d2fbf751b908d5c55c1f5cbc65e03c4225427254b46890bdde1e"
dependencies = [
"cfg-if",
"crossbeam-epoch",
"crossbeam-utils",
]
[[package]]
name = "crossbeam-epoch"
version = "0.9.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4ec02e091aa634e2c3ada4a392989e7c3116673ef0ac5b72232439094d73b7fd"
dependencies = [
"cfg-if",
"crossbeam-utils",
"lazy_static",
"memoffset",
"scopeguard",
]
[[package]]
name = "crossbeam-utils"
version = "0.8.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d82cfc11ce7f2c3faef78d8a684447b40d503d9681acebed6cb728d45940c4db"
dependencies = [
"cfg-if",
"lazy_static",
]
[[package]]
@ -125,12 +221,39 @@ version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6184e33543162437515c2e2b48714794e37845ec9851711914eec9d308f6ebe8"
[[package]]
name = "dirs-next"
version = "2.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b98cf8ebf19c3d1b223e151f99a4f9f0690dca41414773390fc824184ac833e1"
dependencies = [
"cfg-if",
"dirs-sys-next",
]
[[package]]
name = "dirs-sys-next"
version = "0.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4ebda144c4fe02d1f7ea1a7d9641b6fc6b580adcfa024ae48797ecdeb6825b4d"
dependencies = [
"libc",
"redox_users",
"winapi",
]
[[package]]
name = "doc-comment"
version = "0.3.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fea41bba32d969b513997752735605054bc0dfa92b4c56bf1189f2e174be7a10"
[[package]]
name = "dunce"
version = "1.0.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "453440c271cf5577fd2a40e4942540cb7d0d2f85e27c8d07dd0023c925a67541"
[[package]]
name = "either"
version = "1.6.1"
@ -142,12 +265,16 @@ name = "engine-q"
version = "0.1.0"
dependencies = [
"assert_cmd",
"codespan-reporting",
"crossterm",
"miette",
"nu-cli",
"nu-command",
"nu-engine",
"nu-json",
"nu-parser",
"nu-path",
"nu-protocol",
"nu-table",
"pretty_assertions",
"reedline",
"tempfile",
@ -164,6 +291,27 @@ dependencies = [
"wasi",
]
[[package]]
name = "gimli"
version = "0.25.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f0a01e0497841a3b2db4f8afa483cce65f7e96a3498bd6c541734792aeac8fe7"
[[package]]
name = "glob"
version = "0.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9b919933a397b79c37e33b77bb2aa3dc8eb6e165ad809e58ff75bc7db2e34574"
[[package]]
name = "hermit-abi"
version = "0.1.19"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "62b467343b94ba476dcb2500d242dadbb39557df889310ac77c5d99100aaac33"
dependencies = [
"libc",
]
[[package]]
name = "instant"
version = "0.1.11"
@ -173,6 +321,12 @@ dependencies = [
"cfg-if",
]
[[package]]
name = "is_ci"
version = "1.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "616cde7c720bb2bb5824a224687d8f77bfd38922027f01d825cd7453be5099fb"
[[package]]
name = "itertools"
version = "0.10.1"
@ -182,6 +336,12 @@ dependencies = [
"either",
]
[[package]]
name = "itoa"
version = "0.4.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b71991ff56294aa922b450139ee08b3bfc70982c6b2c7562771375cf73542dd4"
[[package]]
name = "lazy_static"
version = "1.4.0"
@ -190,9 +350,19 @@ checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646"
[[package]]
name = "libc"
version = "0.2.102"
version = "0.2.103"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a2a5ac8f984bfcf3a823267e5fde638acc3325f6496633a5da6bb6eb2171e103"
checksum = "dd8f7255a17a627354f321ef0055d63b898c6fb27eff628af4d1b66b7331edf6"
[[package]]
name = "linked-hash-map"
version = "0.5.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7fb9b38af92608140b86b693604b9ffcc5824240a484d1ecd4795bacb2fe88f3"
dependencies = [
"serde",
"serde_test",
]
[[package]]
name = "lock_api"
@ -218,6 +388,55 @@ version = "2.4.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "308cc39be01b73d0d18f82a0e7b2a3df85245f84af96fdddc5d202d27e47b86a"
[[package]]
name = "memoffset"
version = "0.6.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "59accc507f1338036a0477ef61afdae33cde60840f4dfe481319ce3ad116ddf9"
dependencies = [
"autocfg",
]
[[package]]
name = "miette"
version = "3.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4786c5b04c6f73e96d88444e7f37e241d99479ea5dd88a4887363ab2e03b4e53"
dependencies = [
"atty",
"backtrace",
"miette-derive",
"once_cell",
"owo-colors",
"supports-color",
"supports-hyperlinks",
"supports-unicode",
"term_size",
"textwrap",
"thiserror",
]
[[package]]
name = "miette-derive"
version = "3.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0ee63a981bc9cde5f26665ffd756b624963bf0b5956e0df51e52ef8f6b5466d6"
dependencies = [
"proc-macro2",
"quote",
"syn",
]
[[package]]
name = "miniz_oxide"
version = "0.4.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a92518e98c078586bc6c934028adcca4c92a53d6a958196de835170a01d84e4b"
dependencies = [
"adler",
"autocfg",
]
[[package]]
name = "mio"
version = "0.7.13"
@ -264,21 +483,26 @@ dependencies = [
name = "nu-cli"
version = "0.1.0"
dependencies = [
"codespan-reporting",
"miette",
"nu-ansi-term",
"nu-engine",
"nu-parser",
"nu-protocol",
"reedline",
"thiserror",
]
[[package]]
name = "nu-command"
version = "0.1.0"
dependencies = [
"glob",
"nu-engine",
"nu-parser",
"nu-json",
"nu-protocol",
"nu-table",
"sysinfo",
"thiserror",
]
[[package]]
@ -286,22 +510,56 @@ name = "nu-engine"
version = "0.1.0"
dependencies = [
"nu-parser",
"nu-path",
"nu-protocol",
]
[[package]]
name = "nu-json"
version = "0.37.1"
dependencies = [
"lazy_static",
"linked-hash-map",
"nu-path",
"num-traits",
"regex",
"serde",
"serde_json",
]
[[package]]
name = "nu-parser"
version = "0.1.0"
dependencies = [
"codespan-reporting",
"miette",
"nu-protocol",
"thiserror",
]
[[package]]
name = "nu-path"
version = "0.37.1"
dependencies = [
"dirs-next",
"dunce",
]
[[package]]
name = "nu-protocol"
version = "0.1.0"
dependencies = [
"codespan-reporting",
"miette",
"serde",
"thiserror",
]
[[package]]
name = "nu-table"
version = "0.36.0"
dependencies = [
"nu-ansi-term",
"regex",
"unicode-width",
]
[[package]]
@ -323,6 +581,31 @@ dependencies = [
"autocfg",
]
[[package]]
name = "num_cpus"
version = "1.13.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "05499f3756671c15885fee9034446956fff3f243d6077b91e5767df161f766b3"
dependencies = [
"hermit-abi",
"libc",
]
[[package]]
name = "object"
version = "0.26.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "39f37e50073ccad23b6d09bcb5b263f4e76d3bb6038e4a3c08e52162ffa8abc2"
dependencies = [
"memchr",
]
[[package]]
name = "once_cell"
version = "1.8.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "692fcb63b64b1758029e0a96ee63e049ce8c5948587f2f7208df04625e5f6b56"
[[package]]
name = "output_vt100"
version = "0.1.2"
@ -338,6 +621,12 @@ version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b15813163c1d831bf4a13c3610c05c0d03b39feb07f7e09fa234dac9b15aaf39"
[[package]]
name = "owo-colors"
version = "2.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3a61765925aec40abdb23812a3a1a01fafc6ffb9da22768b2ce665a9e84e527c"
[[package]]
name = "parking_lot"
version = "0.11.2"
@ -466,6 +755,31 @@ dependencies = [
"rand_core",
]
[[package]]
name = "rayon"
version = "1.5.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c06aca804d41dbc8ba42dfd964f0d01334eceb64314b9ecf7c5fad5188a06d90"
dependencies = [
"autocfg",
"crossbeam-deque",
"either",
"rayon-core",
]
[[package]]
name = "rayon-core"
version = "1.9.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d78120e2c850279833f1dd3582f730c4ab53ed95aeaaaa862a2a5c71b1656d8e"
dependencies = [
"crossbeam-channel",
"crossbeam-deque",
"crossbeam-utils",
"lazy_static",
"num_cpus",
]
[[package]]
name = "redox_syscall"
version = "0.2.10"
@ -475,10 +789,20 @@ dependencies = [
"bitflags",
]
[[package]]
name = "redox_users"
version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "528532f3d801c87aec9def2add9ca802fe569e44a544afe633765267840abe64"
dependencies = [
"getrandom",
"redox_syscall",
]
[[package]]
name = "reedline"
version = "0.2.0"
source = "git+https://github.com/jntrnr/reedline?branch=main#93c2146fcf4257c40426bc2f0c6903d4115caaf1"
source = "git+https://github.com/jntrnr/reedline?branch=main#bfddc5870ca2d8301694b4211bdcdb29e647c6f3"
dependencies = [
"chrono",
"crossterm",
@ -488,12 +812,29 @@ dependencies = [
"unicode-width",
]
[[package]]
name = "regex"
version = "1.5.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d07a8629359eb56f1e2fb1652bb04212c072a87ba68546a04065d525673ac461"
dependencies = [
"aho-corasick",
"memchr",
"regex-syntax",
]
[[package]]
name = "regex-automata"
version = "0.1.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6c230d73fb8d8c1b9c0b3135c5142a8acee3a0558fb8db5cf1cb65f8d7862132"
[[package]]
name = "regex-syntax"
version = "0.6.25"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f497285884f3fcff424ffc933e56d7cbca511def0c9831a7f9b5f6153e3cc89b"
[[package]]
name = "remove_dir_all"
version = "0.5.3"
@ -503,6 +844,18 @@ dependencies = [
"winapi",
]
[[package]]
name = "rustc-demangle"
version = "0.1.21"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7ef03e0a2b150c7a90d01faf6254c9c48a41e95fb2a8c2ac1c6f0d2b9aefc342"
[[package]]
name = "ryu"
version = "1.0.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "71d301d4193d031abdd79ff7e3dd721168a9572ef3fe51a1517aba235bd8f86e"
[[package]]
name = "scopeguard"
version = "1.1.0"
@ -529,6 +882,26 @@ dependencies = [
"syn",
]
[[package]]
name = "serde_json"
version = "1.0.68"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0f690853975602e1bfe1ccbf50504d67174e3bcf340f23b5ea9992e0587a52d8"
dependencies = [
"itoa",
"ryu",
"serde",
]
[[package]]
name = "serde_test"
version = "1.0.130"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d82178225dbdeae2d5d190e8649287db6a3a32c6d24da22ae3146325aa353e4c"
dependencies = [
"serde",
]
[[package]]
name = "signal-hook"
version = "0.3.10"
@ -561,9 +934,43 @@ dependencies = [
[[package]]
name = "smallvec"
version = "1.6.1"
version = "1.7.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fe0f37c9e8f3c5a4a66ad655a93c74daac4ad00c441533bf5c6e7990bb42604e"
checksum = "1ecab6c735a6bb4139c0caafd0cc3635748bbb3acf4550e8138122099251f309"
[[package]]
name = "smawk"
version = "0.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f67ad224767faa3c7d8b6d91985b78e70a1324408abcb1cfcc2be4c06bc06043"
[[package]]
name = "supports-color"
version = "1.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3f3cef55878ee693bb9f6765515f52910ec20b776d222fce5d11fbb9f5368028"
dependencies = [
"atty",
"is_ci",
]
[[package]]
name = "supports-hyperlinks"
version = "1.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "590b34f7c5f01ecc9d78dba4b3f445f31df750a67621cf31626f3b7441ce6406"
dependencies = [
"atty",
]
[[package]]
name = "supports-unicode"
version = "1.0.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a8b945e45b417b125a8ec51f1b7df2f8df7920367700d1f98aedd21e5735f8b2"
dependencies = [
"atty",
]
[[package]]
name = "syn"
@ -576,6 +983,21 @@ dependencies = [
"unicode-xid",
]
[[package]]
name = "sysinfo"
version = "0.20.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ffff4a02fa61eee51f95210fc9c98ea6eeb46bb071adeafd61e1a0b9b22c6a6d"
dependencies = [
"cfg-if",
"core-foundation-sys",
"libc",
"ntapi",
"once_cell",
"rayon",
"winapi",
]
[[package]]
name = "tempfile"
version = "3.2.0"
@ -591,12 +1013,44 @@ dependencies = [
]
[[package]]
name = "termcolor"
version = "1.1.2"
name = "term_size"
version = "0.3.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2dfed899f0eb03f32ee8c6a0aabdb8a7949659e3466561fc0adf54e26d88c5f4"
checksum = "1e4129646ca0ed8f45d09b929036bafad5377103edd06e50bf574b353d2b08d9"
dependencies = [
"winapi-util",
"libc",
"winapi",
]
[[package]]
name = "textwrap"
version = "0.14.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0066c8d12af8b5acd21e00547c3797fde4e8677254a7ee429176ccebbe93dd80"
dependencies = [
"smawk",
"unicode-linebreak",
"unicode-width",
]
[[package]]
name = "thiserror"
version = "1.0.29"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "602eca064b2d83369e2b2f34b09c70b605402801927c65c11071ac911d299b88"
dependencies = [
"thiserror-impl",
]
[[package]]
name = "thiserror-impl"
version = "1.0.29"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bad553cc2c78e8de258400763a647e80e6d1b31ee237275d756f6836d204494c"
dependencies = [
"proc-macro2",
"quote",
"syn",
]
[[package]]
@ -616,6 +1070,15 @@ version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a7f741b240f1a48843f9b8e0444fb55fb2a4ff67293b50a9179dfd5ea67f8d41"
[[package]]
name = "unicode-linebreak"
version = "0.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3a52dcaab0c48d931f7cc8ef826fa51690a08e1ea55117ef26f89864f532383f"
dependencies = [
"regex",
]
[[package]]
name = "unicode-segmentation"
version = "1.8.0"
@ -665,15 +1128,6 @@ version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6"
[[package]]
name = "winapi-util"
version = "0.1.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "70ec6ce85bb158151cae5e5c87f95a8e97d2c0c4b001223f33a334e3ce5de178"
dependencies = [
"winapi",
]
[[package]]
name = "winapi-x86_64-pc-windows-gnu"
version = "0.4.0"


@ -10,13 +10,16 @@ members = ["crates/nu-cli", "crates/nu-engine", "crates/nu-parser", "crates/nu-c
[dependencies]
reedline = { git = "https://github.com/jntrnr/reedline", branch = "main" }
codespan-reporting = "0.11.1"
crossterm = "0.21.*"
nu-cli = { path="./crates/nu-cli" }
nu-command = { path="./crates/nu-command" }
nu-engine = { path="./crates/nu-engine" }
nu-json = { path="./crates/nu-json" }
nu-parser = { path="./crates/nu-parser" }
nu-path = { path="./crates/nu-path" }
nu-protocol = { path = "./crates/nu-protocol" }
nu-table = { path = "./crates/nu-table" }
miette = "3.0.0"
# mimalloc = { version = "*", default-features = false }
[dev-dependencies]


@ -1,3 +1,5 @@
# Engine-q
Engine-q is a smaller project to reimplement some of the core functionality in Nushell. It's still in an alpha state, and there is still a lot to do: please see TODO.md
Engine-q is an experimental project to replace the core functionality in Nushell (parser, engine, protocol). It's still in an alpha state, and there is still a lot to do: please see TODO.md
If you'd like to help out, come join us on the [discord](https://discord.gg/NtAbbGn) or propose some work in an issue or PR draft. We're currently looking to begin porting Nushell commands to engine-q.

TODO.md

@ -17,16 +17,24 @@
- [x] Column path
- [x] ...rest without calling it rest
- [x] Iteration (`each`) over tables
- [x] Row conditions
- [x] Simple completions
- [x] Detecting `$it` currently only looks at top scope but should find any free `$it` in the expression (including subexprs)
- [x] Signature needs to make parameters visible in scope before block is parsed
- [x] Externals
- [x] Modules and imports
- [ ] Exports
- [ ] Support for `$in`
- [ ] Value serialization
- [ ] Handling rows with missing columns during a cell path
- [ ] Error shortcircuit (stopping on first error)
- [ ] ctrl-c support
- [ ] operator overflow
- [ ] finish operator type-checking
- [ ] Source
- [ ] Autoenv
- [ ] Externals
- [ ] let [first, rest] = [1, 2, 3] (design question: how do you pattern match a table?)
- [ ] Overlays (replacement for `autoenv`)
## Maybe:
- [ ] default param values?
- [ ] Unary not?
- [ ] let [first, rest] = [1, 2, 3] (design question: how do you pattern match a table?)


@ -7,6 +7,7 @@ edition = "2018"
nu-engine = { path = "../nu-engine" }
nu-parser = { path = "../nu-parser" }
nu-protocol = { path = "../nu-protocol" }
codespan-reporting = "0.11.1"
miette = { version = "3.0.0", features = ["fancy"] }
thiserror = "1.0.29"
nu-ansi-term = "0.36.0"
reedline = { git = "https://github.com/jntrnr/reedline", branch = "main" }


@ -0,0 +1,90 @@
use std::{cell::RefCell, rc::Rc};
use nu_engine::eval_block;
use nu_parser::{flatten_block, parse};
use nu_protocol::{
engine::{EngineState, EvaluationContext, Stack, StateWorkingSet},
Value,
};
use reedline::Completer;
pub struct NuCompleter {
engine_state: Rc<RefCell<EngineState>>,
}
impl NuCompleter {
pub fn new(engine_state: Rc<RefCell<EngineState>>) -> Self {
Self { engine_state }
}
}
impl Completer for NuCompleter {
fn complete(&self, line: &str, pos: usize) -> Vec<(reedline::Span, String)> {
let engine_state = self.engine_state.borrow();
let mut working_set = StateWorkingSet::new(&*engine_state);
let offset = working_set.next_span_start();
let pos = offset + pos;
let (output, _err) = parse(&mut working_set, Some("completer"), line.as_bytes(), false);
let flattened = flatten_block(&working_set, &output);
for flat in flattened {
if pos >= flat.0.start && pos <= flat.0.end {
match &flat.1 {
nu_parser::FlatShape::Custom(custom_completion) => {
let prefix = working_set.get_span_contents(flat.0).to_vec();
let (block, ..) =
parse(&mut working_set, None, custom_completion.as_bytes(), false);
let context = EvaluationContext {
engine_state: self.engine_state.clone(),
stack: Stack::default(),
};
let result = eval_block(&context, &block, Value::nothing());
let v: Vec<_> = match result {
Ok(Value::List { vals, .. }) => vals
.into_iter()
.map(move |x| {
let s = x.as_string().expect("FIXME");
(
reedline::Span {
start: flat.0.start - offset,
end: flat.0.end - offset,
},
s,
)
})
.filter(|x| x.1.as_bytes().starts_with(&prefix))
.collect(),
_ => vec![],
};
return v;
}
nu_parser::FlatShape::External | nu_parser::FlatShape::InternalCall => {
let prefix = working_set.get_span_contents(flat.0);
let results = working_set.find_commands_by_prefix(prefix);
return results
.into_iter()
.map(move |x| {
(
reedline::Span {
start: flat.0.start - offset,
end: flat.0.end - offset,
},
String::from_utf8_lossy(&x).to_string(),
)
})
.collect();
}
_ => {}
}
}
}
vec![]
}
}
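
The completer above works by re-parsing the current line, flattening the parsed block, and matching the flat shape under the cursor to either a custom completion block or the known command names. A minimal sketch of driving it directly through the Completer trait method defined above; the helper function and the sample input are assumptions for illustration, not part of this commit:

use std::{cell::RefCell, rc::Rc};
use nu_protocol::engine::EngineState;
use reedline::Completer;

// Illustrative helper (not from the commit): print the suggestions for a partial line.
fn demo_completions(engine_state: Rc<RefCell<EngineState>>) {
    let completer = NuCompleter::new(engine_state);
    let line = "wh";
    // The cursor sits at the end of the partial command name.
    for (span, suggestion) in completer.complete(line, line.len()) {
        // `span` is the slice of `line` that the suggestion would replace.
        println!("{}..{} -> {}", span.start, span.end, suggestion);
    }
}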


@ -1,372 +1,53 @@
use core::ops::Range;
use miette::{LabeledSpan, MietteHandler, ReportHandler, Severity, SourceCode};
use nu_protocol::engine::StateWorkingSet;
use thiserror::Error;
use codespan_reporting::diagnostic::{Diagnostic, Label};
use codespan_reporting::term::termcolor::{ColorChoice, StandardStream};
use nu_parser::ParseError;
use nu_protocol::{engine::StateWorkingSet, ShellError, Span};
/// This error exists so that we can defer SourceCode handling. It simply
/// forwards most methods, except for `.source_code()`, which we provide.
#[derive(Error)]
#[error("{0}")]
struct CliError<'src>(
&'src (dyn miette::Diagnostic + Send + Sync + 'static),
&'src StateWorkingSet<'src>,
);
fn convert_span_to_diag(
working_set: &StateWorkingSet,
span: &Span,
) -> Result<(usize, Range<usize>), Box<dyn std::error::Error>> {
for (file_id, (_, start, end)) in working_set.files().enumerate() {
if span.start >= *start && span.end <= *end {
let new_start = span.start - start;
let new_end = span.end - start;
impl std::fmt::Debug for CliError<'_> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
MietteHandler::default().debug(self, f)?;
Ok(())
}
}
return Ok((file_id, new_start..new_end));
}
impl<'src> miette::Diagnostic for CliError<'src> {
fn code<'a>(&'a self) -> Option<Box<dyn std::fmt::Display + 'a>> {
self.0.code()
}
if span.start == working_set.next_span_start() {
// We're trying to highlight the space after the end
if let Some((file_id, (_, _, end))) = working_set.files().enumerate().last() {
return Ok((file_id, *end..(*end + 1)));
}
fn severity(&self) -> Option<Severity> {
self.0.severity()
}
panic!(
"internal error: can't find span in parser state: {:?}",
span
)
fn help<'a>(&'a self) -> Option<Box<dyn std::fmt::Display + 'a>> {
self.0.help()
}
fn url<'a>(&'a self) -> Option<Box<dyn std::fmt::Display + 'a>> {
self.0.url()
}
fn labels<'a>(&'a self) -> Option<Box<dyn Iterator<Item = LabeledSpan> + 'a>> {
self.0.labels()
}
// Finally, we redirect the source_code method to our own source.
fn source_code(&self) -> Option<&dyn SourceCode> {
Some(&self.1)
}
}
pub fn report_parsing_error(
pub fn report_error(
working_set: &StateWorkingSet,
error: &ParseError,
) -> Result<(), Box<dyn std::error::Error>> {
let writer = StandardStream::stderr(ColorChoice::Always);
let config = codespan_reporting::term::Config::default();
let diagnostic =
match error {
ParseError::Mismatch(expected, found, span) => {
let (diag_file_id, diag_range) = convert_span_to_diag(working_set, span)?;
Diagnostic::error()
.with_message("Type mismatch during operation")
.with_labels(vec![Label::primary(diag_file_id, diag_range)
.with_message(format!("expected {}, found {}", expected, found))])
}
ParseError::ExtraTokens(span) => {
let (diag_file_id, diag_range) = convert_span_to_diag(working_set, span)?;
Diagnostic::error()
.with_message("Extra tokens in code")
.with_labels(vec![
Label::primary(diag_file_id, diag_range).with_message("extra tokens")
])
}
ParseError::ExtraPositional(span) => {
let (diag_file_id, diag_range) = convert_span_to_diag(working_set, span)?;
Diagnostic::error()
.with_message("Extra positional argument")
.with_labels(vec![Label::primary(diag_file_id, diag_range)
.with_message("extra positional argument")])
}
ParseError::UnexpectedEof(s, span) => {
let (diag_file_id, diag_range) = convert_span_to_diag(working_set, span)?;
Diagnostic::error()
.with_message("Unexpected end of code")
.with_labels(vec![Label::primary(diag_file_id, diag_range)
.with_message(format!("expected {}", s))])
}
ParseError::Unclosed(delim, span) => {
let (diag_file_id, diag_range) = convert_span_to_diag(working_set, span)?;
Diagnostic::error()
.with_message("Unclosed delimiter")
.with_labels(vec![Label::primary(diag_file_id, diag_range)
.with_message(format!("unclosed {}", delim))])
}
ParseError::UnknownStatement(span) => {
let (diag_file_id, diag_range) = convert_span_to_diag(working_set, span)?;
Diagnostic::error()
.with_message("Unknown statement")
.with_labels(vec![
Label::primary(diag_file_id, diag_range).with_message("unknown statement")
])
}
ParseError::MultipleRestParams(span) => {
let (diag_file_id, diag_range) = convert_span_to_diag(working_set, span)?;
Diagnostic::error()
.with_message("Multiple rest params")
.with_labels(vec![Label::primary(diag_file_id, diag_range)
.with_message("multiple rest params")])
}
ParseError::VariableNotFound(span) => {
let (diag_file_id, diag_range) = convert_span_to_diag(working_set, span)?;
Diagnostic::error()
.with_message("Variable not found")
.with_labels(vec![
Label::primary(diag_file_id, diag_range).with_message("variable not found")
])
}
ParseError::UnknownCommand(span) => {
let (diag_file_id, diag_range) = convert_span_to_diag(working_set, span)?;
Diagnostic::error()
.with_message("Unknown command")
.with_labels(vec![
Label::primary(diag_file_id, diag_range).with_message("unknown command")
])
}
ParseError::UnknownFlag(span) => {
let (diag_file_id, diag_range) = convert_span_to_diag(working_set, span)?;
Diagnostic::error()
.with_message("Unknown flag")
.with_labels(vec![
Label::primary(diag_file_id, diag_range).with_message("unknown flag")
])
}
ParseError::UnknownType(span) => {
let (diag_file_id, diag_range) = convert_span_to_diag(working_set, span)?;
Diagnostic::error()
.with_message("Unknown type")
.with_labels(vec![
Label::primary(diag_file_id, diag_range).with_message("unknown type")
])
}
ParseError::MissingFlagParam(span) => {
let (diag_file_id, diag_range) = convert_span_to_diag(working_set, span)?;
Diagnostic::error()
.with_message("Missing flag param")
.with_labels(vec![Label::primary(diag_file_id, diag_range)
.with_message("flag missing parameter")])
}
ParseError::ShortFlagBatchCantTakeArg(span) => {
let (diag_file_id, diag_range) = convert_span_to_diag(working_set, span)?;
Diagnostic::error()
.with_message("Batches of short flags can't take arguments")
.with_labels(vec![Label::primary(diag_file_id, diag_range)
.with_message("short flag batches can't take args")])
}
ParseError::KeywordMissingArgument(name, span) => {
let (diag_file_id, diag_range) = convert_span_to_diag(working_set, span)?;
Diagnostic::error()
.with_message(format!("Missing argument to {}", name))
.with_labels(vec![Label::primary(diag_file_id, diag_range)
.with_message(format!("missing value that follows {}", name))])
}
ParseError::MissingPositional(name, span) => {
let (diag_file_id, diag_range) = convert_span_to_diag(working_set, span)?;
Diagnostic::error()
.with_message("Missing required positional arg")
.with_labels(vec![Label::primary(diag_file_id, diag_range)
.with_message(format!("missing {}", name))])
}
ParseError::MissingType(span) => {
let (diag_file_id, diag_range) = convert_span_to_diag(working_set, span)?;
Diagnostic::error()
.with_message("Missing type")
.with_labels(vec![
Label::primary(diag_file_id, diag_range).with_message("expected type")
])
}
ParseError::TypeMismatch(expected, found, span) => {
let (diag_file_id, diag_range) = convert_span_to_diag(working_set, span)?;
Diagnostic::error()
.with_message("Type mismatch")
.with_labels(vec![Label::primary(diag_file_id, diag_range)
.with_message(format!("expected {:?}, found {:?}", expected, found))])
}
ParseError::MissingRequiredFlag(name, span) => {
let (diag_file_id, diag_range) = convert_span_to_diag(working_set, span)?;
Diagnostic::error()
.with_message("Missing required flag")
.with_labels(vec![Label::primary(diag_file_id, diag_range)
.with_message(format!("missing required flag {}", name))])
}
ParseError::IncompleteMathExpression(span) => {
let (diag_file_id, diag_range) = convert_span_to_diag(working_set, span)?;
Diagnostic::error()
.with_message("Incomplete math expresssion")
.with_labels(vec![Label::primary(diag_file_id, diag_range)
.with_message("incomplete math expression")])
}
ParseError::UnknownState(name, span) => {
let (diag_file_id, diag_range) = convert_span_to_diag(working_set, span)?;
Diagnostic::error()
.with_message("Unknown state")
.with_labels(vec![Label::primary(diag_file_id, diag_range)
.with_message(format!("unknown state {}", name))])
}
ParseError::NonUtf8(span) => {
let (diag_file_id, diag_range) = convert_span_to_diag(working_set, span)?;
Diagnostic::error()
.with_message("Non-UTF8 code")
.with_labels(vec![
Label::primary(diag_file_id, diag_range).with_message("non-UTF8 code")
])
}
ParseError::Expected(expected, span) => {
let (diag_file_id, diag_range) = convert_span_to_diag(working_set, span)?;
Diagnostic::error()
.with_message("Parse mismatch during operation")
.with_labels(vec![Label::primary(diag_file_id, diag_range)
.with_message(format!("expected {}", expected))])
}
ParseError::UnsupportedOperation(op_span, lhs_span, lhs_ty, rhs_span, rhs_ty) => {
let (lhs_file_id, lhs_range) = convert_span_to_diag(working_set, lhs_span)?;
let (rhs_file_id, rhs_range) = convert_span_to_diag(working_set, rhs_span)?;
let (op_file_id, op_range) = convert_span_to_diag(working_set, op_span)?;
Diagnostic::error()
.with_message("Unsupported operation")
.with_labels(vec![
Label::primary(op_file_id, op_range)
.with_message("doesn't support these values"),
Label::secondary(lhs_file_id, lhs_range).with_message(lhs_ty.to_string()),
Label::secondary(rhs_file_id, rhs_range).with_message(rhs_ty.to_string()),
])
}
ParseError::ExpectedKeyword(expected, span) => {
let (diag_file_id, diag_range) = convert_span_to_diag(working_set, span)?;
Diagnostic::error()
.with_message("Expected keyword")
.with_labels(vec![Label::primary(diag_file_id, diag_range)
.with_message(format!("expected {}", expected))])
}
ParseError::IncompleteParser(span) => {
let (diag_file_id, diag_range) = convert_span_to_diag(working_set, span)?;
Diagnostic::error()
.with_message("Parser incomplete")
.with_labels(vec![Label::primary(diag_file_id, diag_range)
.with_message("parser support missing for this expression")])
}
ParseError::RestNeedsName(span) => {
let (diag_file_id, diag_range) = convert_span_to_diag(working_set, span)?;
Diagnostic::error()
.with_message("Rest parameter needs a name")
.with_labels(vec![Label::primary(diag_file_id, diag_range)
.with_message("needs a parameter name")])
}
};
// println!("DIAG");
// println!("{:?}", diagnostic);
codespan_reporting::term::emit(&mut writer.lock(), &config, working_set, &diagnostic)?;
Ok(())
}
pub fn report_shell_error(
working_set: &StateWorkingSet,
error: &ShellError,
) -> Result<(), Box<dyn std::error::Error>> {
let writer = StandardStream::stderr(ColorChoice::Always);
let config = codespan_reporting::term::Config::default();
let diagnostic =
match error {
ShellError::OperatorMismatch {
op_span,
lhs_ty,
lhs_span,
rhs_ty,
rhs_span,
} => {
let (lhs_file_id, lhs_range) = convert_span_to_diag(working_set, lhs_span)?;
let (rhs_file_id, rhs_range) = convert_span_to_diag(working_set, rhs_span)?;
let (op_file_id, op_range) = convert_span_to_diag(working_set, op_span)?;
Diagnostic::error()
.with_message("Type mismatch during operation")
.with_labels(vec![
Label::primary(op_file_id, op_range)
.with_message("type mismatch for operator"),
Label::secondary(lhs_file_id, lhs_range).with_message(lhs_ty.to_string()),
Label::secondary(rhs_file_id, rhs_range).with_message(rhs_ty.to_string()),
])
}
ShellError::UnsupportedOperator(op, span) => {
let (diag_file_id, diag_range) = convert_span_to_diag(working_set, span)?;
Diagnostic::error()
.with_message(format!("Unsupported operator: {}", op))
.with_labels(vec![Label::primary(diag_file_id, diag_range)
.with_message("unsupported operator")])
}
ShellError::UnknownOperator(op, span) => {
let (diag_file_id, diag_range) = convert_span_to_diag(working_set, span)?;
Diagnostic::error()
.with_message(format!("Unsupported operator: {}", op))
.with_labels(vec![Label::primary(diag_file_id, diag_range)
.with_message("unsupported operator")])
}
ShellError::ExternalNotSupported(span) => {
let (diag_file_id, diag_range) = convert_span_to_diag(working_set, span)?;
Diagnostic::error()
.with_message("External commands not yet supported")
.with_labels(vec![Label::primary(diag_file_id, diag_range)
.with_message("external not supported")])
}
ShellError::InternalError(s) => {
Diagnostic::error().with_message(format!("Internal error: {}", s))
}
ShellError::VariableNotFoundAtRuntime(span) => {
let (diag_file_id, diag_range) = convert_span_to_diag(working_set, span)?;
Diagnostic::error()
.with_message("Variable not found")
.with_labels(vec![
Label::primary(diag_file_id, diag_range).with_message("variable not found")
])
}
ShellError::CantConvert(s, span) => {
let (diag_file_id, diag_range) = convert_span_to_diag(working_set, span)?;
Diagnostic::error()
.with_message(format!("Can't convert to {}", s))
.with_labels(vec![Label::primary(diag_file_id, diag_range)
.with_message(format!("can't convert to {}", s))])
}
ShellError::CannotCreateRange(span) => {
let (diag_file_id, diag_range) = convert_span_to_diag(working_set, span)?;
Diagnostic::error()
.with_message("Can't convert range to countable values")
.with_labels(vec![Label::primary(diag_file_id, diag_range)
.with_message("can't convert to countable values")])
}
ShellError::DivisionByZero(span) => {
let (diag_file_id, diag_range) = convert_span_to_diag(working_set, span)?;
Diagnostic::error()
.with_message("Division by zero")
.with_labels(vec![
Label::primary(diag_file_id, diag_range).with_message("division by zero")
])
}
ShellError::AccessBeyondEnd(len, span) => {
let (diag_file_id, diag_range) = convert_span_to_diag(working_set, span)?;
Diagnostic::error()
.with_message("Row number too large")
.with_labels(vec![Label::primary(diag_file_id, diag_range)
.with_message(format!("row number too large (max: {})", *len))])
}
ShellError::AccessBeyondEndOfStream(span) => {
let (diag_file_id, diag_range) = convert_span_to_diag(working_set, span)?;
Diagnostic::error()
.with_message("Row number too large")
.with_labels(vec![Label::primary(diag_file_id, diag_range)
.with_message("row number too large")])
}
ShellError::IncompatiblePathAccess(name, span) => {
let (diag_file_id, diag_range) = convert_span_to_diag(working_set, span)?;
Diagnostic::error()
.with_message("Data cannot be accessed with a cell path")
.with_labels(vec![Label::primary(diag_file_id, diag_range)
.with_message(format!("{} doesn't support cell paths", name))])
}
ShellError::CantFindColumn(span) => {
let (diag_file_id, diag_range) = convert_span_to_diag(working_set, span)?;
//FIXME: add "did you mean"
Diagnostic::error()
.with_message("Cannot find column")
.with_labels(vec![
Label::primary(diag_file_id, diag_range).with_message("cannot find column")
])
}
};
// println!("DIAG");
// println!("{:?}", diagnostic);
codespan_reporting::term::emit(&mut writer.lock(), &config, working_set, &diagnostic)?;
Ok(())
error: &(dyn miette::Diagnostic + Send + Sync + 'static),
) {
eprintln!("Error: {:?}", CliError(error, working_set));
}
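
With the codespan-reporting paths removed, error reporting funnels through the single report_error function above, which wraps any miette diagnostic in CliError so that the StateWorkingSet can supply the source code. A hedged sketch of how a caller might surface a parse failure this way, assuming (as the rest of this commit suggests) that ParseError implements miette::Diagnostic; the helper function itself is illustrative, not part of the commit:

use nu_parser::parse;
use nu_protocol::engine::{EngineState, StateWorkingSet};

// Illustrative helper (not from the commit): parse a line and report any error.
fn report_if_broken(engine_state: &EngineState, line: &str) {
    let mut working_set = StateWorkingSet::new(engine_state);
    // Same parse signature the completer and validator in this commit use.
    let (_block, err) = parse(&mut working_set, None, line.as_bytes(), false);
    if let Some(err) = err {
        // CliError borrows the working set, so the error renders with source context.
        report_error(&working_set, &err);
    }
}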


@ -1,5 +1,9 @@
mod completions;
mod errors;
mod syntax_highlight;
mod validation;
pub use errors::{report_parsing_error, report_shell_error};
pub use completions::NuCompleter;
pub use errors::report_error;
pub use syntax_highlight::NuHighlighter;
pub use validation::NuValidator;


@ -39,7 +39,9 @@ impl Highlighter for NuHighlighter {
[(shape.0.start - global_span_offset)..(shape.0.end - global_span_offset)]
.to_string();
match shape.1 {
FlatShape::Custom(..) => output.push((Style::new().bold(), next_token)),
FlatShape::External => output.push((Style::new().bold(), next_token)),
FlatShape::ExternalArg => output.push((Style::new().bold(), next_token)),
FlatShape::Garbage => output.push((
Style::new()
.fg(nu_ansi_term::Color::White)


@ -0,0 +1,23 @@
use std::{cell::RefCell, rc::Rc};
use nu_parser::{parse, ParseError};
use nu_protocol::engine::{EngineState, StateWorkingSet};
use reedline::{ValidationResult, Validator};
pub struct NuValidator {
pub engine_state: Rc<RefCell<EngineState>>,
}
impl Validator for NuValidator {
fn validate(&self, line: &str) -> ValidationResult {
let engine_state = self.engine_state.borrow();
let mut working_set = StateWorkingSet::new(&*engine_state);
let (_, err) = parse(&mut working_set, None, line.as_bytes(), false);
if matches!(err, Some(ParseError::UnexpectedEof(..))) {
ValidationResult::Incomplete
} else {
ValidationResult::Complete
}
}
}
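
The validator simply re-parses the pending buffer and treats only an UnexpectedEof parse error as incomplete input, so reedline keeps the editor open for unfinished constructs and submits everything else. A small sketch of exercising it outside the line editor; the helper is an assumption for illustration, not part of the commit:

use std::{cell::RefCell, rc::Rc};
use nu_protocol::engine::EngineState;
use reedline::{ValidationResult, Validator};

// Illustrative helper (not from the commit): true if the line would be submitted as-is.
fn is_line_complete(engine_state: Rc<RefCell<EngineState>>, line: &str) -> bool {
    let validator = NuValidator { engine_state };
    matches!(validator.validate(line), ValidationResult::Complete)
}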


@ -6,6 +6,12 @@ edition = "2018"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
nu-protocol = { path = "../nu-protocol" }
nu-engine = { path = "../nu-engine" }
nu-parser = {path = "../nu-parser"}
nu-json = { path = "../nu-json" }
nu-protocol = { path = "../nu-protocol" }
nu-table = { path = "../nu-table" }
# Potential dependencies for extras
glob = "0.3.0"
thiserror = "1.0.29"
sysinfo = "0.20.4"


@ -17,7 +17,11 @@ impl Command for Def {
Signature::build("def")
.required("def_name", SyntaxShape::String, "definition name")
.required("params", SyntaxShape::Signature, "parameters")
.required("block", SyntaxShape::Block, "body of the definition")
.required(
"block",
SyntaxShape::Block(Some(vec![])),
"body of the definition",
)
}
fn run(


@ -15,7 +15,11 @@ impl Command for Do {
}
fn signature(&self) -> nu_protocol::Signature {
Signature::build("do").required("block", SyntaxShape::Block, "the block to run")
Signature::build("do").required(
"block",
SyntaxShape::Block(Some(vec![])),
"the block to run",
)
}
fn run(


@ -0,0 +1,406 @@
use nu_protocol::{
ast::Call,
engine::{Command, EvaluationContext},
Example, ShellError, Signature, Spanned, SyntaxShape, Value,
};
use nu_engine::CallExt;
pub struct Help;
impl Command for Help {
fn name(&self) -> &str {
"help"
}
fn signature(&self) -> Signature {
Signature::build("help")
.rest(
"rest",
SyntaxShape::String,
"the name of command to get help on",
)
.named(
"find",
SyntaxShape::String,
"string to find in command usage",
Some('f'),
)
}
fn usage(&self) -> &str {
"Display help information about commands."
}
fn run(
&self,
context: &EvaluationContext,
call: &Call,
_input: Value,
) -> Result<Value, ShellError> {
help(context, call)
}
fn examples(&self) -> Vec<Example> {
vec![
Example {
description: "show all commands and sub-commands",
example: "help commands",
result: None,
},
Example {
description: "generate documentation",
example: "help generate_docs",
result: None,
},
Example {
description: "show help for single command",
example: "help match",
result: None,
},
Example {
description: "show help for single sub-command",
example: "help str lpad",
result: None,
},
Example {
description: "search for string in command usage",
example: "help --find char",
result: None,
},
]
}
}
fn help(context: &EvaluationContext, call: &Call) -> Result<Value, ShellError> {
let span = call.head;
let find: Option<Spanned<String>> = call.get_flag(context, "find")?;
let rest: Vec<Spanned<String>> = call.rest(context, 0)?;
let full_commands = context.get_commands_info();
if let Some(f) = find {
let search_string = f.item;
let mut found_cmds_vec = Vec::new();
for cmd in full_commands {
let mut cols = vec![];
let mut vals = vec![];
let key = cmd.name.clone();
let c = cmd.usage.clone();
let e = cmd.extra_usage.clone();
if key.to_lowercase().contains(&search_string)
|| c.to_lowercase().contains(&search_string)
|| e.to_lowercase().contains(&search_string)
{
cols.push("name".into());
vals.push(Value::String { val: key, span });
cols.push("usage".into());
vals.push(Value::String { val: c, span });
cols.push("extra_usage".into());
vals.push(Value::String { val: e, span });
found_cmds_vec.push(Value::Record { cols, vals, span });
}
}
return Ok(Value::List {
vals: found_cmds_vec,
span,
});
}
if !rest.is_empty() {
let mut found_cmds_vec = Vec::new();
if rest[0].item == "commands" {
for cmd in full_commands {
let mut cols = vec![];
let mut vals = vec![];
let key = cmd.name.clone();
let c = cmd.usage.clone();
let e = cmd.extra_usage.clone();
cols.push("name".into());
vals.push(Value::String { val: key, span });
cols.push("usage".into());
vals.push(Value::String { val: c, span });
cols.push("extra_usage".into());
vals.push(Value::String { val: e, span });
found_cmds_vec.push(Value::Record { cols, vals, span });
}
} else {
let mut name = String::new();
for r in rest {
if !name.is_empty() {
name.push(' ');
}
name.push_str(&r.item);
}
for cmd in full_commands {
let mut cols = vec![];
let mut vals = vec![];
let key = cmd.name.clone();
let c = cmd.usage.clone();
let e = cmd.extra_usage.clone();
if key.starts_with(&name) {
cols.push("name".into());
vals.push(Value::String { val: key, span });
cols.push("usage".into());
vals.push(Value::String { val: c, span });
cols.push("extra_usage".into());
vals.push(Value::String { val: e, span });
found_cmds_vec.push(Value::Record { cols, vals, span });
}
}
}
Ok(Value::List {
vals: found_cmds_vec,
span,
})
// FIXME: the fancy help stuff needs to be reimplemented
/*
if rest[0].item == "commands" {
let mut sorted_names = scope.get_command_names();
sorted_names.sort();
let (mut subcommand_names, command_names) = sorted_names
.into_iter()
// private only commands shouldn't be displayed
.filter(|cmd_name| {
scope
.get_command(cmd_name)
.filter(|command| !command.is_private())
.is_some()
})
.partition::<Vec<_>, _>(|cmd_name| cmd_name.contains(' '));
fn process_name(
dict: &mut TaggedDictBuilder,
cmd_name: &str,
scope: Scope,
rest: Vec<Tagged<String>>,
name: Tag,
) -> Result<(), ShellError> {
let document_tag = rest[0].tag.clone();
let value = command_dict(
scope.get_command(cmd_name).ok_or_else(|| {
ShellError::labeled_error(
format!("Could not load {}", cmd_name),
"could not load command",
document_tag,
)
})?,
name,
);
dict.insert_untagged("name", cmd_name);
dict.insert_untagged(
"description",
value
.get_data_by_key("usage".spanned_unknown())
.ok_or_else(|| {
ShellError::labeled_error(
"Expected a usage key",
"expected a 'usage' key",
&value.tag,
)
})?
.as_string()?,
);
Ok(())
}
fn make_subcommands_table(
subcommand_names: &mut Vec<String>,
cmd_name: &str,
scope: Scope,
rest: Vec<Tagged<String>>,
name: Tag,
) -> Result<Value, ShellError> {
let (matching, not_matching) =
subcommand_names.drain(..).partition(|subcommand_name| {
subcommand_name.starts_with(&format!("{} ", cmd_name))
});
*subcommand_names = not_matching;
Ok(if !matching.is_empty() {
UntaggedValue::table(
&(matching
.into_iter()
.map(|cmd_name: String| -> Result<_, ShellError> {
let mut short_desc = TaggedDictBuilder::new(name.clone());
process_name(
&mut short_desc,
&cmd_name,
scope.clone(),
rest.clone(),
name.clone(),
)?;
Ok(short_desc.into_value())
})
.collect::<Result<Vec<_>, _>>()?[..]),
)
.into_value(name)
} else {
UntaggedValue::nothing().into_value(name)
})
}
let iterator =
command_names
.into_iter()
.map(move |cmd_name| -> Result<_, ShellError> {
let mut short_desc = TaggedDictBuilder::new(name.clone());
process_name(
&mut short_desc,
&cmd_name,
scope.clone(),
rest.clone(),
name.clone(),
)?;
short_desc.insert_value(
"subcommands",
make_subcommands_table(
&mut subcommand_names,
&cmd_name,
scope.clone(),
rest.clone(),
name.clone(),
)?,
);
ReturnSuccess::value(short_desc.into_value())
});
Ok(iterator.into_action_stream())
} else if rest[0].item == "generate_docs" {
Ok(ActionStream::one(ReturnSuccess::value(generate_docs(
&scope,
))))
} else if rest.len() == 2 {
// Check for a subcommand
let command_name = format!("{} {}", rest[0].item, rest[1].item);
if let Some(command) = scope.get_command(&command_name) {
Ok(ActionStream::one(ReturnSuccess::value(
UntaggedValue::string(get_full_help(command.stream_command(), &scope))
.into_value(Tag::unknown()),
)))
} else {
Ok(ActionStream::empty())
}
} else if let Some(command) = scope.get_command(&rest[0].item) {
Ok(ActionStream::one(ReturnSuccess::value(
UntaggedValue::string(get_full_help(command.stream_command(), &scope))
.into_value(Tag::unknown()),
)))
} else {
Err(ShellError::labeled_error(
"Can't find command (use 'help commands' for full list)",
"can't find command",
rest[0].tag.span,
))
}
*/
} else {
let msg = r#"Welcome to Nushell.
Here are some tips to help you get started.
* help commands - list all available commands
* help <command name> - display help about a particular command
Nushell works on the idea of a "pipeline". Pipelines are commands connected with the '|' character.
Each stage in the pipeline works together to load, parse, and display information to you.
[Examples]
List the files in the current directory, sorted by size:
ls | sort-by size
Get information about the current system:
sys | get host
Get the processes on your system actively using CPU:
ps | where cpu > 0
You can also learn more at https://www.nushell.sh/book/"#;
Ok(Value::String {
val: msg.into(),
span,
})
}
}
/*
fn for_spec(name: &str, ty: &str, required: bool, tag: impl Into<Tag>) -> Value {
let tag = tag.into();
let mut spec = TaggedDictBuilder::new(tag);
spec.insert_untagged("name", UntaggedValue::string(name));
spec.insert_untagged("type", UntaggedValue::string(ty));
spec.insert_untagged(
"required",
UntaggedValue::string(if required { "yes" } else { "no" }),
);
spec.into_value()
}
pub fn signature_dict(signature: Signature, tag: impl Into<Tag>) -> Value {
let tag = tag.into();
let mut sig = TaggedListBuilder::new(&tag);
for arg in &signature.positional {
let is_required = matches!(arg.0, PositionalType::Mandatory(_, _));
sig.push_value(for_spec(arg.0.name(), "argument", is_required, &tag));
}
if signature.rest_positional.is_some() {
let is_required = false;
sig.push_value(for_spec("rest", "argument", is_required, &tag));
}
for (name, ty) in &signature.named {
match ty.0 {
NamedType::Mandatory(_, _) => sig.push_value(for_spec(name, "flag", true, &tag)),
NamedType::Optional(_, _) => sig.push_value(for_spec(name, "flag", false, &tag)),
NamedType::Switch(_) => sig.push_value(for_spec(name, "switch", false, &tag)),
}
}
sig.into_value()
}
fn command_dict(command: Command, tag: impl Into<Tag>) -> Value {
let tag = tag.into();
let mut cmd_dict = TaggedDictBuilder::new(&tag);
cmd_dict.insert_untagged("name", UntaggedValue::string(command.name()));
cmd_dict.insert_untagged("type", UntaggedValue::string("Command"));
cmd_dict.insert_value("signature", signature_dict(command.signature(), tag));
cmd_dict.insert_untagged("usage", UntaggedValue::string(command.usage()));
cmd_dict.into_value()
}
*/


@ -11,13 +11,13 @@ impl Command for If {
}
fn usage(&self) -> &str {
"Create a variable and give it a value."
"Conditionally run a block."
}
fn signature(&self) -> nu_protocol::Signature {
Signature::build("if")
.required("cond", SyntaxShape::Expression, "condition")
.required("then_block", SyntaxShape::Block, "then block")
.required("then_block", SyntaxShape::Block(Some(vec![])), "then block")
.optional(
"else",
SyntaxShape::Keyword(b"else".to_vec(), Box::new(SyntaxShape::Expression)),


@ -0,0 +1,19 @@
mod alias;
mod def;
mod do_;
mod help;
mod if_;
mod let_;
mod module;
mod source;
mod use_;
pub use alias::Alias;
pub use def::Def;
pub use do_::Do;
pub use help::Help;
pub use if_::If;
pub use let_::Let;
pub use module::Module;
pub use source::Source;
pub use use_::Use;


@ -0,0 +1,34 @@
use nu_protocol::ast::Call;
use nu_protocol::engine::{Command, EvaluationContext};
use nu_protocol::{Signature, SyntaxShape, Value};
pub struct Module;
impl Command for Module {
fn name(&self) -> &str {
"module"
}
fn usage(&self) -> &str {
"Define a custom module"
}
fn signature(&self) -> nu_protocol::Signature {
Signature::build("module")
.required("module_name", SyntaxShape::String, "module name")
.required(
"block",
SyntaxShape::Block(Some(vec![])),
"body of the module",
)
}
fn run(
&self,
_context: &EvaluationContext,
call: &Call,
_input: Value,
) -> Result<nu_protocol::Value, nu_protocol::ShellError> {
Ok(Value::Nothing { span: call.head })
}
}


@ -1,10 +1,6 @@
use nu_engine::{eval_block, eval_expression};
use nu_parser::parse;
use nu_protocol::ast::{Block, Call};
use nu_protocol::engine::{Command, EngineState, EvaluationContext, StateWorkingSet};
use nu_protocol::{ShellError, Signature, SyntaxShape, Value};
use std::task::Context;
use std::{borrow::Cow, path::Path, path::PathBuf};
use nu_protocol::ast::Call;
use nu_protocol::engine::{Command, EvaluationContext};
use nu_protocol::{Signature, SyntaxShape, Value};
/// Source a file for environment variables.
pub struct Source;


@ -0,0 +1,28 @@
use nu_protocol::ast::Call;
use nu_protocol::engine::{Command, EvaluationContext};
use nu_protocol::{Signature, SyntaxShape, Value};
pub struct Use;
impl Command for Use {
fn name(&self) -> &str {
"use"
}
fn usage(&self) -> &str {
"Use definitions from a module"
}
fn signature(&self) -> nu_protocol::Signature {
Signature::build("use").required("module_name", SyntaxShape::String, "module name")
}
fn run(
&self,
_context: &EvaluationContext,
call: &Call,
_input: Value,
) -> Result<nu_protocol::Value, nu_protocol::ShellError> {
Ok(Value::Nothing { span: call.head })
}
}


@ -2,10 +2,14 @@ use std::{cell::RefCell, rc::Rc};
use nu_protocol::{
engine::{EngineState, StateWorkingSet},
Signature, SyntaxShape,
Signature,
};
use crate::{Alias, Benchmark, BuildString, Def, Do, Each, For, If, Length, Let, LetEnv, Source};
use crate::{
Alias, Benchmark, BuildString, Def, Do, Each, External, For, From, FromJson, Git, GitCheckout,
Help, If, Length, Let, LetEnv, Lines, ListGitBranches, Ls, Module, Ps, Source, Sys, Table, Use,
Where,
};
pub fn create_default_context() -> Rc<RefCell<EngineState>> {
let engine_state = Rc::new(RefCell::new(EngineState::new()));
@ -13,31 +17,34 @@ pub fn create_default_context() -> Rc<RefCell<EngineState>> {
let engine_state = engine_state.borrow();
let mut working_set = StateWorkingSet::new(&*engine_state);
let sig =
Signature::build("where").required("cond", SyntaxShape::RowCondition, "condition");
working_set.add_decl(sig.predeclare());
working_set.add_decl(Box::new(If));
working_set.add_decl(Box::new(Let));
working_set.add_decl(Box::new(LetEnv));
working_set.add_decl(Box::new(Alias));
working_set.add_decl(Box::new(BuildString));
working_set.add_decl(Box::new(Def));
working_set.add_decl(Box::new(For));
working_set.add_decl(Box::new(Each));
working_set.add_decl(Box::new(Do));
working_set.add_decl(Box::new(Benchmark));
working_set.add_decl(Box::new(BuildString));
working_set.add_decl(Box::new(Def));
working_set.add_decl(Box::new(Do));
working_set.add_decl(Box::new(Each));
working_set.add_decl(Box::new(External));
working_set.add_decl(Box::new(For));
working_set.add_decl(Box::new(From));
working_set.add_decl(Box::new(FromJson));
working_set.add_decl(Box::new(Help));
working_set.add_decl(Box::new(If));
working_set.add_decl(Box::new(Length));
working_set.add_decl(Box::new(Let));
working_set.add_decl(Box::new(LetEnv));
working_set.add_decl(Box::new(Lines));
working_set.add_decl(Box::new(Ls));
working_set.add_decl(Box::new(Module));
working_set.add_decl(Box::new(Ps));
working_set.add_decl(Box::new(Sys));
working_set.add_decl(Box::new(Table));
working_set.add_decl(Box::new(Use));
working_set.add_decl(Box::new(Where));
// This is a WIP proof of concept
working_set.add_decl(Box::new(ListGitBranches));
working_set.add_decl(Box::new(Git));
working_set.add_decl(Box::new(GitCheckout));
working_set.add_decl(Box::new(Source));
@ -51,6 +58,8 @@ pub fn create_default_context() -> Rc<RefCell<EngineState>> {
working_set.add_decl(sig.predeclare());
let sig = Signature::build("stack");
working_set.add_decl(sig.predeclare());
let sig = Signature::build("contents");
working_set.add_decl(sig.predeclare());
working_set.render()
};
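
Every built-in is registered here the same way: box the command type and hand it to the working set with add_decl, while a couple of bare signatures (stack, contents) are still added via predeclare. A hedged sketch of what one more command would look like under the Command trait as it appears throughout this commit; the Greet type is invented purely for illustration:

use nu_protocol::ast::Call;
use nu_protocol::engine::{Command, EvaluationContext};
use nu_protocol::{ShellError, Signature, Value};

// Hypothetical command, used only to show the registration pattern above.
pub struct Greet;

impl Command for Greet {
    fn name(&self) -> &str {
        "greet"
    }

    fn usage(&self) -> &str {
        "Say hello"
    }

    fn signature(&self) -> Signature {
        Signature::build("greet")
    }

    fn run(
        &self,
        _context: &EvaluationContext,
        call: &Call,
        _input: Value,
    ) -> Result<Value, ShellError> {
        Ok(Value::String {
            val: "hello".into(),
            span: call.head,
        })
    }
}

// Registered alongside the others in create_default_context:
// working_set.add_decl(Box::new(Greet));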

crates/nu-command/src/env/mod.rs

@ -0,0 +1,3 @@
mod let_env;
pub use let_env::LetEnv;


@ -0,0 +1,51 @@
use nu_protocol::ast::Call;
use nu_protocol::engine::{Command, EvaluationContext};
use nu_protocol::{Signature, Value};
pub struct Git;
impl Command for Git {
fn name(&self) -> &str {
"git"
}
fn usage(&self) -> &str {
"Run a block"
}
fn signature(&self) -> nu_protocol::Signature {
Signature::build("git")
}
fn run(
&self,
_context: &EvaluationContext,
call: &Call,
_input: Value,
) -> Result<nu_protocol::Value, nu_protocol::ShellError> {
use std::process::Command as ProcessCommand;
use std::process::Stdio;
let proc = ProcessCommand::new("git").stdout(Stdio::piped()).spawn();
match proc {
Ok(child) => {
match child.wait_with_output() {
Ok(val) => {
let result = val.stdout;
Ok(Value::string(&String::from_utf8_lossy(&result), call.head))
}
Err(_err) => {
// FIXME
Ok(Value::nothing())
}
}
}
Err(_err) => {
// FIXME
Ok(Value::nothing())
}
}
}
}


@ -0,0 +1,66 @@
use nu_engine::eval_expression;
use nu_protocol::ast::Call;
use nu_protocol::engine::{Command, EvaluationContext};
use nu_protocol::{Signature, SyntaxShape, Value};
pub struct GitCheckout;
impl Command for GitCheckout {
fn name(&self) -> &str {
"git checkout"
}
fn usage(&self) -> &str {
"Checkout a git revision"
}
fn signature(&self) -> nu_protocol::Signature {
Signature::build("git checkout").required(
"branch",
SyntaxShape::Custom(Box::new(SyntaxShape::String), "list-git-branches".into()),
"the branch to checkout",
)
}
fn run(
&self,
context: &EvaluationContext,
call: &Call,
_input: Value,
) -> Result<nu_protocol::Value, nu_protocol::ShellError> {
use std::process::Command as ProcessCommand;
use std::process::Stdio;
let block = &call.positional[0];
let out = eval_expression(context, block)?;
let out = out.as_string()?;
let proc = ProcessCommand::new("git")
.arg("checkout")
.arg(out)
.stdout(Stdio::piped())
.spawn();
match proc {
Ok(child) => {
match child.wait_with_output() {
Ok(val) => {
let result = val.stdout;
Ok(Value::string(&String::from_utf8_lossy(&result), call.head))
}
Err(_err) => {
// FIXME
Ok(Value::nothing())
}
}
}
Err(_err) => {
// FIXME
Ok(Value::nothing())
}
}
}
}


@ -0,0 +1,69 @@
// Note: this is a temporary command that later will be converted into a pipeline
use std::process::Command as ProcessCommand;
use std::process::Stdio;
use nu_protocol::ast::Call;
use nu_protocol::engine::{Command, EvaluationContext};
use nu_protocol::{Signature, Value};
pub struct ListGitBranches;
//NOTE: this is not a real implementation :D. It's just a simple one to test with until we port the real one.
impl Command for ListGitBranches {
fn name(&self) -> &str {
"list-git-branches"
}
fn usage(&self) -> &str {
"List the git branches of the current directory."
}
fn signature(&self) -> nu_protocol::Signature {
Signature::build("list-git-branches")
}
fn run(
&self,
_context: &EvaluationContext,
call: &Call,
_input: Value,
) -> Result<nu_protocol::Value, nu_protocol::ShellError> {
let list_branches = ProcessCommand::new("git")
.arg("branch")
.stdout(Stdio::piped())
.spawn();
if let Ok(child) = list_branches {
if let Ok(output) = child.wait_with_output() {
let val = output.stdout;
let s = String::from_utf8_lossy(&val).to_string();
let lines: Vec<_> = s
.lines()
.filter_map(|x| {
if x.starts_with("* ") {
None
} else {
Some(x.trim())
}
})
.map(|x| Value::String {
val: x.into(),
span: call.head,
})
.collect();
Ok(Value::List {
vals: lines,
span: call.head,
})
} else {
Ok(Value::Nothing { span: call.head })
}
} else {
Ok(Value::Nothing { span: call.head })
}
}
}
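
For reference, the branch parsing above can be exercised in isolation. A minimal sketch (the function name and the tiny test harness are illustrative, not part of this commit) applying the same rule: drop the "* "-prefixed current branch and trim the remaining names.

// Minimal sketch of the parsing rule used by `list-git-branches`:
// skip the "* "-prefixed current branch, trim everything else.
fn parse_branch_list(output: &str) -> Vec<String> {
    output
        .lines()
        .filter_map(|line| {
            if line.starts_with("* ") {
                None
            } else {
                Some(line.trim().to_string())
            }
        })
        .collect()
}

fn main() {
    let sample = "  main\n* feature/engine-q\n  bugfix/lines\n";
    assert_eq!(parse_branch_list(sample), vec!["main", "bugfix/lines"]);
}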

View File

@ -0,0 +1,7 @@
mod git;
mod git_checkout;
mod list_git_branches;
pub use git::Git;
pub use git_checkout::GitCheckout;
pub use list_git_branches::ListGitBranches;

View File

@ -0,0 +1,93 @@
use nu_engine::eval_expression;
use nu_protocol::ast::Call;
use nu_protocol::engine::{Command, EvaluationContext};
use nu_protocol::{IntoValueStream, Signature, SyntaxShape, Value};
pub struct Ls;
//NOTE: this is not a real implementation :D. It's just a simple one to test with until we port the real one.
impl Command for Ls {
fn name(&self) -> &str {
"ls"
}
fn usage(&self) -> &str {
"List the files in a directory."
}
fn signature(&self) -> nu_protocol::Signature {
Signature::build("ls").optional(
"pattern",
SyntaxShape::GlobPattern,
"the glob pattern to use",
)
}
fn run(
&self,
context: &EvaluationContext,
call: &Call,
_input: Value,
) -> Result<nu_protocol::Value, nu_protocol::ShellError> {
let pattern = if let Some(expr) = call.positional.get(0) {
let result = eval_expression(context, expr)?;
result.as_string()?
} else {
"*".into()
};
let call_span = call.head;
let glob = glob::glob(&pattern).unwrap();
Ok(Value::Stream {
stream: glob
.into_iter()
.map(move |x| match x {
Ok(path) => match std::fs::symlink_metadata(&path) {
Ok(metadata) => {
let is_file = metadata.is_file();
let is_dir = metadata.is_dir();
let filesize = metadata.len();
Value::Record {
cols: vec!["name".into(), "type".into(), "size".into()],
vals: vec![
Value::String {
val: path.to_string_lossy().to_string(),
span: call_span,
},
if is_file {
Value::string("file", call_span)
} else if is_dir {
Value::string("dir", call_span)
} else {
Value::Nothing { span: call_span }
},
Value::Int {
val: filesize as i64,
span: call_span,
},
],
span: call_span,
}
}
Err(_) => Value::Record {
cols: vec!["name".into(), "type".into(), "size".into()],
vals: vec![
Value::String {
val: path.to_string_lossy().to_string(),
span: call_span,
},
Value::Nothing { span: call_span },
Value::Nothing { span: call_span },
],
span: call_span,
},
},
_ => Value::Nothing { span: call_span },
})
.into_value_stream(),
span: call_span,
})
}
}
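
The listing above boils down to expanding a glob pattern and classifying each match with `symlink_metadata` (so symlinks are not followed). A standalone sketch of that same approach, outside the nu_protocol value types, is below; the function name is illustrative and it assumes `glob = "0.3"` in Cargo.toml.

// Standalone sketch of the globbing approach used by `ls` above.
fn list_entries(pattern: &str) -> Result<Vec<(String, &'static str, u64)>, glob::PatternError> {
    let mut entries = Vec::new();
    for item in glob::glob(pattern)? {
        if let Ok(path) = item {
            if let Ok(metadata) = std::fs::symlink_metadata(&path) {
                let kind = if metadata.is_file() {
                    "file"
                } else if metadata.is_dir() {
                    "dir"
                } else {
                    "other"
                };
                entries.push((path.to_string_lossy().to_string(), kind, metadata.len()));
            }
        }
    }
    Ok(entries)
}

fn main() {
    for (name, kind, size) in list_entries("*").expect("invalid glob pattern") {
        println!("{}\t{}\t{}", name, kind, size);
    }
}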

View File

@ -0,0 +1,3 @@
mod ls;
pub use ls::Ls;

View File

@ -15,7 +15,13 @@ impl Command for Each {
}
fn signature(&self) -> nu_protocol::Signature {
Signature::build("each").required("block", SyntaxShape::Block, "the block to run")
Signature::build("each")
.required(
"block",
SyntaxShape::Block(Some(vec![SyntaxShape::Any])),
"the block to run",
)
.switch("numbered", "iterate with an index", Some('n'))
}
fn run(
@ -27,20 +33,42 @@ impl Command for Each {
let block_id = call.positional[0]
.as_block()
.expect("internal error: expected block");
let numbered = call.has_flag("numbered");
let context = context.clone();
let span = call.head;
match input {
Value::Range { val, .. } => Ok(Value::Stream {
stream: val
.into_iter()
.map(move |x| {
.enumerate()
.map(move |(idx, x)| {
let engine_state = context.engine_state.borrow();
let block = engine_state.get_block(block_id);
let state = context.enter_scope();
if let Some(var) = block.signature.get_positional(0) {
if let Some(var_id) = &var.var_id {
state.add_var(*var_id, x);
if numbered {
state.add_var(
*var_id,
Value::Record {
cols: vec!["index".into(), "item".into()],
vals: vec![
Value::Int {
val: idx as i64,
span,
},
x,
],
span,
},
);
} else {
state.add_var(*var_id, x);
}
}
}
@ -55,14 +83,32 @@ impl Command for Each {
Value::List { vals: val, .. } => Ok(Value::Stream {
stream: val
.into_iter()
.map(move |x| {
.enumerate()
.map(move |(idx, x)| {
let engine_state = context.engine_state.borrow();
let block = engine_state.get_block(block_id);
let state = context.enter_scope();
if let Some(var) = block.signature.get_positional(0) {
if let Some(var_id) = &var.var_id {
state.add_var(*var_id, x);
if numbered {
state.add_var(
*var_id,
Value::Record {
cols: vec!["index".into(), "item".into()],
vals: vec![
Value::Int {
val: idx as i64,
span,
},
x,
],
span,
},
);
} else {
state.add_var(*var_id, x);
}
}
}
@ -76,14 +122,32 @@ impl Command for Each {
}),
Value::Stream { stream, .. } => Ok(Value::Stream {
stream: stream
.map(move |x| {
.enumerate()
.map(move |(idx, x)| {
let engine_state = context.engine_state.borrow();
let block = engine_state.get_block(block_id);
let state = context.enter_scope();
if let Some(var) = block.signature.get_positional(0) {
if let Some(var_id) = &var.var_id {
state.add_var(*var_id, x);
if numbered {
state.add_var(
*var_id,
Value::Record {
cols: vec!["index".into(), "item".into()],
vals: vec![
Value::Int {
val: idx as i64,
span,
},
x,
],
span,
},
);
} else {
state.add_var(*var_id, x);
}
}
}
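
The `--numbered` handling follows one pattern in all three input branches above: enumerate the stream and, when the flag is set, wrap each element in an index/item record before binding it to the block's variable. A plain-iterator sketch of that shape (the `Item` enum here is illustrative, not a nu_protocol type):

// Plain-iterator sketch of the `--numbered` wrapping used by `each`:
// either pass the item through, or pair it with its index first.
#[derive(Debug, PartialEq)]
enum Item<T> {
    Plain(T),
    Numbered { index: usize, item: T },
}

fn wrap_numbered<T>(input: impl IntoIterator<Item = T>, numbered: bool) -> Vec<Item<T>> {
    input
        .into_iter()
        .enumerate()
        .map(|(index, item)| {
            if numbered {
                Item::Numbered { index, item }
            } else {
                Item::Plain(item)
            }
        })
        .collect()
}

fn main() {
    assert_eq!(
        wrap_numbered(["a", "b"], true),
        vec![
            Item::Numbered { index: 0, item: "a" },
            Item::Numbered { index: 1, item: "b" },
        ]
    );
}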

View File

@ -1,7 +1,7 @@
use nu_engine::{eval_block, eval_expression};
use nu_protocol::ast::Call;
use nu_protocol::engine::{Command, EvaluationContext};
use nu_protocol::{IntoValueStream, Signature, SyntaxShape, Value};
use nu_protocol::{Example, IntoValueStream, Signature, Span, SyntaxShape, Value};
pub struct For;
@ -29,7 +29,11 @@ impl Command for For {
),
"range of the loop",
)
.required("block", SyntaxShape::Block, "the block to run")
.required(
"block",
SyntaxShape::Block(Some(vec![])),
"the block to run",
)
}
fn run(
@ -87,4 +91,42 @@ impl Command for For {
_ => Ok(Value::nothing()),
}
}
fn examples(&self) -> Vec<Example> {
let span = Span::unknown();
vec![
Example {
description: "Echo the square of each integer",
example: "for x in [1 2 3] { $x * $x }",
result: Some(vec![
Value::Int { val: 1, span },
Value::Int { val: 4, span },
Value::Int { val: 9, span },
]),
},
Example {
description: "Work with elements of a range",
example: "for $x in 1..3 { $x }",
result: Some(vec![
Value::Int { val: 1, span },
Value::Int { val: 2, span },
Value::Int { val: 3, span },
]),
},
Example {
description: "Number each item and echo a message",
example: "for $it in ['bob' 'fred'] --numbered { $\"($it.index) is ($it.item)\" }",
result: Some(vec![
Value::String {
val: "0 is bob".into(),
span,
},
Value::String {
val: "0 is fred".into(),
span,
},
]),
},
]
}
}

View File

@ -0,0 +1,92 @@
use std::cell::RefCell;
use std::rc::Rc;
use nu_protocol::ast::Call;
use nu_protocol::engine::{Command, EvaluationContext};
use nu_protocol::{ShellError, Signature, Span, Value, ValueStream};
pub struct Lines;
const SPLIT_CHAR: char = '\n';
impl Command for Lines {
fn name(&self) -> &str {
"lines"
}
fn usage(&self) -> &str {
"Converts input to lines"
}
fn signature(&self) -> nu_protocol::Signature {
Signature::build("lines")
}
fn run(
&self,
_context: &EvaluationContext,
call: &Call,
input: Value,
) -> Result<nu_protocol::Value, nu_protocol::ShellError> {
match input {
#[allow(clippy::needless_collect)]
// Collect is needed because the string may not live long enough for
// the Rc structure to continue using it. If split could take ownership
// of the split values, then this wouldn't be needed
Value::String { val, span } => {
let lines = val
.split(SPLIT_CHAR)
.map(|s| s.to_string())
.collect::<Vec<String>>();
let iter = lines.into_iter().filter_map(move |s| {
if !s.is_empty() {
Some(Value::String { val: s, span })
} else {
None
}
});
Ok(Value::Stream {
stream: ValueStream(Rc::new(RefCell::new(iter))),
span: Span::unknown(),
})
}
Value::Stream { stream, span: _ } => {
let iter = stream
.into_iter()
.filter_map(|value| {
if let Value::String { val, span } = value {
let inner = val
.split(SPLIT_CHAR)
.filter_map(|s| {
if !s.is_empty() {
Some(Value::String {
val: s.trim().into(),
span,
})
} else {
None
}
})
.collect::<Vec<Value>>();
Some(inner)
} else {
None
}
})
.flatten();
Ok(Value::Stream {
stream: ValueStream(Rc::new(RefCell::new(iter))),
span: Span::unknown(),
})
}
val => Err(ShellError::UnsupportedInput(
format!("Not supported input: {}", val.as_string()?),
call.head,
)),
}
}
}
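
Stripped of the streaming machinery, the rule applied above is: split on the newline character and drop empty segments. A minimal standalone sketch of that splitting rule (mirroring the String branch, which does not trim):

// Standalone sketch of the splitting rule used by `lines`.
const SPLIT_CHAR: char = '\n';

fn split_lines(input: &str) -> Vec<String> {
    input
        .split(SPLIT_CHAR)
        .filter(|segment| !segment.is_empty())
        .map(|segment| segment.to_string())
        .collect()
}

fn main() {
    assert_eq!(split_lines("a\nb\n\nc\n"), vec!["a", "b", "c"]);
}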

View File

@ -0,0 +1,11 @@
mod each;
mod for_;
mod length;
mod lines;
mod where_;
pub use each::Each;
pub use for_::For;
pub use length::Length;
pub use lines::Lines;
pub use where_::Where;

View File

@ -0,0 +1,92 @@
use nu_engine::eval_expression;
use nu_protocol::ast::{Call, Expr, Expression};
use nu_protocol::engine::{Command, EvaluationContext};
use nu_protocol::{IntoValueStream, ShellError, Signature, SyntaxShape, Value};
pub struct Where;
impl Command for Where {
fn name(&self) -> &str {
"where"
}
fn usage(&self) -> &str {
"Filter values based on a condition."
}
fn signature(&self) -> nu_protocol::Signature {
Signature::build("where").required("cond", SyntaxShape::RowCondition, "condition")
}
fn run(
&self,
context: &EvaluationContext,
call: &Call,
input: Value,
) -> Result<nu_protocol::Value, nu_protocol::ShellError> {
let cond = call.positional[0].clone();
let context = context.enter_scope();
let (var_id, cond) = match cond {
Expression {
expr: Expr::RowCondition(var_id, expr),
..
} => (var_id, expr),
_ => return Err(ShellError::InternalError("Expected row condition".into())),
};
match input {
Value::Stream { stream, span } => {
let output_stream = stream
.filter(move |value| {
context.add_var(var_id, value.clone());
let result = eval_expression(&context, &cond);
match result {
Ok(result) => result.is_true(),
_ => false,
}
})
.into_value_stream();
Ok(Value::Stream {
stream: output_stream,
span,
})
}
Value::List { vals, span } => {
let output_stream = vals
.into_iter()
.filter(move |value| {
context.add_var(var_id, value.clone());
let result = eval_expression(&context, &cond);
match result {
Ok(result) => result.is_true(),
_ => false,
}
})
.into_value_stream();
Ok(Value::Stream {
stream: output_stream,
span,
})
}
x => {
context.add_var(var_id, x.clone());
let result = eval_expression(&context, &cond)?;
if result.is_true() {
Ok(x)
} else {
Ok(Value::Nothing { span: call.head })
}
}
}
}
}

View File

@ -0,0 +1,28 @@
use nu_protocol::ast::Call;
use nu_protocol::engine::{Command, EvaluationContext};
use nu_protocol::{ShellError, Signature, Value};
pub struct From;
impl Command for From {
fn name(&self) -> &str {
"from"
}
fn usage(&self) -> &str {
"Parse a string or binary data into structured data"
}
fn signature(&self) -> nu_protocol::Signature {
Signature::build("from")
}
fn run(
&self,
_context: &EvaluationContext,
_call: &Call,
_input: Value,
) -> Result<nu_protocol::Value, ShellError> {
Ok(Value::nothing())
}
}

View File

@ -0,0 +1,111 @@
use nu_protocol::ast::Call;
use nu_protocol::engine::{Command, EvaluationContext};
use nu_protocol::{IntoValueStream, ShellError, Signature, Span, Value};
pub struct FromJson;
impl Command for FromJson {
fn name(&self) -> &str {
"from json"
}
fn usage(&self) -> &str {
"Convert from json to structured data"
}
fn signature(&self) -> nu_protocol::Signature {
Signature::build("from json").switch(
"objects",
"treat each line as a separate value",
Some('o'),
)
}
fn run(
&self,
_context: &EvaluationContext,
call: &Call,
input: Value,
) -> Result<nu_protocol::Value, ShellError> {
let span = input.span();
let mut string_input = input.collect_string();
string_input.push('\n');
// TODO: turn this into a structured underline of the nu_json error
if call.has_flag("objects") {
#[allow(clippy::needless_collect)]
let lines: Vec<String> = string_input.lines().map(|x| x.to_string()).collect();
Ok(Value::Stream {
stream: lines
.into_iter()
.map(move |mut x| {
x.push('\n');
match convert_string_to_value(x, span) {
Ok(v) => v,
Err(error) => Value::Error { error },
}
})
.into_value_stream(),
span,
})
} else {
convert_string_to_value(string_input, span)
}
}
}
fn convert_nujson_to_value(value: &nu_json::Value, span: Span) -> Value {
match value {
nu_json::Value::Array(array) => {
let v: Vec<Value> = array
.iter()
.map(|x| convert_nujson_to_value(x, span))
.collect();
Value::List { vals: v, span }
}
nu_json::Value::Bool(b) => Value::Bool { val: *b, span },
nu_json::Value::F64(f) => Value::Float { val: *f, span },
nu_json::Value::I64(i) => Value::Int { val: *i, span },
nu_json::Value::Null => Value::Nothing { span },
nu_json::Value::Object(k) => {
let mut cols = vec![];
let mut vals = vec![];
for item in k {
cols.push(item.0.clone());
vals.push(convert_nujson_to_value(item.1, span));
}
Value::Record { cols, vals, span }
}
nu_json::Value::U64(u) => {
if *u > i64::MAX as u64 {
Value::Error {
error: ShellError::CantConvert("i64 sized integer".into(), span),
}
} else {
Value::Int {
val: *u as i64,
span,
}
}
}
nu_json::Value::String(s) => Value::String {
val: s.clone(),
span,
},
}
}
fn convert_string_to_value(string_input: String, span: Span) -> Result<Value, ShellError> {
let result: Result<nu_json::Value, nu_json::Error> = nu_json::from_str(&string_input);
match result {
Ok(value) => Ok(convert_nujson_to_value(&value, span)),
Err(_x) => Err(ShellError::CantConvert(
"structured data from json".into(),
span,
)),
}
}
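
`convert_nujson_to_value` is a straightforward structural recursion over the parsed tree. The same shape written against serde_json instead of nu_json, as a hedged, illustrative analogue (the `Simple` enum is a stand-in, not nu_protocol::Value), looks like this; it assumes `serde_json = "1"` in Cargo.toml.

// Illustrative analogue of convert_nujson_to_value over serde_json:
// the conversion is a structural recursion over the parsed tree.
#[derive(Debug)]
enum Simple {
    Nothing,
    Bool(bool),
    Int(i64),
    Float(f64),
    Str(String),
    List(Vec<Simple>),
    Record(Vec<(String, Simple)>),
}

fn convert(value: &serde_json::Value) -> Simple {
    match value {
        serde_json::Value::Null => Simple::Nothing,
        serde_json::Value::Bool(b) => Simple::Bool(*b),
        serde_json::Value::Number(n) => {
            // Prefer i64 where it fits, mirroring the i64/f64 split above.
            if let Some(i) = n.as_i64() {
                Simple::Int(i)
            } else {
                Simple::Float(n.as_f64().unwrap_or(f64::NAN))
            }
        }
        serde_json::Value::String(s) => Simple::Str(s.clone()),
        serde_json::Value::Array(items) => Simple::List(items.iter().map(convert).collect()),
        serde_json::Value::Object(map) => {
            Simple::Record(map.iter().map(|(k, v)| (k.clone(), convert(v))).collect())
        }
    }
}

fn main() {
    let parsed: serde_json::Value =
        serde_json::from_str(r#"{"name": "nu", "stars": 1}"#).expect("valid json");
    println!("{:?}", convert(&parsed));
}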

View File

@ -0,0 +1,5 @@
mod command;
mod json;
pub use command::From;
pub use json::FromJson;

View File

@ -0,0 +1,3 @@
mod from;
pub use from::*;

View File

@ -1,27 +1,21 @@
mod alias;
mod benchmark;
mod build_string;
mod def;
mod core_commands;
mod default_context;
mod do_;
mod each;
mod for_;
mod if_;
mod length;
mod let_;
mod let_env;
mod source;
mod env;
mod experimental;
mod filesystem;
mod filters;
mod formats;
mod strings;
mod system;
mod viewers;
pub use alias::Alias;
pub use benchmark::Benchmark;
pub use build_string::BuildString;
pub use def::Def;
pub use default_context::create_default_context;
pub use do_::Do;
pub use each::Each;
pub use for_::For;
pub use if_::If;
pub use length::Length;
pub use let_::Let;
pub use let_env::LetEnv;
pub use source::Source;
pub use core_commands::*;
pub use default_context::*;
pub use env::*;
pub use experimental::*;
pub use filesystem::*;
pub use filters::*;
pub use formats::*;
pub use strings::*;
pub use system::*;
pub use viewers::*;

View File

@ -1,7 +1,7 @@
use nu_engine::eval_expression;
use nu_protocol::ast::Call;
use nu_protocol::engine::{Command, EvaluationContext};
use nu_protocol::{Signature, SyntaxShape, Value};
use nu_protocol::{ShellError, Signature, SyntaxShape, Value};
pub struct BuildString;
@ -24,13 +24,12 @@ impl Command for BuildString {
call: &Call,
_input: Value,
) -> Result<nu_protocol::Value, nu_protocol::ShellError> {
let mut output = vec![];
let output = call
.positional
.iter()
.map(|expr| eval_expression(context, expr).map(|val| val.into_string()))
.collect::<Result<Vec<String>, ShellError>>()?;
for expr in &call.positional {
let val = eval_expression(context, expr)?;
output.push(val.into_string());
}
Ok(Value::String {
val: output.join(""),
span: call.head,

View File

@ -0,0 +1,3 @@
mod build_string;
pub use build_string::BuildString;

View File

@ -17,7 +17,11 @@ impl Command for Benchmark {
}
fn signature(&self) -> nu_protocol::Signature {
Signature::build("benchmark").required("block", SyntaxShape::Block, "the block to run")
Signature::build("benchmark").required(
"block",
SyntaxShape::Block(Some(vec![])),
"the block to run",
)
}
fn run(

View File

@ -0,0 +1,9 @@
mod benchmark;
mod ps;
mod run_external;
mod sys;
pub use benchmark::Benchmark;
pub use ps::Ps;
pub use run_external::{External, ExternalCommand};
pub use sys::Sys;

View File

@ -0,0 +1,128 @@
use nu_protocol::{
ast::Call,
engine::{Command, EvaluationContext},
Example, ShellError, Signature, Value,
};
use sysinfo::{ProcessExt, System, SystemExt};
pub struct Ps;
impl Command for Ps {
fn name(&self) -> &str {
"ps"
}
fn signature(&self) -> Signature {
Signature::build("ps")
.desc("View information about system processes.")
.switch(
"long",
"list all available columns for each entry",
Some('l'),
)
.filter()
}
fn usage(&self) -> &str {
"View information about system processes."
}
fn run(
&self,
_context: &EvaluationContext,
call: &Call,
_input: Value,
) -> Result<nu_protocol::Value, nu_protocol::ShellError> {
run_ps(call)
}
fn examples(&self) -> Vec<Example> {
vec![Example {
description: "List the system processes",
example: "ps",
result: None,
}]
}
}
fn run_ps(call: &Call) -> Result<Value, ShellError> {
let span = call.head;
let long = call.has_flag("long");
let mut sys = System::new_all();
sys.refresh_all();
let mut output = vec![];
let result: Vec<_> = sys.processes().iter().map(|x| *x.0).collect();
for pid in result {
if let Some(result) = sys.process(pid) {
let mut cols = vec![];
let mut vals = vec![];
cols.push("pid".into());
vals.push(Value::Int {
val: pid as i64,
span,
});
cols.push("name".into());
vals.push(Value::String {
val: result.name().into(),
span,
});
cols.push("status".into());
vals.push(Value::String {
val: format!("{:?}", result.status()),
span,
});
cols.push("cpu".into());
vals.push(Value::Float {
val: result.cpu_usage() as f64,
span,
});
cols.push("mem".into());
vals.push(Value::Filesize {
val: result.memory() * 1000,
span,
});
cols.push("virtual".into());
vals.push(Value::Filesize {
val: result.virtual_memory() * 1000,
span,
});
if long {
cols.push("parent".into());
if let Some(parent) = result.parent() {
vals.push(Value::Int {
val: parent as i64,
span,
});
} else {
vals.push(Value::Nothing { span });
}
cols.push("exe".into());
vals.push(Value::String {
val: result.exe().to_string_lossy().to_string(),
span,
});
cols.push("command".into());
vals.push(Value::String {
val: result.cmd().join(" "),
span,
});
}
output.push(Value::Record { cols, vals, span });
}
}
Ok(Value::List { vals: output, span })
}
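
A minimal sketch of the same sysinfo usage as `run_ps`, trimmed to the calls that appear above (refresh the process table, then read pid, name and cpu usage per entry). sysinfo's API changes between versions, so treat this as a sketch against the version this crate pins rather than a general recipe.

// Minimal sysinfo sketch mirroring run_ps above.
use sysinfo::{ProcessExt, System, SystemExt};

fn main() {
    let mut sys = System::new_all();
    sys.refresh_all();
    for (pid, process) in sys.processes() {
        // Same i64 cast as run_ps uses for the pid column.
        println!("{}\t{}\t{:.1}%", *pid as i64, process.name(), process.cpu_usage());
    }
}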

View File

@ -0,0 +1,281 @@
use std::borrow::Cow;
use std::cell::RefCell;
use std::env;
use std::io::{BufRead, BufReader, Write};
use std::process::{ChildStdin, Command as CommandSys, Stdio};
use std::rc::Rc;
use std::sync::mpsc;
use nu_protocol::{
ast::{Call, Expression},
engine::{Command, EvaluationContext},
ShellError, Signature, SyntaxShape, Value,
};
use nu_protocol::{Span, ValueStream};
use nu_engine::eval_expression;
const OUTPUT_BUFFER_SIZE: usize = 8192;
pub struct External;
impl Command for External {
fn name(&self) -> &str {
"run_external"
}
fn usage(&self) -> &str {
"Runs external command"
}
fn signature(&self) -> nu_protocol::Signature {
Signature::build("run_external")
.switch("last_expression", "last_expression", None)
.rest("rest", SyntaxShape::Any, "external command to run")
}
fn run(
&self,
context: &EvaluationContext,
call: &Call,
input: Value,
) -> Result<Value, ShellError> {
let command = ExternalCommand::try_new(call, context)?;
command.run_with_input(input)
}
}
pub struct ExternalCommand<'call, 'context> {
pub name: &'call Expression,
pub args: &'call [Expression],
pub context: &'context EvaluationContext,
pub last_expression: bool,
}
impl<'call, 'context> ExternalCommand<'call, 'context> {
pub fn try_new(
call: &'call Call,
context: &'context EvaluationContext,
) -> Result<Self, ShellError> {
if call.positional.is_empty() {
return Err(ShellError::ExternalNotSupported(call.head));
}
Ok(Self {
name: &call.positional[0],
args: &call.positional[1..],
context,
last_expression: call.has_flag("last_expression"),
})
}
pub fn get_name(&self) -> Result<String, ShellError> {
let value = eval_expression(self.context, self.name)?;
value.as_string()
}
pub fn get_args(&self) -> Vec<String> {
self.args
.iter()
.filter_map(|expr| eval_expression(self.context, expr).ok())
.filter_map(|value| value.as_string().ok())
.collect()
}
pub fn run_with_input(&self, input: Value) -> Result<Value, ShellError> {
let mut process = self.create_command();
// TODO: We don't have a way to know the current directory yet.
// This information should come from the EvaluationContext or EngineState.
let path = env::current_dir().unwrap();
process.current_dir(path);
let envs = self.context.stack.get_env_vars();
process.envs(envs);
// If the external is not the last command, its output will get piped
// either as a string or binary
if !self.last_expression {
process.stdout(Stdio::piped());
}
// If there is input from the pipeline, the process's stdin is piped
// so it can be used to send that input along.
if let Value::String { .. } = input {
process.stdin(Stdio::piped());
}
if let Value::Stream { .. } = input {
process.stdin(Stdio::piped());
}
match process.spawn() {
Err(err) => Err(ShellError::ExternalCommand(
format!("{}", err),
self.name.span,
)),
Ok(mut child) => {
// If the input is a string, binary, or stream, it is written to the child's stdin
match input {
Value::String { val, span: _ } => {
if let Some(mut stdin_write) = child.stdin.take() {
self.write_to_stdin(&mut stdin_write, val.as_bytes())?
}
}
Value::Binary { val, span: _ } => {
if let Some(mut stdin_write) = child.stdin.take() {
self.write_to_stdin(&mut stdin_write, &val)?
}
}
Value::Stream { stream, span: _ } => {
if let Some(mut stdin_write) = child.stdin.take() {
for value in stream {
match value {
Value::String { val, span: _ } => {
self.write_to_stdin(&mut stdin_write, val.as_bytes())?
}
Value::Binary { val, span: _ } => {
self.write_to_stdin(&mut stdin_write, &val)?
}
_ => continue,
}
}
}
}
_ => (),
}
// If this external is not the last expression, then its output is piped to a channel
// and we create a ValueStream that can be consumed
let value = if !self.last_expression {
let (tx, rx) = mpsc::channel();
let stdout = child.stdout.take().ok_or_else(|| {
ShellError::ExternalCommand(
"Error taking stdout from external".to_string(),
self.name.span,
)
})?;
std::thread::spawn(move || {
// Stdout is read through a buffered reader until there is an
// error or there are no more bytes to read.
let mut buf_read = BufReader::with_capacity(OUTPUT_BUFFER_SIZE, stdout);
while let Ok(bytes) = buf_read.fill_buf() {
if bytes.is_empty() {
break;
}
// The Cow generated from the function represents the conversion
// from bytes to String. If no replacements are required, then the
// borrowed value is a proper UTF-8 string. The Owned option represents
// a string where the values had to be replaced, thus marking it as bytes
let data = match String::from_utf8_lossy(bytes) {
Cow::Borrowed(s) => Data::String(s.into()),
Cow::Owned(_) => Data::Bytes(bytes.to_vec()),
};
let length = bytes.len();
buf_read.consume(length);
match tx.send(data) {
Ok(_) => continue,
Err(_) => break,
}
}
});
// The ValueStream is consumed by the next expression in the pipeline
Value::Stream {
stream: ValueStream(Rc::new(RefCell::new(ChannelReceiver::new(rx)))),
span: Span::unknown(),
}
} else {
Value::nothing()
};
match child.wait() {
Err(err) => Err(ShellError::ExternalCommand(
format!("{}", err),
self.name.span,
)),
Ok(_) => Ok(value),
}
}
}
}
fn create_command(&self) -> CommandSys {
// in all the other cases shell out
if cfg!(windows) {
//TODO. This should be modifiable from the config file.
// We could give the option to call from powershell
// for minimal builds cwd is unused
let mut process = CommandSys::new("cmd");
process.arg("/c");
process.arg(&self.get_name().unwrap());
for arg in self.get_args() {
// Clean the args before we use them:
// https://stackoverflow.com/questions/1200235/how-to-pass-a-quoted-pipe-character-to-cmd-exe
// cmd.exe needs to have a caret to escape a pipe
let arg = arg.replace("|", "^|");
process.arg(&arg);
}
process
} else {
let cmd_with_args = vec![self.get_name().unwrap(), self.get_args().join(" ")].join(" ");
let mut process = CommandSys::new("sh");
process.arg("-c").arg(cmd_with_args);
process
}
}
fn write_to_stdin(&self, stdin_write: &mut ChildStdin, val: &[u8]) -> Result<(), ShellError> {
if stdin_write.write_all(val).is_err() {
Err(ShellError::ExternalCommand(
"Error writing input to stdin".to_string(),
self.name.span,
))
} else {
Ok(())
}
}
}
// The piped data from stdout from the external command can be either String
// or binary. We use this enum to pass the data from the spawned process
enum Data {
String(String),
Bytes(Vec<u8>),
}
// Receiver used for the ValueStream
// It implements iterator so it can be used as a ValueStream
struct ChannelReceiver {
rx: mpsc::Receiver<Data>,
}
impl ChannelReceiver {
pub fn new(rx: mpsc::Receiver<Data>) -> Self {
Self { rx }
}
}
impl Iterator for ChannelReceiver {
type Item = Value;
fn next(&mut self) -> Option<Self::Item> {
match self.rx.recv() {
Ok(v) => match v {
Data::String(s) => Some(Value::String {
val: s,
span: Span::unknown(),
}),
Data::Bytes(b) => Some(Value::Binary {
val: b,
span: Span::unknown(),
}),
},
Err(_) => None,
}
}
}
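
The stdout handling above is the usual "pump a child's stdout through a channel on a background thread" pattern. A self-contained sketch of just that pattern, using only the standard library (the `echo` invocation is illustrative; any program that writes to stdout works):

// Self-contained sketch of the stdout-streaming pattern used above:
// a background thread fills a buffered reader from the child's stdout
// and forwards chunks over an mpsc channel, which the caller drains.
use std::io::{BufRead, BufReader};
use std::process::{Command, Stdio};
use std::sync::mpsc;
use std::thread;

const OUTPUT_BUFFER_SIZE: usize = 8192;

fn main() -> std::io::Result<()> {
    let mut child = Command::new("echo")
        .arg("hello from the child")
        .stdout(Stdio::piped())
        .spawn()?;

    let stdout = child.stdout.take().expect("stdout was piped above");
    let (tx, rx) = mpsc::channel::<Vec<u8>>();

    let reader = thread::spawn(move || {
        let mut buf_read = BufReader::with_capacity(OUTPUT_BUFFER_SIZE, stdout);
        // Read until EOF or error, sending each filled chunk downstream.
        while let Ok(bytes) = buf_read.fill_buf() {
            if bytes.is_empty() {
                break;
            }
            let chunk = bytes.to_vec();
            let length = chunk.len();
            if tx.send(chunk).is_err() {
                break;
            }
            buf_read.consume(length);
        }
    });

    // The receiving end plays the role of the ValueStream: it can be
    // consumed lazily by whatever sits next in the pipeline.
    for chunk in rx {
        print!("{}", String::from_utf8_lossy(&chunk));
    }

    reader.join().expect("reader thread panicked");
    child.wait()?;
    Ok(())
}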

View File

@ -0,0 +1,356 @@
use nu_protocol::{
ast::Call,
engine::{Command, EvaluationContext},
Example, ShellError, Signature, Span, Value,
};
use sysinfo::{ComponentExt, DiskExt, NetworkExt, ProcessorExt, System, SystemExt, UserExt};
pub struct Sys;
impl Command for Sys {
fn name(&self) -> &str {
"sys"
}
fn signature(&self) -> Signature {
Signature::build("sys")
.desc("View information about the current system.")
.filter()
}
fn usage(&self) -> &str {
"View information about the system."
}
fn run(
&self,
_context: &EvaluationContext,
call: &Call,
_input: Value,
) -> Result<nu_protocol::Value, nu_protocol::ShellError> {
run_sys(call)
}
fn examples(&self) -> Vec<Example> {
vec![Example {
description: "Show info about the system",
example: "sys",
result: None,
}]
}
}
fn run_sys(call: &Call) -> Result<Value, ShellError> {
let span = call.head;
let mut sys = System::new();
let mut headers = vec![];
let mut values = vec![];
if let Some(value) = host(&mut sys, span) {
headers.push("host".into());
values.push(value);
}
if let Some(value) = cpu(&mut sys, span) {
headers.push("cpu".into());
values.push(value);
}
if let Some(value) = disks(&mut sys, span) {
headers.push("disks".into());
values.push(value);
}
if let Some(value) = mem(&mut sys, span) {
headers.push("mem".into());
values.push(value);
}
if let Some(value) = temp(&mut sys, span) {
headers.push("temp".into());
values.push(value);
}
if let Some(value) = net(&mut sys, span) {
headers.push("net".into());
values.push(value);
}
Ok(Value::Record {
cols: headers,
vals: values,
span,
})
}
pub fn trim_cstyle_null(s: String) -> String {
s.trim_matches(char::from(0)).to_string()
}
pub fn disks(sys: &mut System, span: Span) -> Option<Value> {
sys.refresh_disks();
sys.refresh_disks_list();
let mut output = vec![];
for disk in sys.disks() {
let mut cols = vec![];
let mut vals = vec![];
cols.push("device".into());
vals.push(Value::String {
val: trim_cstyle_null(disk.name().to_string_lossy().to_string()),
span,
});
cols.push("type".into());
vals.push(Value::String {
val: trim_cstyle_null(String::from_utf8_lossy(disk.file_system()).to_string()),
span,
});
cols.push("mount".into());
vals.push(Value::String {
val: disk.mount_point().to_string_lossy().to_string(),
span,
});
cols.push("total".into());
vals.push(Value::Filesize {
val: disk.total_space(),
span,
});
cols.push("free".into());
vals.push(Value::Filesize {
val: disk.available_space(),
span,
});
output.push(Value::Record { cols, vals, span });
}
if !output.is_empty() {
Some(Value::List { vals: output, span })
} else {
None
}
}
pub fn net(sys: &mut System, span: Span) -> Option<Value> {
sys.refresh_networks();
sys.refresh_networks_list();
let mut output = vec![];
for (iface, data) in sys.networks() {
let mut cols = vec![];
let mut vals = vec![];
cols.push("name".into());
vals.push(Value::String {
val: trim_cstyle_null(iface.to_string()),
span,
});
cols.push("sent".into());
vals.push(Value::Filesize {
val: data.total_transmitted(),
span,
});
cols.push("recv".into());
vals.push(Value::Filesize {
val: data.total_received(),
span,
});
output.push(Value::Record { cols, vals, span });
}
if !output.is_empty() {
Some(Value::List { vals: output, span })
} else {
None
}
}
pub fn cpu(sys: &mut System, span: Span) -> Option<Value> {
sys.refresh_cpu();
let mut output = vec![];
for cpu in sys.processors() {
let mut cols = vec![];
let mut vals = vec![];
cols.push("name".into());
vals.push(Value::String {
val: trim_cstyle_null(cpu.name().to_string()),
span,
});
cols.push("brand".into());
vals.push(Value::String {
val: trim_cstyle_null(cpu.brand().to_string()),
span,
});
cols.push("freq".into());
vals.push(Value::Int {
val: cpu.frequency() as i64,
span,
});
output.push(Value::Record { cols, vals, span });
}
if !output.is_empty() {
Some(Value::List { vals: output, span })
} else {
None
}
}
pub fn mem(sys: &mut System, span: Span) -> Option<Value> {
sys.refresh_memory();
let mut cols = vec![];
let mut vals = vec![];
let total_mem = sys.total_memory();
let free_mem = sys.free_memory();
let total_swap = sys.total_swap();
let free_swap = sys.free_swap();
cols.push("total".into());
vals.push(Value::Filesize {
val: total_mem * 1000,
span,
});
cols.push("free".into());
vals.push(Value::Filesize {
val: free_mem * 1000,
span,
});
cols.push("swap total".into());
vals.push(Value::Filesize {
val: total_swap * 1000,
span,
});
cols.push("swap free".into());
vals.push(Value::Filesize {
val: free_swap * 1000,
span,
});
Some(Value::Record { cols, vals, span })
}
pub fn host(sys: &mut System, span: Span) -> Option<Value> {
sys.refresh_users_list();
let mut cols = vec![];
let mut vals = vec![];
if let Some(name) = sys.name() {
cols.push("name".into());
vals.push(Value::String {
val: trim_cstyle_null(name),
span,
});
}
if let Some(version) = sys.os_version() {
cols.push("os version".into());
vals.push(Value::String {
val: trim_cstyle_null(version),
span,
});
}
if let Some(version) = sys.kernel_version() {
cols.push("kernel version".into());
vals.push(Value::String {
val: trim_cstyle_null(version),
span,
});
}
if let Some(hostname) = sys.host_name() {
cols.push("hostname".into());
vals.push(Value::String {
val: trim_cstyle_null(hostname),
span,
});
}
// dict.insert_untagged(
// "uptime",
// UntaggedValue::duration(1000000000 * sys.uptime() as i64),
// );
let mut users = vec![];
for user in sys.users() {
let mut cols = vec![];
let mut vals = vec![];
cols.push("name".into());
vals.push(Value::String {
val: trim_cstyle_null(user.name().to_string()),
span,
});
let mut groups = vec![];
for group in user.groups() {
groups.push(Value::String {
val: trim_cstyle_null(group.to_string()),
span,
});
}
cols.push("groups".into());
vals.push(Value::List { vals: groups, span });
users.push(Value::Record { cols, vals, span });
}
if !users.is_empty() {
cols.push("sessions".into());
vals.push(Value::List { vals: users, span });
}
Some(Value::Record { cols, vals, span })
}
pub fn temp(sys: &mut System, span: Span) -> Option<Value> {
sys.refresh_components();
sys.refresh_components_list();
let mut output = vec![];
for component in sys.components() {
let mut cols = vec![];
let mut vals = vec![];
cols.push("unit".into());
vals.push(Value::String {
val: component.label().to_string(),
span,
});
cols.push("temp".into());
vals.push(Value::Float {
val: component.temperature() as f64,
span,
});
cols.push("high".into());
vals.push(Value::Float {
val: component.max() as f64,
span,
});
if let Some(critical) = component.critical() {
cols.push("critical".into());
vals.push(Value::Float {
val: critical as f64,
span,
});
}
output.push(Value::Record { cols, vals, span });
}
if !output.is_empty() {
Some(Value::List { vals: output, span })
} else {
None
}
}

View File

@ -0,0 +1,3 @@
mod table;
pub use table::Table;

View File

@ -0,0 +1,166 @@
use std::collections::HashMap;
use nu_protocol::ast::{Call, PathMember};
use nu_protocol::engine::{Command, EvaluationContext};
use nu_protocol::{Signature, Span, Value};
use nu_table::StyledString;
pub struct Table;
//NOTE: this is not a real implementation :D. It's just a simple one to test with until we port the real one.
impl Command for Table {
fn name(&self) -> &str {
"table"
}
fn usage(&self) -> &str {
"Render the table."
}
fn signature(&self) -> nu_protocol::Signature {
Signature::build("table")
}
fn run(
&self,
_context: &EvaluationContext,
call: &Call,
input: Value,
) -> Result<nu_protocol::Value, nu_protocol::ShellError> {
match input {
Value::List { vals, .. } => {
let table = convert_to_table(vals);
if let Some(table) = table {
let result = nu_table::draw_table(&table, 80, &HashMap::new());
Ok(Value::String {
val: result,
span: call.head,
})
} else {
Ok(Value::Nothing { span: call.head })
}
}
Value::Stream { stream, .. } => {
let table = convert_to_table(stream);
if let Some(table) = table {
let result = nu_table::draw_table(&table, 80, &HashMap::new());
Ok(Value::String {
val: result,
span: call.head,
})
} else {
Ok(Value::Nothing { span: call.head })
}
}
Value::Record { cols, vals, .. } => {
let mut output = vec![];
for (c, v) in cols.into_iter().zip(vals.into_iter()) {
output.push(vec![
StyledString {
contents: c,
style: nu_table::TextStyle::default_header(),
},
StyledString {
contents: v.into_string(),
style: nu_table::TextStyle::default(),
},
])
}
let table = nu_table::Table {
headers: vec![],
data: output,
theme: nu_table::Theme::rounded(),
};
let result = nu_table::draw_table(&table, 80, &HashMap::new());
Ok(Value::String {
val: result,
span: call.head,
})
}
x => Ok(x),
}
}
}
fn convert_to_table(iter: impl IntoIterator<Item = Value>) -> Option<nu_table::Table> {
let mut iter = iter.into_iter().peekable();
if let Some(first) = iter.peek() {
let mut headers = first.columns();
if !headers.is_empty() {
headers.insert(0, "#".into());
}
let mut data = vec![];
for (row_num, item) in iter.enumerate() {
let mut row = vec![row_num.to_string()];
if headers.is_empty() {
row.push(item.into_string())
} else {
for header in headers.iter().skip(1) {
let result = match item {
Value::Record { .. } => {
item.clone().follow_cell_path(&[PathMember::String {
val: header.into(),
span: Span::unknown(),
}])
}
_ => Ok(item.clone()),
};
match result {
Ok(value) => row.push(value.into_string()),
Err(_) => row.push(String::new()),
}
}
}
data.push(row);
}
Some(nu_table::Table {
headers: headers
.into_iter()
.map(|x| StyledString {
contents: x,
style: nu_table::TextStyle::default_header(),
})
.collect(),
data: data
.into_iter()
.map(|x| {
x.into_iter()
.enumerate()
.map(|(col, y)| {
if col == 0 {
StyledString {
contents: y,
style: nu_table::TextStyle::default_header(),
}
} else {
StyledString {
contents: y,
style: nu_table::TextStyle::basic_left(),
}
}
})
.collect::<Vec<StyledString>>()
})
.collect(),
theme: nu_table::Theme::rounded(),
})
} else {
None
}
}
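
`convert_to_table` infers the header row from the first record it sees, prepends a "#" index column, and then looks each header up in every row. A pure-std sketch of that header inference over simple key/value rows (the types here are illustrative, not nu_table's):

// Pure-std sketch of the header inference in convert_to_table.
use std::collections::BTreeMap;

fn to_table(rows: &[BTreeMap<String, String>]) -> Option<(Vec<String>, Vec<Vec<String>>)> {
    let first = rows.first()?;
    let mut headers: Vec<String> = first.keys().cloned().collect();
    headers.insert(0, "#".into());

    let data = rows
        .iter()
        .enumerate()
        .map(|(row_num, row)| {
            let mut out = vec![row_num.to_string()];
            for header in headers.iter().skip(1) {
                out.push(row.get(header).cloned().unwrap_or_default());
            }
            out
        })
        .collect();

    Some((headers, data))
}

fn main() {
    let mut row = BTreeMap::new();
    row.insert("name".to_string(), "Cargo.toml".to_string());
    row.insert("size".to_string(), "120".to_string());
    let (headers, data) = to_table(&[row]).expect("at least one row");
    println!("{:?}", headers);
    println!("{:?}", data);
}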

View File

@ -5,4 +5,5 @@ edition = "2018"
[dependencies]
nu-parser = { path = "../nu-parser" }
nu-protocol = { path = "../nu-protocol" }
nu-protocol = { path = "../nu-protocol" }
nu-path = { path = "../nu-path" }

View File

@ -0,0 +1,47 @@
use nu_protocol::{ast::Call, engine::EvaluationContext, ShellError};
use crate::{eval_expression, FromValue};
pub trait CallExt {
fn get_flag<T: FromValue>(
&self,
context: &EvaluationContext,
name: &str,
) -> Result<Option<T>, ShellError>;
fn rest<T: FromValue>(
&self,
context: &EvaluationContext,
starting_pos: usize,
) -> Result<Vec<T>, ShellError>;
}
impl CallExt for Call {
fn get_flag<T: FromValue>(
&self,
context: &EvaluationContext,
name: &str,
) -> Result<Option<T>, ShellError> {
if let Some(expr) = self.get_flag_expr(name) {
let result = eval_expression(context, &expr)?;
FromValue::from_value(&result).map(Some)
} else {
Ok(None)
}
}
fn rest<T: FromValue>(
&self,
context: &EvaluationContext,
starting_pos: usize,
) -> Result<Vec<T>, ShellError> {
let mut output = vec![];
for expr in self.positional.iter().skip(starting_pos) {
let result = eval_expression(context, expr)?;
output.push(FromValue::from_value(&result)?);
}
Ok(output)
}
}

View File

@ -1,7 +1,7 @@
use nu_parser::parse;
use nu_protocol::ast::{Block, Call, Expr, Expression, Operator, Statement};
use nu_protocol::engine::{EngineState, EvaluationContext, StateWorkingSet};
use nu_protocol::{Range, ShellError, Span, Value};
use nu_protocol::engine::EvaluationContext;
use nu_protocol::{Range, ShellError, Span, Type, Value};
pub fn eval_operator(op: &Expression) -> Result<Operator, ShellError> {
match op {
@ -69,6 +69,44 @@ fn eval_call(context: &EvaluationContext, call: &Call, input: Value) -> Result<V
}
}
fn eval_external(
context: &EvaluationContext,
name: &Span,
args: &[Span],
input: Value,
last_expression: bool,
) -> Result<Value, ShellError> {
let engine_state = context.engine_state.borrow();
let decl_id = engine_state
.find_decl("run_external".as_bytes())
.ok_or_else(|| ShellError::ExternalNotSupported(*name))?;
let command = engine_state.get_decl(decl_id);
let mut call = Call::new();
call.positional = [*name]
.iter()
.chain(args.iter())
.map(|span| {
let contents = engine_state.get_span_contents(span);
let val = String::from_utf8_lossy(contents);
Expression {
expr: Expr::String(val.into()),
span: *span,
ty: Type::String,
custom_completion: None,
}
})
.collect();
if last_expression {
call.named.push(("last_expression".into(), None))
}
command.run(context, &call, input)
}
pub fn eval_expression(
context: &EvaluationContext,
expr: &Expression,
@ -86,13 +124,19 @@ pub fn eval_expression(
val: *f,
span: expr.span,
}),
Expr::Range(from, to, operator) => {
// TODO: Embed the min/max into Range and set max to be the true max
Expr::Range(from, next, to, operator) => {
let from = if let Some(f) = from {
eval_expression(context, f)?
} else {
Value::Int {
val: 0i64,
Value::Nothing {
span: Span::unknown(),
}
};
let next = if let Some(s) = next {
eval_expression(context, s)?
} else {
Value::Nothing {
span: Span::unknown(),
}
};
@ -100,44 +144,29 @@ pub fn eval_expression(
let to = if let Some(t) = to {
eval_expression(context, t)?
} else {
Value::Int {
val: 100i64,
Value::Nothing {
span: Span::unknown(),
}
};
let range = match (&from, &to) {
(&Value::Int { .. }, &Value::Int { .. }) => Range {
from: from.clone(),
to: to.clone(),
inclusion: operator.inclusion,
},
(lhs, rhs) => {
return Err(ShellError::OperatorMismatch {
op_span: operator.span,
lhs_ty: lhs.get_type(),
lhs_span: lhs.span(),
rhs_ty: rhs.get_type(),
rhs_span: rhs.span(),
})
}
};
Ok(Value::Range {
val: Box::new(range),
val: Box::new(Range::new(expr.span, from, next, to, operator)?),
span: expr.span,
})
}
Expr::Var(var_id) => context
.get_var(*var_id)
.map_err(move |_| ShellError::VariableNotFoundAtRuntime(expr.span)),
Expr::FullCellPath(column_path) => {
let value = eval_expression(context, &column_path.head)?;
Expr::FullCellPath(cell_path) => {
let value = eval_expression(context, &cell_path.head)?;
value.follow_cell_path(&column_path.tail)
value.follow_cell_path(&cell_path.tail)
}
Expr::RowCondition(_, expr) => eval_expression(context, expr),
Expr::Call(call) => eval_call(context, call, Value::nothing()),
Expr::ExternalCall(_, _) => Err(ShellError::ExternalNotSupported(expr.span)),
Expr::ExternalCall(name, args) => {
eval_external(context, name, args, Value::nothing(), true)
}
Expr::Operator(_) => Ok(Value::Nothing { span: expr.span }),
Expr::BinaryOp(lhs, op, rhs) => {
let op_span = op.span;
@ -159,7 +188,6 @@ pub fn eval_expression(
x => Err(ShellError::UnsupportedOperator(x, op_span)),
}
}
Expr::Subexpression(block_id) => {
let engine_state = context.engine_state.borrow();
let block = engine_state.get_block(*block_id);
@ -219,9 +247,9 @@ pub fn eval_block(
block: &Block,
mut input: Value,
) -> Result<Value, ShellError> {
for stmt in &block.stmts {
for stmt in block.stmts.iter() {
if let Statement::Pipeline(pipeline) = stmt {
for elem in &pipeline.expressions {
for (i, elem) in pipeline.expressions.iter().enumerate() {
match elem {
Expression {
expr: Expr::Call(call),
@ -229,6 +257,18 @@ pub fn eval_block(
} => {
input = eval_call(context, call, input)?;
}
Expression {
expr: Expr::ExternalCall(name, args),
..
} => {
input = eval_external(
context,
name,
args,
input,
i == pipeline.expressions.len() - 1,
)?;
}
elem => {
input = eval_expression(context, elem)?;

View File

@ -0,0 +1,265 @@
// use std::path::PathBuf;
// use nu_path::expand_path;
use nu_protocol::ShellError;
use nu_protocol::{Range, Spanned, Value};
pub trait FromValue: Sized {
fn from_value(v: &Value) -> Result<Self, ShellError>;
}
impl FromValue for Value {
fn from_value(v: &Value) -> Result<Self, ShellError> {
Ok(v.clone())
}
}
impl FromValue for Spanned<i64> {
fn from_value(v: &Value) -> Result<Self, ShellError> {
match v {
Value::Int { val, span } => Ok(Spanned {
item: *val,
span: *span,
}),
Value::Filesize { val, span } => Ok(Spanned {
// FIXME: error check that this fits
item: *val as i64,
span: *span,
}),
Value::Duration { val, span } => Ok(Spanned {
// FIXME: error check that this fits
item: *val as i64,
span: *span,
}),
v => Err(ShellError::CantConvert("integer".into(), v.span())),
}
}
}
impl FromValue for i64 {
fn from_value(v: &Value) -> Result<Self, ShellError> {
match v {
Value::Int { val, .. } => Ok(*val),
Value::Filesize { val, .. } => Ok(
// FIXME: error check that this fits
*val as i64,
),
Value::Duration { val, .. } => Ok(
// FIXME: error check that this fits
*val as i64,
),
v => Err(ShellError::CantConvert("integer".into(), v.span())),
}
}
}
impl FromValue for Spanned<f64> {
fn from_value(v: &Value) -> Result<Self, ShellError> {
match v {
Value::Int { val, span } => Ok(Spanned {
item: *val as f64,
span: *span,
}),
Value::Float { val, span } => Ok(Spanned {
// FIXME: error check that this fits
item: *val,
span: *span,
}),
v => Err(ShellError::CantConvert("float".into(), v.span())),
}
}
}
impl FromValue for f64 {
fn from_value(v: &Value) -> Result<Self, ShellError> {
match v {
Value::Float { val, .. } => Ok(*val),
Value::Int { val, .. } => Ok(*val as f64),
v => Err(ShellError::CantConvert("float".into(), v.span())),
}
}
}
impl FromValue for String {
fn from_value(v: &Value) -> Result<Self, ShellError> {
// FIXME: we may want to fail a little nicer here
Ok(v.clone().into_string())
}
}
impl FromValue for Spanned<String> {
fn from_value(v: &Value) -> Result<Self, ShellError> {
Ok(Spanned {
item: v.clone().into_string(),
span: v.span(),
})
}
}
//FIXME
/*
impl FromValue for ColumnPath {
fn from_value(v: &Value) -> Result<Self, ShellError> {
match v {
Value:: => Ok(c.clone()),
v => Err(ShellError::type_error("column path", v.spanned_type_name())),
}
}
}
impl FromValue for bool {
fn from_value(v: &Value) -> Result<Self, ShellError> {
match v {
Value {
value: UntaggedValue::Primitive(Primitive::Boolean(b)),
..
} => Ok(*b),
Value {
value: UntaggedValue::Row(_),
..
} => {
let mut shell_error = ShellError::type_error("boolean", v.spanned_type_name());
shell_error.notes.push(
"Note: you can access columns using dot. eg) $it.column or (ls).column".into(),
);
Err(shell_error)
}
v => Err(ShellError::type_error("boolean", v.spanned_type_name())),
}
}
}
*/
impl FromValue for Spanned<bool> {
fn from_value(v: &Value) -> Result<Self, ShellError> {
match v {
Value::Bool { val, span } => Ok(Spanned {
item: *val,
span: *span,
}),
v => Err(ShellError::CantConvert("bool".into(), v.span())),
}
}
}
// impl FromValue for DateTime<FixedOffset> {
// fn from_value(v: &Value) -> Result<Self, ShellError> {
// match v {
// Value {
// value: UntaggedValue::Primitive(Primitive::Date(d)),
// ..
// } => Ok(*d),
// Value {
// value: UntaggedValue::Row(_),
// ..
// } => {
// let mut shell_error = ShellError::type_error("date", v.spanned_type_name());
// shell_error.notes.push(
// "Note: you can access columns using dot. eg) $it.column or (ls).column".into(),
// );
// Err(shell_error)
// }
// v => Err(ShellError::type_error("date", v.spanned_type_name())),
// }
// }
// }
impl FromValue for Range {
fn from_value(v: &Value) -> Result<Self, ShellError> {
match v {
Value::Range { val, .. } => Ok((**val).clone()),
v => Err(ShellError::CantConvert("range".into(), v.span())),
}
}
}
impl FromValue for Spanned<Range> {
fn from_value(v: &Value) -> Result<Self, ShellError> {
match v {
Value::Range { val, span } => Ok(Spanned {
item: (**val).clone(),
span: *span,
}),
v => Err(ShellError::CantConvert("range".into(), v.span())),
}
}
}
// impl FromValue for Vec<u8> {
// fn from_value(v: &Value) -> Result<Self, ShellError> {
// match v {
// Value {
// value: UntaggedValue::Primitive(Primitive::Binary(b)),
// ..
// } => Ok(b.clone()),
// Value {
// value: UntaggedValue::Primitive(Primitive::String(s)),
// ..
// } => Ok(s.bytes().collect()),
// Value {
// value: UntaggedValue::Row(_),
// ..
// } => {
// let mut shell_error = ShellError::type_error("binary data", v.spanned_type_name());
// shell_error.notes.push(
// "Note: you can access columns using dot. eg) $it.column or (ls).column".into(),
// );
// Err(shell_error)
// }
// v => Err(ShellError::type_error("binary data", v.spanned_type_name())),
// }
// }
// }
// impl FromValue for Dictionary {
// fn from_value(v: &Value) -> Result<Self, ShellError> {
// match v {
// Value {
// value: UntaggedValue::Row(r),
// ..
// } => Ok(r.clone()),
// v => Err(ShellError::type_error("row", v.spanned_type_name())),
// }
// }
// }
// impl FromValue for CapturedBlock {
// fn from_value(v: &Value) -> Result<Self, ShellError> {
// match v {
// Value {
// value: UntaggedValue::Block(b),
// ..
// } => Ok((**b).clone()),
// Value {
// value: UntaggedValue::Row(_),
// ..
// } => {
// let mut shell_error = ShellError::type_error("block", v.spanned_type_name());
// shell_error.notes.push(
// "Note: you can access columns using dot. eg) $it.column or (ls).column".into(),
// );
// Err(shell_error)
// }
// v => Err(ShellError::type_error("block", v.spanned_type_name())),
// }
// }
// }
// impl FromValue for Vec<Value> {
// fn from_value(v: &Value) -> Result<Self, ShellError> {
// match v {
// Value {
// value: UntaggedValue::Table(t),
// ..
// } => Ok(t.clone()),
// Value {
// value: UntaggedValue::Row(_),
// ..
// } => Ok(vec![v.clone()]),
// v => Err(ShellError::type_error("table", v.spanned_type_name())),
// }
// }
// }
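
FromValue is the usual "fallible conversion keyed on the target type" pattern, which is what lets helpers like CallExt::get_flag stay generic. A toy sketch of the same shape, deliberately not tied to nu_protocol (the Value enum and error type here are stand-ins):

// Toy sketch of the FromValue pattern: each target type implements a
// fallible conversion from a shared Value enum, and generic helpers
// pick the right impl through type inference.
#[derive(Debug, Clone)]
enum Value {
    Int(i64),
    Str(String),
}

#[derive(Debug)]
struct ConvertError(&'static str);

trait FromValue: Sized {
    fn from_value(v: &Value) -> Result<Self, ConvertError>;
}

impl FromValue for i64 {
    fn from_value(v: &Value) -> Result<Self, ConvertError> {
        match v {
            Value::Int(i) => Ok(*i),
            _ => Err(ConvertError("expected integer")),
        }
    }
}

impl FromValue for String {
    fn from_value(v: &Value) -> Result<Self, ConvertError> {
        match v {
            Value::Str(s) => Ok(s.clone()),
            Value::Int(i) => Ok(i.to_string()),
        }
    }
}

// Generic extraction helper in the spirit of CallExt::get_flag.
fn get_as<T: FromValue>(v: &Value) -> Result<T, ConvertError> {
    T::from_value(v)
}

fn main() -> Result<(), ConvertError> {
    let n: i64 = get_as(&Value::Int(3))?;
    let s: String = get_as(&Value::Int(3))?;
    println!("{} {}", n, s);
    Ok(())
}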

View File

@ -1,3 +1,7 @@
mod call_ext;
mod eval;
mod from_value;
pub use call_ext::CallExt;
pub use eval::{eval_block, eval_expression, eval_operator};
pub use from_value::FromValue;

24
crates/nu-json/Cargo.toml Normal file
View File

@ -0,0 +1,24 @@
[package]
authors = ["The Nu Project Contributors", "Christian Zangl <laktak@cdak.net>"]
description = "Fork of serde-hjson"
edition = "2018"
license = "MIT"
name = "nu-json"
version = "0.37.1"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[features]
preserve_order = ["linked-hash-map", "linked-hash-map/serde_impl"]
default = ["preserve_order"]
[dependencies]
serde = "1.0"
num-traits = "0.2.14"
regex = "^1.0"
lazy_static = "1"
linked-hash-map = { version="0.5", optional=true }
[dev-dependencies]
nu-path = { version = "0.37.1", path="../nu-path" }
serde_json = "1.0.39"

29
crates/nu-json/LICENSE Normal file
View File

@ -0,0 +1,29 @@
The MIT License (MIT)
Copyright (c) 2014 The Rust Project Developers
Copyright (c) 2016 Christian Zangl
Copyright (c) 2020 The Nu Project Contributors
Permission is hereby granted, free of charge, to any
person obtaining a copy of this software and associated
documentation files (the "Software"), to deal in the
Software without restriction, including without
limitation the rights to use, copy, modify, merge,
publish, distribute, sublicense, and/or sell copies of
the Software, and to permit persons to whom the Software
is furnished to do so, subject to the following
conditions:
The above copyright notice and this permission notice
shall be included in all copies or substantial portions
of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.

View File

@ -0,0 +1,115 @@
use serde::ser;
use crate::value::{self, Map, Value};
/// This structure provides a simple interface for constructing a JSON array.
pub struct ArrayBuilder {
array: Vec<Value>,
}
impl Default for ArrayBuilder {
fn default() -> Self {
Self::new()
}
}
impl ArrayBuilder {
/// Construct an `ArrayBuilder`.
pub fn new() -> ArrayBuilder {
ArrayBuilder { array: Vec::new() }
}
/// Return the constructed `Value`.
pub fn unwrap(self) -> Value {
Value::Array(self.array)
}
/// Insert a value into the array.
pub fn push<T: ser::Serialize>(mut self, v: T) -> ArrayBuilder {
self.array
.push(value::to_value(&v).expect("failed to serialize"));
self
}
/// Creates and passes an `ArrayBuilder` into a closure, then inserts the resulting array into
/// this array.
pub fn push_array<F>(mut self, f: F) -> ArrayBuilder
where
F: FnOnce(ArrayBuilder) -> ArrayBuilder,
{
let builder = ArrayBuilder::new();
self.array.push(f(builder).unwrap());
self
}
/// Creates and passes an `ArrayBuilder` into a closure, then inserts the resulting object into
/// this array.
pub fn push_object<F>(mut self, f: F) -> ArrayBuilder
where
F: FnOnce(ObjectBuilder) -> ObjectBuilder,
{
let builder = ObjectBuilder::new();
self.array.push(f(builder).unwrap());
self
}
}
/// This structure provides a simple interface for constructing a JSON object.
pub struct ObjectBuilder {
object: Map<String, Value>,
}
impl Default for ObjectBuilder {
fn default() -> Self {
Self::new()
}
}
impl ObjectBuilder {
/// Construct an `ObjectBuilder`.
pub fn new() -> ObjectBuilder {
ObjectBuilder { object: Map::new() }
}
/// Return the constructed `Value`.
pub fn unwrap(self) -> Value {
Value::Object(self.object)
}
/// Insert a key-value pair into the object.
pub fn insert<S, V>(mut self, key: S, value: V) -> ObjectBuilder
where
S: Into<String>,
V: ser::Serialize,
{
self.object.insert(
key.into(),
value::to_value(&value).expect("failed to serialize"),
);
self
}
/// Creates and passes an `ObjectBuilder` into a closure, then inserts the resulting array into
/// this object.
pub fn insert_array<S, F>(mut self, key: S, f: F) -> ObjectBuilder
where
S: Into<String>,
F: FnOnce(ArrayBuilder) -> ArrayBuilder,
{
let builder = ArrayBuilder::new();
self.object.insert(key.into(), f(builder).unwrap());
self
}
/// Creates and passes an `ObjectBuilder` into a closure, then inserts the resulting object into
/// this object.
pub fn insert_object<S, F>(mut self, key: S, f: F) -> ObjectBuilder
where
S: Into<String>,
F: FnOnce(ObjectBuilder) -> ObjectBuilder,
{
let builder = ObjectBuilder::new();
self.object.insert(key.into(), f(builder).unwrap());
self
}
}
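
Because every builder method above consumes and returns `self`, construction chains naturally. A usage sketch against the API shown here is below; it assumes it is compiled inside the nu-json crate, and the `crate::builder` module path is an assumption (the file path is not visible in this diff), while `crate::value::Value` matches the import at the top of the file.

// Usage sketch for the builders above, assuming it lives inside the
// nu-json crate; the `crate::builder` path is an assumption.
use crate::builder::{ArrayBuilder, ObjectBuilder};
use crate::value::Value;

fn example() -> Value {
    ObjectBuilder::new()
        .insert("name", "nu-json")
        .insert("preserve_order", true)
        // Nested arrays are built through a closure that receives a
        // fresh ArrayBuilder and must return the finished one.
        .insert_array("features", |features: ArrayBuilder| {
            features.push("hjson").push("serde")
        })
        .unwrap()
}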

833
crates/nu-json/src/de.rs Normal file
View File

@ -0,0 +1,833 @@
//! Hjson Deserialization
//!
//! This module provides for Hjson deserialization with the type `Deserializer`.
use std::char;
use std::io;
use std::marker::PhantomData;
use std::str;
use serde::de;
use super::error::{Error, ErrorCode, Result};
use super::util::StringReader;
use super::util::{Number, ParseNumber};
enum State {
Normal,
Root,
Keyname,
}
/// A structure that deserializes Hjson into Rust values.
pub struct Deserializer<Iter: Iterator<Item = u8>> {
rdr: StringReader<Iter>,
str_buf: Vec<u8>,
state: State,
}
// macro_rules! try_or_invalid {
// ($self_:expr, $e:expr) => {
// match $e {
// Some(v) => v,
// None => { return Err($self_.error(ErrorCode::InvalidNumber)); }
// }
// }
// }
impl<Iter> Deserializer<Iter>
where
Iter: Iterator<Item = u8>,
{
/// Creates the Hjson parser from an `std::iter::Iterator`.
#[inline]
pub fn new(rdr: Iter) -> Deserializer<Iter> {
Deserializer {
rdr: StringReader::new(rdr),
str_buf: Vec::with_capacity(128),
state: State::Normal,
}
}
/// Creates the Hjson parser from an `std::iter::Iterator`.
#[inline]
pub fn new_for_root(rdr: Iter) -> Deserializer<Iter> {
let mut res = Deserializer::new(rdr);
res.state = State::Root;
res
}
/// The `Deserializer::end` method should be called after a value has been fully deserialized.
/// This allows the `Deserializer` to validate that the input stream is at the end or that it
/// only has trailing whitespace.
#[inline]
pub fn end(&mut self) -> Result<()> {
self.rdr.parse_whitespace()?;
if self.rdr.eof()? {
Ok(())
} else {
Err(self.rdr.error(ErrorCode::TrailingCharacters))
}
}
fn is_punctuator_char(&mut self, ch: u8) -> bool {
matches!(ch, b'{' | b'}' | b'[' | b']' | b',' | b':')
}
fn parse_keyname<'de, V>(&mut self, visitor: V) -> Result<V::Value>
where
V: de::Visitor<'de>,
{
// quotes for keys are optional in Hjson
// unless they include {}[],: or whitespace.
// assume whitespace was already eaten
self.str_buf.clear();
let mut space: Option<usize> = None;
loop {
let ch = self.rdr.next_char_or_null()?;
if ch == b':' {
if self.str_buf.is_empty() {
return Err(self.rdr.error(ErrorCode::Custom(
"Found ':' but no key name (for an empty key name use quotes)".to_string(),
)));
} else if space.is_some()
&& space.expect("Internal error: json parsing") != self.str_buf.len()
{
return Err(self.rdr.error(ErrorCode::Custom(
"Found whitespace in your key name (use quotes to include)".to_string(),
)));
}
self.rdr.uneat_char(ch);
let s = str::from_utf8(&self.str_buf).expect("Internal error: json parsing");
return visitor.visit_str(s);
} else if ch <= b' ' {
if ch == 0 {
return Err(self.rdr.error(ErrorCode::EofWhileParsingObject));
} else if space.is_none() {
space = Some(self.str_buf.len());
}
} else if self.is_punctuator_char(ch) {
return Err(self.rdr.error(ErrorCode::Custom("Found a punctuator where a key name was expected (check your syntax or use quotes if the key name includes {}[],: or whitespace)".to_string())));
} else {
self.str_buf.push(ch);
}
}
}
fn parse_value<'de, V>(&mut self, visitor: V) -> Result<V::Value>
where
V: de::Visitor<'de>,
{
self.rdr.parse_whitespace()?;
if self.rdr.eof()? {
return Err(self.rdr.error(ErrorCode::EofWhileParsingValue));
}
match self.state {
State::Keyname => {
self.state = State::Normal;
return self.parse_keyname(visitor);
}
State::Root => {
self.state = State::Normal;
return self.visit_map(true, visitor);
}
_ => {}
}
match self.rdr.peek_or_null()? {
/*
b'-' => {
self.rdr.eat_char();
self.parse_integer(false, visitor)
}
b'0' ... b'9' => {
self.parse_integer(true, visitor)
}
*/
b'"' => {
self.rdr.eat_char();
self.parse_string()?;
let s = str::from_utf8(&self.str_buf).expect("Internal error: json parsing");
visitor.visit_str(s)
}
b'[' => {
self.rdr.eat_char();
let ret = visitor.visit_seq(SeqVisitor::new(self))?;
self.rdr.parse_whitespace()?;
match self.rdr.next_char()? {
Some(b']') => Ok(ret),
Some(_) => Err(self.rdr.error(ErrorCode::TrailingCharacters)),
None => Err(self.rdr.error(ErrorCode::EofWhileParsingList)),
}
}
b'{' => {
self.rdr.eat_char();
self.visit_map(false, visitor)
}
b'\x00' => Err(self.rdr.error(ErrorCode::ExpectedSomeValue)),
_ => self.parse_tfnns(visitor),
}
}
fn visit_map<'de, V>(&mut self, root: bool, visitor: V) -> Result<V::Value>
where
V: de::Visitor<'de>,
{
let ret = visitor.visit_map(MapVisitor::new(self, root))?;
self.rdr.parse_whitespace()?;
match self.rdr.next_char()? {
Some(b'}') => {
if !root {
Ok(ret)
} else {
Err(self.rdr.error(ErrorCode::TrailingCharacters))
} // todo
}
Some(_) => Err(self.rdr.error(ErrorCode::TrailingCharacters)),
None => {
if root {
Ok(ret)
} else {
Err(self.rdr.error(ErrorCode::EofWhileParsingObject))
}
}
}
}
fn parse_ident(&mut self, ident: &[u8]) -> Result<()> {
for c in ident {
if Some(*c) != self.rdr.next_char()? {
return Err(self.rdr.error(ErrorCode::ExpectedSomeIdent));
}
}
Ok(())
}
fn parse_tfnns<'de, V>(&mut self, visitor: V) -> Result<V::Value>
where
V: de::Visitor<'de>,
{
// Hjson strings can be quoteless
// returns string, true, false, or null.
self.str_buf.clear();
let first = self.rdr.peek()?.expect("Internal error: json parsing");
if self.is_punctuator_char(first) {
return Err(self.rdr.error(ErrorCode::PunctuatorInQlString));
}
loop {
let ch = self.rdr.next_char_or_null()?;
let is_eol = ch == b'\r' || ch == b'\n' || ch == b'\x00';
let is_comment = ch == b'#'
|| if ch == b'/' {
let next = self.rdr.peek_or_null()?;
next == b'/' || next == b'*'
} else {
false
};
if is_eol || is_comment || ch == b',' || ch == b'}' || ch == b']' {
let chf = self.str_buf[0];
match chf {
b'f' => {
if str::from_utf8(&self.str_buf)
.expect("Internal error: json parsing")
.trim()
== "false"
{
self.rdr.uneat_char(ch);
return visitor.visit_bool(false);
}
}
b'n' => {
if str::from_utf8(&self.str_buf)
.expect("Internal error: json parsing")
.trim()
== "null"
{
self.rdr.uneat_char(ch);
return visitor.visit_unit();
}
}
b't' => {
if str::from_utf8(&self.str_buf)
.expect("Internal error: json parsing")
.trim()
== "true"
{
self.rdr.uneat_char(ch);
return visitor.visit_bool(true);
}
}
_ => {
if chf == b'-' || (b'0'..=b'9').contains(&chf) {
let mut pn = ParseNumber::new(self.str_buf.iter().copied());
match pn.parse(false) {
Ok(Number::F64(v)) => {
self.rdr.uneat_char(ch);
return visitor.visit_f64(v);
}
Ok(Number::U64(v)) => {
self.rdr.uneat_char(ch);
return visitor.visit_u64(v);
}
Ok(Number::I64(v)) => {
self.rdr.uneat_char(ch);
return visitor.visit_i64(v);
}
Err(_) => {} // not a number, continue
}
}
}
}
if is_eol {
// remove any whitespace at the end (ignored in quoteless strings)
return visitor.visit_str(
str::from_utf8(&self.str_buf)
.expect("Internal error: json parsing")
.trim(),
);
}
}
self.str_buf.push(ch);
if self.str_buf == b"'''" {
return self.parse_ml_string(visitor);
}
}
}
fn decode_hex_escape(&mut self) -> Result<u16> {
let mut i = 0;
let mut n = 0u16;
while i < 4 && !self.rdr.eof()? {
n = match self.rdr.next_char_or_null()? {
c @ b'0'..=b'9' => n * 16_u16 + ((c as u16) - (b'0' as u16)),
b'a' | b'A' => n * 16_u16 + 10_u16,
b'b' | b'B' => n * 16_u16 + 11_u16,
b'c' | b'C' => n * 16_u16 + 12_u16,
b'd' | b'D' => n * 16_u16 + 13_u16,
b'e' | b'E' => n * 16_u16 + 14_u16,
b'f' | b'F' => n * 16_u16 + 15_u16,
_ => {
return Err(self.rdr.error(ErrorCode::InvalidEscape));
}
};
i += 1;
}
// Error out if we didn't parse 4 digits.
if i != 4 {
return Err(self.rdr.error(ErrorCode::InvalidEscape));
}
Ok(n)
}
fn ml_skip_white(&mut self) -> Result<bool> {
match self.rdr.peek_or_null()? {
b' ' | b'\t' | b'\r' => {
self.rdr.eat_char();
Ok(true)
}
_ => Ok(false),
}
}
fn ml_skip_indent(&mut self, indent: usize) -> Result<()> {
let mut skip = indent;
while self.ml_skip_white()? && skip > 0 {
skip -= 1;
}
Ok(())
}
fn parse_ml_string<'de, V>(&mut self, visitor: V) -> Result<V::Value>
where
V: de::Visitor<'de>,
{
self.str_buf.clear();
// Parse a multiline string value.
let mut triple = 0;
// we are at ''' +1 - get indent
let (_, col) = self.rdr.pos();
let indent = col - 4;
// skip white/to (newline)
while self.ml_skip_white()? {}
if self.rdr.peek_or_null()? == b'\n' {
self.rdr.eat_char();
self.ml_skip_indent(indent)?;
}
// When parsing multiline string values, we must look for ' characters.
loop {
if self.rdr.eof()? {
return Err(self.rdr.error(ErrorCode::EofWhileParsingString));
} // todo error("Bad multiline string");
let ch = self.rdr.next_char_or_null()?;
if ch == b'\'' {
triple += 1;
if triple == 3 {
if self.str_buf.last() == Some(&b'\n') {
self.str_buf.pop();
}
let res = str::from_utf8(&self.str_buf).expect("Internal error: json parsing");
//todo if (self.str_buf.slice(-1) === '\n') self.str_buf=self.str_buf.slice(0, -1); // remove last EOL
return visitor.visit_str(res);
} else {
continue;
}
}
while triple > 0 {
self.str_buf.push(b'\'');
triple -= 1;
}
if ch != b'\r' {
self.str_buf.push(ch);
}
if ch == b'\n' {
self.ml_skip_indent(indent)?;
}
}
}
fn parse_string(&mut self) -> Result<()> {
self.str_buf.clear();
loop {
let ch = match self.rdr.next_char()? {
Some(ch) => ch,
None => {
return Err(self.rdr.error(ErrorCode::EofWhileParsingString));
}
};
match ch {
b'"' => {
return Ok(());
}
b'\\' => {
let ch = match self.rdr.next_char()? {
Some(ch) => ch,
None => {
return Err(self.rdr.error(ErrorCode::EofWhileParsingString));
}
};
match ch {
b'"' => self.str_buf.push(b'"'),
b'\\' => self.str_buf.push(b'\\'),
b'/' => self.str_buf.push(b'/'),
b'b' => self.str_buf.push(b'\x08'),
b'f' => self.str_buf.push(b'\x0c'),
b'n' => self.str_buf.push(b'\n'),
b'r' => self.str_buf.push(b'\r'),
b't' => self.str_buf.push(b'\t'),
b'u' => {
let c = match self.decode_hex_escape()? {
0xDC00..=0xDFFF => {
return Err(self
.rdr
.error(ErrorCode::LoneLeadingSurrogateInHexEscape));
}
// Non-BMP characters are encoded as a sequence of
// two hex escapes, representing UTF-16 surrogates.
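// For example, `\uD83D\uDE00` decodes as
// ((0xD83D - 0xD800) << 10 | (0xDE00 - 0xDC00)) + 0x10000 = 0x1F600.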
n1 @ 0xD800..=0xDBFF => {
match (self.rdr.next_char()?, self.rdr.next_char()?) {
(Some(b'\\'), Some(b'u')) => (),
_ => {
return Err(self
.rdr
.error(ErrorCode::UnexpectedEndOfHexEscape));
}
}
let n2 = self.decode_hex_escape()?;
if !(0xDC00..=0xDFFF).contains(&n2) {
return Err(self
.rdr
.error(ErrorCode::LoneLeadingSurrogateInHexEscape));
}
let n = (((n1 - 0xD800) as u32) << 10 | (n2 - 0xDC00) as u32)
+ 0x1_0000;
match char::from_u32(n as u32) {
Some(c) => c,
None => {
return Err(self
.rdr
.error(ErrorCode::InvalidUnicodeCodePoint));
}
}
}
n => match char::from_u32(n as u32) {
Some(c) => c,
None => {
return Err(self
.rdr
.error(ErrorCode::InvalidUnicodeCodePoint));
}
},
};
self.str_buf.extend(c.encode_utf8(&mut [0; 4]).as_bytes());
}
_ => {
return Err(self.rdr.error(ErrorCode::InvalidEscape));
}
}
}
ch => {
self.str_buf.push(ch);
}
}
}
}
fn parse_object_colon(&mut self) -> Result<()> {
self.rdr.parse_whitespace()?;
match self.rdr.next_char()? {
Some(b':') => Ok(()),
Some(_) => Err(self.rdr.error(ErrorCode::ExpectedColon)),
None => Err(self.rdr.error(ErrorCode::EofWhileParsingObject)),
}
}
}
impl<'de, 'a, Iter> de::Deserializer<'de> for &'a mut Deserializer<Iter>
where
Iter: Iterator<Item = u8>,
{
type Error = Error;
#[inline]
fn deserialize_any<V>(self, visitor: V) -> Result<V::Value>
where
V: de::Visitor<'de>,
{
if let State::Root = self.state {}
self.parse_value(visitor)
}
/// Parses a `null` as a None, and any other values as a `Some(...)`.
#[inline]
fn deserialize_option<V>(self, visitor: V) -> Result<V::Value>
where
V: de::Visitor<'de>,
{
self.rdr.parse_whitespace()?;
match self.rdr.peek_or_null()? {
b'n' => {
self.rdr.eat_char();
self.parse_ident(b"ull")?;
visitor.visit_none()
}
_ => visitor.visit_some(self),
}
}
/// Parses a newtype struct as the underlying value.
#[inline]
fn deserialize_newtype_struct<V>(self, _name: &str, visitor: V) -> Result<V::Value>
where
V: de::Visitor<'de>,
{
visitor.visit_newtype_struct(self)
}
serde::forward_to_deserialize_any! {
bool i8 i16 i32 i64 i128 u8 u16 u32 u64 u128 f32 f64 char str string
bytes byte_buf unit unit_struct seq tuple map
tuple_struct struct enum identifier ignored_any
}
}
struct SeqVisitor<'a, Iter: 'a + Iterator<Item = u8>> {
de: &'a mut Deserializer<Iter>,
}
impl<'a, Iter: Iterator<Item = u8>> SeqVisitor<'a, Iter> {
fn new(de: &'a mut Deserializer<Iter>) -> Self {
SeqVisitor { de }
}
}
impl<'de, 'a, Iter> de::SeqAccess<'de> for SeqVisitor<'a, Iter>
where
Iter: Iterator<Item = u8>,
{
type Error = Error;
fn next_element_seed<T>(&mut self, seed: T) -> Result<Option<T::Value>>
where
T: de::DeserializeSeed<'de>,
{
self.de.rdr.parse_whitespace()?;
match self.de.rdr.peek()? {
Some(b']') => {
return Ok(None);
}
Some(_) => {}
None => {
return Err(self.de.rdr.error(ErrorCode::EofWhileParsingList));
}
}
let value = seed.deserialize(&mut *self.de)?;
// in Hjson the comma is optional and trailing commas are allowed
self.de.rdr.parse_whitespace()?;
if self.de.rdr.peek()? == Some(b',') {
self.de.rdr.eat_char();
self.de.rdr.parse_whitespace()?;
}
Ok(Some(value))
}
}
struct MapVisitor<'a, Iter: 'a + Iterator<Item = u8>> {
de: &'a mut Deserializer<Iter>,
first: bool,
root: bool,
}
impl<'a, Iter: Iterator<Item = u8>> MapVisitor<'a, Iter> {
fn new(de: &'a mut Deserializer<Iter>, root: bool) -> Self {
MapVisitor {
de,
first: true,
root,
}
}
}
impl<'de, 'a, Iter> de::MapAccess<'de> for MapVisitor<'a, Iter>
where
Iter: Iterator<Item = u8>,
{
type Error = Error;
fn next_key_seed<K>(&mut self, seed: K) -> Result<Option<K::Value>>
where
K: de::DeserializeSeed<'de>,
{
self.de.rdr.parse_whitespace()?;
if self.first {
self.first = false;
} else if self.de.rdr.peek()? == Some(b',') {
// in Hjson the comma is optional and trailing commas are allowed
self.de.rdr.eat_char();
self.de.rdr.parse_whitespace()?;
}
match self.de.rdr.peek()? {
Some(b'}') => return Ok(None), // handled later for root
Some(_) => {}
None => {
if self.root {
return Ok(None);
} else {
return Err(self.de.rdr.error(ErrorCode::EofWhileParsingObject));
}
}
}
match self.de.rdr.peek()? {
Some(ch) => {
self.de.state = if ch == b'"' {
State::Normal
} else {
State::Keyname
};
Ok(Some(seed.deserialize(&mut *self.de)?))
}
None => Err(self.de.rdr.error(ErrorCode::EofWhileParsingValue)),
}
}
fn next_value_seed<V>(&mut self, seed: V) -> Result<V::Value>
where
V: de::DeserializeSeed<'de>,
{
self.de.parse_object_colon()?;
seed.deserialize(&mut *self.de)
}
}
impl<'de, 'a, Iter> de::VariantAccess<'de> for &'a mut Deserializer<Iter>
where
Iter: Iterator<Item = u8>,
{
type Error = Error;
fn unit_variant(self) -> Result<()> {
de::Deserialize::deserialize(self)
}
fn newtype_variant_seed<T>(self, seed: T) -> Result<T::Value>
where
T: de::DeserializeSeed<'de>,
{
seed.deserialize(self)
}
fn tuple_variant<V>(self, _len: usize, visitor: V) -> Result<V::Value>
where
V: de::Visitor<'de>,
{
de::Deserializer::deserialize_any(self, visitor)
}
fn struct_variant<V>(self, _fields: &'static [&'static str], visitor: V) -> Result<V::Value>
where
V: de::Visitor<'de>,
{
de::Deserializer::deserialize_any(self, visitor)
}
}
//////////////////////////////////////////////////////////////////////////////
/// Iterator that deserializes a stream into multiple Hjson values.
pub struct StreamDeserializer<T, Iter>
where
Iter: Iterator<Item = u8>,
T: de::DeserializeOwned,
{
deser: Deserializer<Iter>,
_marker: PhantomData<T>,
}
impl<T, Iter> StreamDeserializer<T, Iter>
where
Iter: Iterator<Item = u8>,
T: de::DeserializeOwned,
{
    /// Returns an `Iterator` of decoded Hjson values, reading from a byte
    /// iterator (`Iterator<Item = u8>`).
pub fn new(iter: Iter) -> StreamDeserializer<T, Iter> {
StreamDeserializer {
deser: Deserializer::new(iter),
_marker: PhantomData,
}
}
}
impl<T, Iter> Iterator for StreamDeserializer<T, Iter>
where
Iter: Iterator<Item = u8>,
T: de::DeserializeOwned,
{
type Item = Result<T>;
fn next(&mut self) -> Option<Result<T>> {
// skip whitespaces, if any
// this helps with trailing whitespaces, since whitespaces between
// values are handled for us.
if let Err(e) = self.deser.rdr.parse_whitespace() {
return Some(Err(e));
};
match self.deser.rdr.eof() {
Ok(true) => None,
Ok(false) => match de::Deserialize::deserialize(&mut self.deser) {
Ok(v) => Some(Ok(v)),
Err(e) => Some(Err(e)),
},
Err(e) => Some(Err(e)),
}
}
}
//////////////////////////////////////////////////////////////////////////////
/// Decodes a Hjson value from an iterator over bytes
/// (`Iterator<Item = io::Result<u8>>`).
pub fn from_iter<I, T>(iter: I) -> Result<T>
where
I: Iterator<Item = io::Result<u8>>,
T: de::DeserializeOwned,
{
let fold: io::Result<Vec<_>> = iter.collect();
if let Err(e) = fold {
return Err(Error::Io(e));
}
let bytes = fold.expect("Internal error: json parsing");
// deserialize tries first to decode with legacy support (new_for_root)
// and then with the standard method if this fails.
// todo: add compile switch
// deserialize and make sure the whole stream has been consumed
let mut de = Deserializer::new_for_root(bytes.iter().copied());
de::Deserialize::deserialize(&mut de)
.and_then(|x| de.end().map(|()| x))
.or_else(|_| {
let mut de2 = Deserializer::new(bytes.iter().copied());
de::Deserialize::deserialize(&mut de2).and_then(|x| de2.end().map(|()| x))
})
/* without legacy support:
// deserialize and make sure the whole stream has been consumed
let mut de = Deserializer::new(bytes.iter().map(|b| *b));
let value = match de::Deserialize::deserialize(&mut de)
.and_then(|x| { try!(de.end()); Ok(x) })
{
Ok(v) => Ok(v),
Err(e) => Err(e),
};
*/
}
/// Decodes a Hjson value from a `std::io::Read`.
pub fn from_reader<R, T>(rdr: R) -> Result<T>
where
R: io::Read,
T: de::DeserializeOwned,
{
from_iter(rdr.bytes())
}
/// Decodes a Hjson value from a byte slice `&[u8]`.
pub fn from_slice<T>(v: &[u8]) -> Result<T>
where
T: de::DeserializeOwned,
{
from_iter(v.iter().map(|&byte| Ok(byte)))
}
/// Decodes a Hjson value from a `&str`.
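/// A minimal usage sketch (the input document here is only illustrative):
///
/// ```ignore
/// let v: nu_json::Value = nu_json::from_str("{\n  name: Nushell\n}")?;
/// ```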
pub fn from_str<T>(s: &str) -> Result<T>
where
T: de::DeserializeOwned,
{
from_slice(s.as_bytes())
}

166
crates/nu-json/src/error.rs Normal file
View File

@ -0,0 +1,166 @@
//! JSON Errors
//!
//! This module is centered around the `Error` and `ErrorCode` types, which represent all possible
//! `serde_hjson` errors.
use std::error;
use std::fmt;
use std::io;
use std::result;
use std::string::FromUtf8Error;
use serde::de;
use serde::ser;
/// The errors that can arise while parsing a JSON stream.
#[derive(Clone, PartialEq)]
pub enum ErrorCode {
/// Catchall for syntax error messages
Custom(String),
/// EOF while parsing a list.
EofWhileParsingList,
/// EOF while parsing an object.
EofWhileParsingObject,
/// EOF while parsing a string.
EofWhileParsingString,
/// EOF while parsing a JSON value.
EofWhileParsingValue,
/// Expected this character to be a `':'`.
ExpectedColon,
/// Expected this character to be either a `','` or a `]`.
ExpectedListCommaOrEnd,
/// Expected this character to be either a `','` or a `}`.
ExpectedObjectCommaOrEnd,
/// Expected to parse either a `true`, `false`, or a `null`.
ExpectedSomeIdent,
/// Expected this character to start a JSON value.
ExpectedSomeValue,
/// Invalid hex escape code.
InvalidEscape,
/// Invalid number.
InvalidNumber,
/// Invalid Unicode code point.
InvalidUnicodeCodePoint,
/// Object key is not a string.
KeyMustBeAString,
/// Lone leading surrogate in hex escape.
LoneLeadingSurrogateInHexEscape,
/// JSON has non-whitespace trailing characters after the value.
TrailingCharacters,
/// Unexpected end of hex escape.
UnexpectedEndOfHexEscape,
/// Found a punctuator character when expecting a quoteless string.
PunctuatorInQlString,
}
impl fmt::Debug for ErrorCode {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
//use std::fmt::Debug;
match *self {
ErrorCode::Custom(ref msg) => write!(f, "{}", msg),
ErrorCode::EofWhileParsingList => "EOF while parsing a list".fmt(f),
ErrorCode::EofWhileParsingObject => "EOF while parsing an object".fmt(f),
ErrorCode::EofWhileParsingString => "EOF while parsing a string".fmt(f),
ErrorCode::EofWhileParsingValue => "EOF while parsing a value".fmt(f),
ErrorCode::ExpectedColon => "expected `:`".fmt(f),
ErrorCode::ExpectedListCommaOrEnd => "expected `,` or `]`".fmt(f),
ErrorCode::ExpectedObjectCommaOrEnd => "expected `,` or `}`".fmt(f),
ErrorCode::ExpectedSomeIdent => "expected ident".fmt(f),
ErrorCode::ExpectedSomeValue => "expected value".fmt(f),
ErrorCode::InvalidEscape => "invalid escape".fmt(f),
ErrorCode::InvalidNumber => "invalid number".fmt(f),
ErrorCode::InvalidUnicodeCodePoint => "invalid Unicode code point".fmt(f),
ErrorCode::KeyMustBeAString => "key must be a string".fmt(f),
ErrorCode::LoneLeadingSurrogateInHexEscape => {
"lone leading surrogate in hex escape".fmt(f)
}
ErrorCode::TrailingCharacters => "trailing characters".fmt(f),
ErrorCode::UnexpectedEndOfHexEscape => "unexpected end of hex escape".fmt(f),
ErrorCode::PunctuatorInQlString => {
"found a punctuator character when expecting a quoteless string".fmt(f)
}
}
}
}
/// This type represents all possible errors that can occur when serializing or deserializing a
/// value into JSON.
#[derive(Debug)]
pub enum Error {
/// The JSON value had some syntactic error.
Syntax(ErrorCode, usize, usize),
/// Some IO error occurred when serializing or deserializing a value.
Io(io::Error),
/// Some UTF8 error occurred while serializing or deserializing a value.
FromUtf8(FromUtf8Error),
}
impl error::Error for Error {
fn cause(&self) -> Option<&dyn error::Error> {
match *self {
Error::Io(ref error) => Some(error),
Error::FromUtf8(ref error) => Some(error),
_ => None,
}
}
}
impl fmt::Display for Error {
fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
match *self {
Error::Syntax(ref code, line, col) => {
write!(fmt, "{:?} at line {} column {}", code, line, col)
}
Error::Io(ref error) => fmt::Display::fmt(error, fmt),
Error::FromUtf8(ref error) => fmt::Display::fmt(error, fmt),
}
}
}
impl From<io::Error> for Error {
fn from(error: io::Error) -> Error {
Error::Io(error)
}
}
impl From<FromUtf8Error> for Error {
fn from(error: FromUtf8Error) -> Error {
Error::FromUtf8(error)
}
}
impl de::Error for Error {
fn custom<T: fmt::Display>(msg: T) -> Error {
Error::Syntax(ErrorCode::Custom(msg.to_string()), 0, 0)
}
}
impl ser::Error for Error {
/// Raised when there is a general error when serializing a type.
fn custom<T: fmt::Display>(msg: T) -> Error {
Error::Syntax(ErrorCode::Custom(msg.to_string()), 0, 0)
}
}
/// Helper alias for `Result` objects that return a JSON `Error`.
pub type Result<T> = result::Result<T, Error>;

13
crates/nu-json/src/lib.rs Normal file
View File

@ -0,0 +1,13 @@
pub use self::de::{
from_iter, from_reader, from_slice, from_str, Deserializer, StreamDeserializer,
};
pub use self::error::{Error, ErrorCode, Result};
pub use self::ser::{to_string, to_vec, to_writer, Serializer};
pub use self::value::{from_value, to_value, Map, Value};
pub mod builder;
pub mod de;
pub mod error;
pub mod ser;
mod util;
pub mod value;

1020
crates/nu-json/src/ser.rs Normal file

File diff suppressed because it is too large Load Diff

333
crates/nu-json/src/util.rs Normal file
View File

@ -0,0 +1,333 @@
use std::io;
use std::str;
use super::error::{Error, ErrorCode, Result};
pub struct StringReader<Iter: Iterator<Item = u8>> {
iter: Iter,
line: usize,
col: usize,
ch: Vec<u8>,
}
impl<Iter> StringReader<Iter>
where
Iter: Iterator<Item = u8>,
{
#[inline]
pub fn new(iter: Iter) -> Self {
StringReader {
iter,
line: 1,
col: 0,
ch: Vec::new(),
}
}
fn next(&mut self) -> Option<io::Result<u8>> {
match self.iter.next() {
None => None,
Some(b'\n') => {
self.line += 1;
self.col = 0;
Some(Ok(b'\n'))
}
Some(c) => {
self.col += 1;
Some(Ok(c))
}
}
}
pub fn pos(&mut self) -> (usize, usize) {
(self.line, self.col)
}
pub fn eof(&mut self) -> Result<bool> {
Ok(self.peek()?.is_none())
}
pub fn peek_next(&mut self, idx: usize) -> Result<Option<u8>> {
while self.ch.len() <= idx {
match self.next() {
Some(Err(err)) => return Err(Error::Io(err)),
Some(Ok(ch)) => self.ch.push(ch),
None => return Ok(None),
}
}
Ok(Some(self.ch[idx]))
}
// pub fn peek_next_or_null(&mut self, idx: usize) -> Result<u8> {
// Ok(try!(self.peek_next(idx)).unwrap_or(b'\x00'))
// }
pub fn peek(&mut self) -> Result<Option<u8>> {
self.peek_next(0)
}
pub fn peek_or_null(&mut self) -> Result<u8> {
Ok(self.peek()?.unwrap_or(b'\x00'))
}
pub fn eat_char(&mut self) -> u8 {
self.ch.remove(0)
}
pub fn uneat_char(&mut self, ch: u8) {
self.ch.insert(0, ch);
}
pub fn next_char(&mut self) -> Result<Option<u8>> {
match self.ch.first() {
Some(&ch) => {
self.eat_char();
Ok(Some(ch))
}
None => match self.next() {
Some(Err(err)) => Err(Error::Io(err)),
Some(Ok(ch)) => Ok(Some(ch)),
None => Ok(None),
},
}
}
pub fn next_char_or_null(&mut self) -> Result<u8> {
Ok(self.next_char()?.unwrap_or(b'\x00'))
}
fn eat_line(&mut self) -> Result<()> {
loop {
match self.peek()? {
Some(b'\n') | None => return Ok(()),
_ => {}
}
self.eat_char();
}
}
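// Skips insignificant input: whitespace (spaces, tabs, CR/LF) and Hjson
// comments (`#` and `//` line comments, `/* ... */` block comments).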
pub fn parse_whitespace(&mut self) -> Result<()> {
loop {
match self.peek_or_null()? {
b' ' | b'\n' | b'\t' | b'\r' => {
self.eat_char();
}
b'#' => self.eat_line()?,
b'/' => {
match self.peek_next(1)? {
Some(b'/') => self.eat_line()?,
Some(b'*') => {
self.eat_char();
self.eat_char();
while !(self.peek()?.unwrap_or(b'*') == b'*'
&& self.peek_next(1)?.unwrap_or(b'/') == b'/')
{
self.eat_char();
}
self.eat_char();
self.eat_char();
}
Some(_) => {
self.eat_char();
}
None => return Err(self.error(ErrorCode::TrailingCharacters)), //todo
}
}
_ => {
return Ok(());
}
}
}
}
pub fn error(&mut self, reason: ErrorCode) -> Error {
Error::Syntax(reason, self.line, self.col)
}
}
pub enum Number {
I64(i64),
U64(u64),
F64(f64),
}
pub struct ParseNumber<Iter: Iterator<Item = u8>> {
rdr: StringReader<Iter>,
result: Vec<u8>,
}
// macro_rules! try_or_invalid {
// ($e:expr) => {
// match $e {
// Some(v) => v,
// None => { return Err(Error::Syntax(ErrorCode::InvalidNumber, 0, 0)); }
// }
// }
// }
impl<Iter: Iterator<Item = u8>> ParseNumber<Iter> {
#[inline]
pub fn new(iter: Iter) -> Self {
ParseNumber {
rdr: StringReader::new(iter),
result: Vec::new(),
}
}
pub fn parse(&mut self, stop_at_next: bool) -> Result<Number> {
match self.try_parse() {
Ok(()) => {
self.rdr.parse_whitespace()?;
let mut ch = self.rdr.next_char_or_null()?;
if stop_at_next {
let ch2 = self.rdr.peek_or_null()?;
// end scan if we find a punctuator character like ,}] or a comment
if ch == b','
|| ch == b'}'
|| ch == b']'
|| ch == b'#'
|| ch == b'/' && (ch2 == b'/' || ch2 == b'*')
{
ch = b'\x00';
}
}
match ch {
b'\x00' => {
let res =
str::from_utf8(&self.result).expect("Internal error: json parsing");
let mut is_float = false;
for ch in res.chars() {
if ch == '.' || ch == 'e' || ch == 'E' {
is_float = true;
break;
}
}
if is_float {
Ok(Number::F64(
res.parse::<f64>().expect("Internal error: json parsing"),
))
} else if res.starts_with('-') {
Ok(Number::I64(
res.parse::<i64>().expect("Internal error: json parsing"),
))
} else {
Ok(Number::U64(
res.parse::<u64>().expect("Internal error: json parsing"),
))
}
}
_ => Err(Error::Syntax(ErrorCode::InvalidNumber, 0, 0)),
}
}
Err(e) => Err(e),
}
}
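// Accepts an optional leading `-`, an integer part (a leading `0` may not be
// followed by further digits), then optionally a `.` with at least one digit
// and/or an `e`/`E` exponent with an optional sign; accepted bytes are
// collected into `self.result`.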
fn try_parse(&mut self) -> Result<()> {
if self.rdr.peek_or_null()? == b'-' {
self.result.push(self.rdr.eat_char());
}
let mut has_value = false;
if self.rdr.peek_or_null()? == b'0' {
self.result.push(self.rdr.eat_char());
has_value = true;
// There can be only one leading '0'.
if let b'0'..=b'9' = self.rdr.peek_or_null()? {
return Err(Error::Syntax(ErrorCode::InvalidNumber, 0, 0));
}
}
loop {
match self.rdr.peek_or_null()? {
b'0'..=b'9' => {
self.result.push(self.rdr.eat_char());
has_value = true;
}
b'.' => {
if !has_value {
return Err(Error::Syntax(ErrorCode::InvalidNumber, 0, 0));
}
self.rdr.eat_char();
return self.try_decimal();
}
b'e' | b'E' => {
if !has_value {
return Err(Error::Syntax(ErrorCode::InvalidNumber, 0, 0));
}
self.rdr.eat_char();
return self.try_exponent();
}
_ => {
if !has_value {
return Err(Error::Syntax(ErrorCode::InvalidNumber, 0, 0));
}
return Ok(());
}
}
}
}
fn try_decimal(&mut self) -> Result<()> {
self.result.push(b'.');
// Make sure a digit follows the decimal place.
match self.rdr.next_char_or_null()? {
c @ b'0'..=b'9' => {
self.result.push(c);
}
_ => {
return Err(Error::Syntax(ErrorCode::InvalidNumber, 0, 0));
}
};
while let b'0'..=b'9' = self.rdr.peek_or_null()? {
self.result.push(self.rdr.eat_char());
}
match self.rdr.peek_or_null()? {
b'e' | b'E' => {
self.rdr.eat_char();
self.try_exponent()
}
_ => Ok(()),
}
}
fn try_exponent(&mut self) -> Result<()> {
self.result.push(b'e');
match self.rdr.peek_or_null()? {
b'+' => {
self.result.push(self.rdr.eat_char());
}
b'-' => {
self.result.push(self.rdr.eat_char());
}
_ => {}
};
// Make sure a digit follows the exponent place.
match self.rdr.next_char_or_null()? {
c @ b'0'..=b'9' => {
self.result.push(c);
}
_ => {
return Err(Error::Syntax(ErrorCode::InvalidNumber, 0, 0));
}
};
while let b'0'..=b'9' = self.rdr.peek_or_null()? {
self.result.push(self.rdr.eat_char());
}
Ok(())
}
}

1158
crates/nu-json/src/value.rs Normal file

File diff suppressed because it is too large Load Diff

View File

@ -0,0 +1,212 @@
// FIXME: re-enable tests
/*
use nu_json::Value;
use regex::Regex;
use std::fs;
use std::io;
use std::path::{Path, PathBuf};
fn txt(text: &str) -> String {
let out = String::from_utf8_lossy(text.as_bytes());
#[cfg(windows)]
{
out.replace("\r\n", "").replace("\n", "")
}
#[cfg(not(windows))]
{
out.to_string()
}
}
fn hjson_expectations() -> PathBuf {
let assets = nu_test_support::fs::assets().join("nu_json");
nu_path::canonicalize(assets.clone()).unwrap_or_else(|e| {
panic!(
"Couldn't canonicalize hjson assets path {}: {:?}",
assets.display(),
e
)
})
}
fn get_test_content(name: &str) -> io::Result<String> {
let expectations = hjson_expectations();
let mut p = format!("{}/{}_test.hjson", expectations.display(), name);
if !Path::new(&p).exists() {
p = format!("{}/{}_test.json", expectations.display(), name);
}
fs::read_to_string(&p)
}
fn get_result_content(name: &str) -> io::Result<(String, String)> {
let expectations = hjson_expectations();
let p1 = format!("{}/{}_result.json", expectations.display(), name);
let p2 = format!("{}/{}_result.hjson", expectations.display(), name);
Ok((fs::read_to_string(&p1)?, fs::read_to_string(&p2)?))
}
macro_rules! run_test {
// {{ is a workaround for rust stable
($v: ident, $list: expr, $fix: expr) => {{
let name = stringify!($v);
$list.push(format!("{}_test", name));
println!("- running {}", name);
let should_fail = name.starts_with("fail");
let test_content = get_test_content(name).unwrap();
let data: nu_json::Result<Value> = nu_json::from_str(&test_content);
assert!(should_fail == data.is_err());
if !should_fail {
let udata = data.unwrap();
let (rjson, rhjson) = get_result_content(name).unwrap();
let rjson = txt(&rjson);
let rhjson = txt(&rhjson);
let actual_hjson = nu_json::to_string(&udata).unwrap();
let actual_hjson = txt(&actual_hjson);
let actual_json = $fix(serde_json::to_string_pretty(&udata).unwrap());
let actual_json = txt(&actual_json);
if rhjson != actual_hjson {
println!(
"{:?}\n---hjson expected\n{}\n---hjson actual\n{}\n---\n",
name, rhjson, actual_hjson
);
}
if rjson != actual_json {
println!(
"{:?}\n---json expected\n{}\n---json actual\n{}\n---\n",
name, rjson, actual_json
);
}
assert!(rhjson == actual_hjson && rjson == actual_json);
}
}};
}
// add fixes where rust's json differs from javascript
fn std_fix(json: String) -> String {
// serde_json serializes integers with a superfluous .0 suffix
let re = Regex::new(r"(?m)(?P<d>\d)\.0(?P<s>,?)$").unwrap();
re.replace_all(&json, "$d$s").to_string()
}
fn fix_kan(json: String) -> String {
std_fix(json).replace(" -0,", " 0,")
}
fn fix_pass1(json: String) -> String {
std_fix(json)
.replace("1.23456789e34", "1.23456789e+34")
.replace("2.3456789012e76", "2.3456789012e+76")
}
#[test]
fn test_hjson() {
let mut done: Vec<String> = Vec::new();
println!();
run_test!(charset, done, std_fix);
run_test!(comments, done, std_fix);
run_test!(empty, done, std_fix);
run_test!(failCharset1, done, std_fix);
run_test!(failJSON02, done, std_fix);
run_test!(failJSON05, done, std_fix);
run_test!(failJSON06, done, std_fix);
run_test!(failJSON07, done, std_fix);
run_test!(failJSON08, done, std_fix);
run_test!(failJSON10, done, std_fix);
run_test!(failJSON11, done, std_fix);
run_test!(failJSON12, done, std_fix);
run_test!(failJSON13, done, std_fix);
run_test!(failJSON14, done, std_fix);
run_test!(failJSON15, done, std_fix);
run_test!(failJSON16, done, std_fix);
run_test!(failJSON17, done, std_fix);
run_test!(failJSON19, done, std_fix);
run_test!(failJSON20, done, std_fix);
run_test!(failJSON21, done, std_fix);
run_test!(failJSON22, done, std_fix);
run_test!(failJSON23, done, std_fix);
run_test!(failJSON24, done, std_fix);
run_test!(failJSON26, done, std_fix);
run_test!(failJSON28, done, std_fix);
run_test!(failJSON29, done, std_fix);
run_test!(failJSON30, done, std_fix);
run_test!(failJSON31, done, std_fix);
run_test!(failJSON32, done, std_fix);
run_test!(failJSON33, done, std_fix);
run_test!(failJSON34, done, std_fix);
run_test!(failKey1, done, std_fix);
run_test!(failKey2, done, std_fix);
run_test!(failKey3, done, std_fix);
run_test!(failKey4, done, std_fix);
run_test!(failMLStr1, done, std_fix);
run_test!(failObj1, done, std_fix);
run_test!(failObj2, done, std_fix);
run_test!(failObj3, done, std_fix);
run_test!(failStr1a, done, std_fix);
run_test!(failStr1b, done, std_fix);
run_test!(failStr1c, done, std_fix);
run_test!(failStr1d, done, std_fix);
run_test!(failStr2a, done, std_fix);
run_test!(failStr2b, done, std_fix);
run_test!(failStr2c, done, std_fix);
run_test!(failStr2d, done, std_fix);
run_test!(failStr3a, done, std_fix);
run_test!(failStr3b, done, std_fix);
run_test!(failStr3c, done, std_fix);
run_test!(failStr3d, done, std_fix);
run_test!(failStr4a, done, std_fix);
run_test!(failStr4b, done, std_fix);
run_test!(failStr4c, done, std_fix);
run_test!(failStr4d, done, std_fix);
run_test!(failStr5a, done, std_fix);
run_test!(failStr5b, done, std_fix);
run_test!(failStr5c, done, std_fix);
run_test!(failStr5d, done, std_fix);
run_test!(failStr6a, done, std_fix);
run_test!(failStr6b, done, std_fix);
run_test!(failStr6c, done, std_fix);
run_test!(failStr6d, done, std_fix);
run_test!(kan, done, fix_kan);
run_test!(keys, done, std_fix);
run_test!(oa, done, std_fix);
run_test!(pass1, done, fix_pass1);
run_test!(pass2, done, std_fix);
run_test!(pass3, done, std_fix);
run_test!(pass4, done, std_fix);
run_test!(passSingle, done, std_fix);
run_test!(root, done, std_fix);
run_test!(stringify1, done, std_fix);
run_test!(strings, done, std_fix);
run_test!(trail, done, std_fix);
// check if we include all assets
let paths = fs::read_dir(hjson_expectations()).unwrap();
let all = paths
.map(|item| String::from(item.unwrap().path().file_stem().unwrap().to_str().unwrap()))
.filter(|x| x.contains("_test"));
let missing = all
.into_iter()
.filter(|x| done.iter().find(|y| &x == y) == None)
.collect::<Vec<String>>();
if !missing.is_empty() {
for item in missing {
println!("missing: {}", item);
}
panic!();
}
}
*/

View File

@ -4,5 +4,6 @@ version = "0.1.0"
edition = "2018"
[dependencies]
codespan-reporting = "0.11.1"
nu-protocol = { path = "../nu-protocol"}
miette = "3.0.0"
thiserror = "1.0.29"
nu-protocol = { path = "../nu-protocol"}

View File

@ -0,0 +1,99 @@
# nu-parser, the Nushell parser
Nushell's parser is a type-directed parser, meaning that it uses type information available at parse time to configure how parsing proceeds. This allows it to apply a broader range of techniques when handling the arguments of a command.
Nushell's base language is whitespace-separated tokens with the command (Nushell's term for a function) name in the head position:
```
head1 arg1 arg2 | head2
```
## Lexing
The first job of the parser is to perform a lexical analysis to find where the tokens start and end in the input. This turns the above into:
```
<item: "head1">, <item: "arg1">, <item: "arg2">, <pipe>, <item: "head2">
```
At this point, the parser has little to no understanding of the shape of the command or how to parse its arguments.
## Lite parsing
As Nushell is a language of pipelines, pipes play a key role both in separating commands from each other and in denoting the flow of information between commands. The lite parse phase, as the name suggests, helps to group the lexed tokens into units.
The above tokens are converted into the following during the lite parse phase:
```
Pipeline:
Command #1:
<item: "head1">, <item: "arg1">, <item: "arg2">
Command #2:
<item: "head2">
```
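The parser drives these first two phases with its `lex` and `lite_parse` functions; the calls below mirror how `parse_keywords.rs` uses them. A minimal sketch, with error handling elided:
```rust
// Lex the raw bytes, then group the resulting tokens into pipelines/commands.
let source = b"head1 arg1 arg2 | head2";
let (tokens, _err) = lex(source, 0, &[], &[]);
let (lite_block, _err) = lite_parse(&tokens);
for pipeline in &lite_block.block {
    for command in &pipeline.commands {
        // `parts` holds the span of each whitespace-separated token
        let _spans = &command.parts;
    }
}
```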
## Parsing
The real magic begins to happen when the parser moves on to the parsing stage. At this point, it traverses the lite parse tree and for each command makes a decision:
* If the command looks like an internal/external command literal, e.g. `foo` or `/usr/bin/ls`, it parses it as an internal or external command
* Otherwise, it parses the command as part of a mathematical expression
### Types/shapes
Each command has a shape assigned to each of the arguments it reads in. These shapes help define how the parser will handle the parse.
For example, if the command is written as:
```sql
where $x > 10
```
When the parsing happens, the parser will look up the `where` command and find its Signature. The Signature states what flags are allowed and what positional arguments are allowed (both required and optional). Each argument comes with a Shape that defines how to parse the value in that position.
In the above example, if the Signature of `where` said that it took three String values, the result would be:
```
CallInfo:
Name: `where`
Args:
Expression($x), a String
Expression(>), a String
Expression(10), a String
```
Or, the Signature could state that it takes in three positional arguments: a Variable, an Operator, and a Number, which would give:
```
CallInfo:
Name: `where`
Args:
Expression($x), a Variable
Expression(>), an Operator
Expression(10), a Number
```
Note that in this case, each would be checked at compile time to confirm that the expression has the shape requested. For example, `"foo"` would fail to parse as a Number.
Finally, some Shapes can consume more than one token. In the above, if the `where` command stated it took in a single required argument, and that the Shape of this argument was a MathExpression, then the parser would treat the remaining tokens as part of the math expression.
```
CallInfo:
Name: `where`
Args:
MathExpression:
Op: >
LHS: Expression($x)
RHS: Expression(10)
```
When the command runs, it will now be able to evaluate the whole math expression as a single step rather than doing any additional parsing to understand the relationship between the parameters.
## Making space
As some Shapes can consume multiple tokens, it's important that the parser allow for multiple Shapes to coexist as peacefully as possible.
The simplest way it does this is to ensure there is at least one token for each required parameter. If the Signature of the command says that it takes a MathExpression and a Number as two required arguments, then the parser will stop the math parser one token short. This allows the second Shape to consume the final token.
Another way that the parser makes space is to look for Keyword shapes in the Signature. A Keyword is a word that's special to this command. For example in the `if` command, `else` is a keyword. When it is found in the arguments, the parser will use it as a signpost for where to make space for each Shape. The tokens leading up to the `else` will then feed into the parts of the Signature before the `else`, and the tokens following are consumed by the `else` and the Shapes that follow.
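For a concrete example of a Keyword shape, the `let` command's Signature (as declared in the parser tests elsewhere in this change) marks `=` as a Keyword followed by an Expression, which tells the parser where the initial value begins:
```rust
Signature::build("let")
    .required("var_name", SyntaxShape::VarWithOptType, "variable name")
    .required(
        "initial_value",
        SyntaxShape::Keyword(b"=".to_vec(), Box::new(SyntaxShape::Expression)),
        "equals sign followed by value",
    )
```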

View File

@ -1,31 +1,162 @@
use miette::Diagnostic;
use nu_protocol::{Span, Type};
use thiserror::Error;
#[derive(Debug)]
#[derive(Clone, Debug, Error, Diagnostic)]
pub enum ParseError {
ExtraTokens(Span),
ExtraPositional(Span),
UnexpectedEof(String, Span),
Unclosed(String, Span),
UnknownStatement(Span),
Expected(String, Span),
Mismatch(String, String, Span), // expected, found, span
UnsupportedOperation(Span, Span, Type, Span, Type),
ExpectedKeyword(String, Span),
MultipleRestParams(Span),
VariableNotFound(Span),
UnknownCommand(Span),
NonUtf8(Span),
UnknownFlag(Span),
UnknownType(Span),
MissingFlagParam(Span),
ShortFlagBatchCantTakeArg(Span),
MissingPositional(String, Span),
KeywordMissingArgument(String, Span),
MissingType(Span),
TypeMismatch(Type, Type, Span), // expected, found, span
MissingRequiredFlag(String, Span),
IncompleteMathExpression(Span),
UnknownState(String, Span),
IncompleteParser(Span),
RestNeedsName(Span),
/// The parser encountered unexpected tokens, when the code should have
/// finished. You should remove these or finish adding what you intended
/// to add.
#[error("Extra tokens in code.")]
#[diagnostic(
code(nu::parser::extra_tokens),
url(docsrs),
help("Try removing them.")
)]
ExtraTokens(#[label = "extra tokens"] Span),
#[error("Extra positional argument.")]
#[diagnostic(code(nu::parser::extra_positional), url(docsrs))]
ExtraPositional(#[label = "extra positional argument"] Span),
#[error("Unexpected end of code.")]
#[diagnostic(code(nu::parser::unexpected_eof), url(docsrs))]
UnexpectedEof(String, #[label("expected closing {0}")] Span),
#[error("Unclosed delimiter.")]
#[diagnostic(code(nu::parser::unclosed_delimiter), url(docsrs))]
Unclosed(String, #[label("unclosed {0}")] Span),
#[error("Unknown statement.")]
#[diagnostic(code(nu::parser::unknown_statement), url(docsrs))]
UnknownStatement(#[label("unknown statement")] Span),
#[error("Parse mismatch during operation.")]
#[diagnostic(code(nu::parser::parse_mismatch), url(docsrs))]
Expected(String, #[label("expected {0}")] Span),
#[error("Type mismatch during operation.")]
#[diagnostic(code(nu::parser::type_mismatch), url(docsrs))]
Mismatch(String, String, #[label("expected {0}, found {1}")] Span), // expected, found, span
#[error("Types mismatched for operation.")]
#[diagnostic(
code(nu::parser::unsupported_operation),
url(docsrs),
help("Change {2} or {4} to be the right types and try again.")
)]
UnsupportedOperation(
#[label = "doesn't support these values."] Span,
#[label("{2}")] Span,
Type,
#[label("{4}")] Span,
Type,
),
#[error("Expected keyword.")]
#[diagnostic(code(nu::parser::expected_keyword), url(docsrs))]
ExpectedKeyword(String, #[label("expected {0}")] Span),
#[error("Multiple rest params.")]
#[diagnostic(code(nu::parser::multiple_rest_params), url(docsrs))]
MultipleRestParams(#[label = "multiple rest params"] Span),
#[error("Variable not found.")]
#[diagnostic(code(nu::parser::variable_not_found), url(docsrs))]
VariableNotFound(#[label = "variable not found"] Span),
#[error("Module not found.")]
#[diagnostic(code(nu::parser::module_not_found), url(docsrs))]
ModuleNotFound(#[label = "module not found"] Span),
#[error("Unknown command.")]
#[diagnostic(
code(nu::parser::unknown_command),
url(docsrs),
// TODO: actual suggestions
// help("Did you mean `foo`?")
)]
UnknownCommand(#[label = "unknown command"] Span),
#[error("Non-UTF8 code.")]
#[diagnostic(code(nu::parser::non_utf8), url(docsrs))]
NonUtf8(#[label = "non-UTF8 code"] Span),
#[error("The `{0}` command doesn't have flag `{1}`.")]
#[diagnostic(code(nu::parser::unknown_flag), url(docsrs))]
UnknownFlag(String, String, #[label = "unknown flag"] Span),
#[error("Unknown type.")]
#[diagnostic(code(nu::parser::unknown_type), url(docsrs))]
UnknownType(#[label = "unknown type"] Span),
#[error("Missing flag param.")]
#[diagnostic(code(nu::parser::missing_flag_param), url(docsrs))]
MissingFlagParam(#[label = "flag missing param"] Span),
#[error("Batches of short flags can't take arguments.")]
#[diagnostic(code(nu::parser::short_flag_arg_cant_take_arg), url(docsrs))]
ShortFlagBatchCantTakeArg(#[label = "short flag batches can't take args"] Span),
#[error("Missing required positional argument.")]
#[diagnostic(code(nu::parser::missing_positional), url(docsrs))]
MissingPositional(String, #[label("missing {0}")] Span),
#[error("Missing argument to `{0}`.")]
#[diagnostic(code(nu::parser::keyword_missing_arg), url(docsrs))]
KeywordMissingArgument(String, #[label("missing value that follows {0}")] Span),
#[error("Missing type.")]
#[diagnostic(code(nu::parser::missing_type), url(docsrs))]
MissingType(#[label = "expected type"] Span),
#[error("Type mismatch.")]
#[diagnostic(code(nu::parser::type_mismatch), url(docsrs))]
TypeMismatch(Type, Type, #[label("expected {0:?}, found {1:?}")] Span), // expected, found, span
#[error("Missing required flag.")]
#[diagnostic(code(nu::parser::missing_required_flag), url(docsrs))]
MissingRequiredFlag(String, #[label("missing required flag {0}")] Span),
#[error("Incomplete math expression.")]
#[diagnostic(code(nu::parser::incomplete_math_expression), url(docsrs))]
IncompleteMathExpression(#[label = "incomplete math expression"] Span),
#[error("Unknown state.")]
#[diagnostic(code(nu::parser::unknown_state), url(docsrs))]
UnknownState(String, #[label("{0}")] Span),
#[error("Parser incomplete.")]
#[diagnostic(code(nu::parser::parser_incomplete), url(docsrs))]
IncompleteParser(#[label = "parser support missing for this expression"] Span),
#[error("Rest parameter needs a name.")]
#[diagnostic(code(nu::parser::rest_needs_name), url(docsrs))]
RestNeedsName(#[label = "needs a parameter name"] Span),
#[error("Extra columns.")]
#[diagnostic(code(nu::parser::extra_columns), url(docsrs))]
ExtraColumns(
usize,
#[label("expected {0} column{}", if *.0 == 1 { "" } else { "s" })] Span,
),
#[error("Missing columns.")]
#[diagnostic(code(nu::parser::missing_columns), url(docsrs))]
MissingColumns(
usize,
#[label("expected {0} column{}", if *.0 == 1 { "" } else { "s" })] Span,
),
#[error("{0}")]
#[diagnostic(code(nu::parser::assignment_mismatch), url(docsrs))]
AssignmentMismatch(String, String, #[label("{1}")] Span),
#[error("Missing import pattern.")]
#[diagnostic(code(nu::parser::missing_import_pattern), url(docsrs))]
MissingImportPattern(#[label = "needs an import pattern"] Span),
#[error("Module export not found.")]
#[diagnostic(code(nu::parser::export_not_found), url(docsrs))]
ExportNotFound(#[label = "could not find imports"] Span),
}

View File

@ -10,11 +10,13 @@ pub enum FlatShape {
Range,
InternalCall,
External,
ExternalArg,
Literal,
Operator,
Signature,
String,
Variable,
Custom(String),
}
pub fn flatten_block(working_set: &StateWorkingSet, block: &Block) -> Vec<(Span, FlatShape)> {
@ -39,6 +41,10 @@ pub fn flatten_expression(
working_set: &StateWorkingSet,
expr: &Expression,
) -> Vec<(Span, FlatShape)> {
if let Some(custom_completion) = &expr.custom_completion {
return vec![(expr.span, FlatShape::Custom(custom_completion.clone()))];
}
match &expr.expr {
Expr::BinaryOp(lhs, op, rhs) => {
let mut output = vec![];
@ -55,8 +61,14 @@ pub fn flatten_expression(
}
output
}
Expr::ExternalCall(..) => {
vec![(expr.span, FlatShape::External)]
Expr::ExternalCall(name, args) => {
let mut output = vec![(*name, FlatShape::External)];
for arg in args {
output.push((*arg, FlatShape::ExternalArg));
}
output
}
Expr::Garbage => {
vec![(expr.span, FlatShape::Garbage)]
@ -67,10 +79,10 @@ pub fn flatten_expression(
Expr::Float(_) => {
vec![(expr.span, FlatShape::Float)]
}
Expr::FullCellPath(column_path) => {
Expr::FullCellPath(cell_path) => {
let mut output = vec![];
output.extend(flatten_expression(working_set, &column_path.head));
for path_element in &column_path.tail {
output.extend(flatten_expression(working_set, &cell_path.head));
for path_element in &cell_path.tail {
match path_element {
PathMember::String { span, .. } => output.push((*span, FlatShape::String)),
PathMember::Int { span, .. } => output.push((*span, FlatShape::Int)),
@ -78,15 +90,19 @@ pub fn flatten_expression(
}
output
}
Expr::Range(from, to, op) => {
Expr::Range(from, next, to, op) => {
let mut output = vec![];
if let Some(f) = from {
output.extend(flatten_expression(working_set, f));
}
if let Some(s) = next {
output.extend(vec![(op.next_op_span, FlatShape::Operator)]);
output.extend(flatten_expression(working_set, s));
}
output.extend(vec![(op.span, FlatShape::Operator)]);
if let Some(t) = to {
output.extend(flatten_expression(working_set, t));
}
output.extend(vec![(op.span, FlatShape::Operator)]);
output
}
Expr::Bool(_) => {
@ -114,6 +130,7 @@ pub fn flatten_expression(
Expr::String(_) => {
vec![(expr.span, FlatShape::String)]
}
Expr::RowCondition(_, expr) => flatten_expression(working_set, expr),
Expr::Subexpression(block_id) => {
flatten_block(working_set, working_set.get_block(*block_id))
}

View File

@ -168,7 +168,7 @@ pub fn lex_item(
(delim as char).to_string(),
Span {
start: span.end,
end: span.end + 1,
end: span.end,
},
);
@ -181,7 +181,13 @@ pub fn lex_item(
// correct information from the non-lite parse.
return (
span,
Some(ParseError::UnexpectedEof((delim as char).to_string(), span)),
Some(ParseError::UnexpectedEof(
(delim as char).to_string(),
Span {
start: span.end,
end: span.end,
},
)),
);
}

View File

@ -2,6 +2,7 @@ mod errors;
mod flatten;
mod lex;
mod lite_parse;
mod parse_keywords;
mod parser;
mod type_check;
@ -9,4 +10,7 @@ pub use errors::ParseError;
pub use flatten::{flatten_block, FlatShape};
pub use lex::{lex, Token, TokenContents};
pub use lite_parse::{lite_parse, LiteBlock};
pub use parse_keywords::{
parse_alias, parse_def, parse_def_predecl, parse_let, parse_module, parse_use,
};
pub use parser::{parse, Import, VarDecl};

View File

@ -0,0 +1,605 @@
use nu_protocol::{
ast::{Block, Call, Expr, Expression, ImportPatternMember, Pipeline, Statement},
engine::StateWorkingSet,
span, DeclId, ShellError, Span, SyntaxShape, Type,
};
use std::path::Path;
use crate::{
lex, lite_parse,
parser::{
check_name, garbage, garbage_statement, parse, parse_block_expression,
parse_import_pattern, parse_internal_call, parse_signature, parse_string,
},
ParseError,
};
pub fn parse_def_predecl(working_set: &mut StateWorkingSet, spans: &[Span]) {
let name = working_set.get_span_contents(spans[0]);
if name == b"def" && spans.len() >= 4 {
let (name_expr, ..) = parse_string(working_set, spans[1]);
let name = name_expr.as_string();
working_set.enter_scope();
// FIXME: because parse_signature will update the scope with the variables it sees
// we end up parsing the signature twice per def. The first time is during the predecl
// so that we can see the types that are part of the signature, which we need for parsing.
// The second time is when we actually parse the body itself.
// We can't reuse the first time because the variables that are created during parse_signature
// are lost when we exit the scope below.
let (sig, ..) = parse_signature(working_set, spans[2]);
let signature = sig.as_signature();
working_set.exit_scope();
if let (Some(name), Some(mut signature)) = (name, signature) {
signature.name = name;
let decl = signature.predeclare();
working_set.add_decl(decl);
}
}
}
pub fn parse_def(
working_set: &mut StateWorkingSet,
spans: &[Span],
) -> (Statement, Option<ParseError>) {
let mut error = None;
let name = working_set.get_span_contents(spans[0]);
if name == b"def" {
let def_decl_id = working_set
.find_decl(b"def")
.expect("internal error: missing def command");
let mut call = Box::new(Call {
head: spans[0],
decl_id: def_decl_id,
positional: vec![],
named: vec![],
});
let call = if let Some(name_span) = spans.get(1) {
let (name_expr, err) = parse_string(working_set, *name_span);
error = error.or(err);
let name = name_expr.as_string();
call.positional.push(name_expr);
if let Some(sig_span) = spans.get(2) {
working_set.enter_scope();
let (sig, err) = parse_signature(working_set, *sig_span);
error = error.or(err);
let signature = sig.as_signature();
call.positional.push(sig);
if let Some(block_span) = spans.get(3) {
let (block, err) = parse_block_expression(
working_set,
&SyntaxShape::Block(Some(vec![])),
*block_span,
);
error = error.or(err);
let block_id = block.as_block();
call.positional.push(block);
if let (Some(name), Some(mut signature), Some(block_id)) =
(name, signature, block_id)
{
let decl_id = working_set
.find_decl(name.as_bytes())
.expect("internal error: predeclaration failed to add definition");
let declaration = working_set.get_decl_mut(decl_id);
signature.name = name;
*declaration = signature.into_block_command(block_id);
}
} else {
let err_span = Span {
start: sig_span.end,
end: sig_span.end,
};
error = error
.or_else(|| Some(ParseError::MissingPositional("block".into(), err_span)));
}
working_set.exit_scope();
call
} else {
let err_span = Span {
start: name_span.end,
end: name_span.end,
};
error = error
.or_else(|| Some(ParseError::MissingPositional("parameters".into(), err_span)));
call
}
} else {
let err_span = Span {
start: spans[0].end,
end: spans[0].end,
};
error = error.or_else(|| {
Some(ParseError::MissingPositional(
"definition name".into(),
err_span,
))
});
call
};
(
Statement::Pipeline(Pipeline::from_vec(vec![Expression {
expr: Expr::Call(call),
span: span(spans),
ty: Type::Unknown,
custom_completion: None,
}])),
error,
)
} else {
(
garbage_statement(spans),
Some(ParseError::UnknownState(
"Expected structure: def <name> [] {}".into(),
span(spans),
)),
)
}
}
pub fn parse_alias(
working_set: &mut StateWorkingSet,
spans: &[Span],
) -> (Statement, Option<ParseError>) {
let name = working_set.get_span_contents(spans[0]);
if name == b"alias" {
if let Some((span, err)) = check_name(working_set, spans) {
return (
Statement::Pipeline(Pipeline::from_vec(vec![garbage(*span)])),
Some(err),
);
}
if let Some(decl_id) = working_set.find_decl(b"alias") {
let (call, call_span, _) =
parse_internal_call(working_set, spans[0], &spans[1..], decl_id);
if spans.len() >= 4 {
let alias_name = working_set.get_span_contents(spans[1]);
let alias_name = if alias_name.starts_with(b"\"")
&& alias_name.ends_with(b"\"")
&& alias_name.len() > 1
{
alias_name[1..(alias_name.len() - 1)].to_vec()
} else {
alias_name.to_vec()
};
let _equals = working_set.get_span_contents(spans[2]);
let replacement = spans[3..].to_vec();
//println!("{:?} {:?}", alias_name, replacement);
working_set.add_alias(alias_name, replacement);
}
return (
Statement::Pipeline(Pipeline::from_vec(vec![Expression {
expr: Expr::Call(call),
span: call_span,
ty: Type::Unknown,
custom_completion: None,
}])),
None,
);
}
}
(
garbage_statement(spans),
Some(ParseError::UnknownState(
"internal error: alias statement unparseable".into(),
span(spans),
)),
)
}
pub fn parse_module(
working_set: &mut StateWorkingSet,
spans: &[Span],
) -> (Statement, Option<ParseError>) {
// TODO: Currently, module is closing over its parent scope (i.e., defs in the parent scope are
// visible and usable in this module's scope). We might want to disable that. How?
let mut error = None;
let bytes = working_set.get_span_contents(spans[0]);
// parse_def() equivalent
if bytes == b"module" && spans.len() >= 3 {
let (module_name_expr, err) = parse_string(working_set, spans[1]);
error = error.or(err);
let module_name = module_name_expr
.as_string()
.expect("internal error: module name is not a string");
// parse_block_expression() equivalent
let block_span = spans[2];
let block_bytes = working_set.get_span_contents(block_span);
let mut start = block_span.start;
let mut end = block_span.end;
if block_bytes.starts_with(b"{") {
start += 1;
} else {
return (
garbage_statement(spans),
Some(ParseError::Expected("block".into(), block_span)),
);
}
if block_bytes.ends_with(b"}") {
end -= 1;
} else {
error = error.or_else(|| {
Some(ParseError::Unclosed(
"}".into(),
Span {
start: end,
end: end + 1,
},
))
});
}
let block_span = Span { start, end };
let source = working_set.get_span_contents(block_span);
let (output, err) = lex(source, start, &[], &[]);
error = error.or(err);
working_set.enter_scope();
// Do we need block parameters?
let (output, err) = lite_parse(&output);
error = error.or(err);
// We probably don't need $it
// we're doing parse_block() equivalent
// let (mut output, err) = parse_block(working_set, &output, false);
for pipeline in &output.block {
if pipeline.commands.len() == 1 {
parse_def_predecl(working_set, &pipeline.commands[0].parts);
}
}
let mut exports: Vec<(Vec<u8>, DeclId)> = vec![];
let block: Block = output
.block
.iter()
.map(|pipeline| {
if pipeline.commands.len() == 1 {
// this one here is doing parse_statement() equivalent
// let (stmt, err) = parse_statement(working_set, &pipeline.commands[0].parts);
let name = working_set.get_span_contents(pipeline.commands[0].parts[0]);
let (stmt, err) = match name {
// TODO: Here we can add other stuff that's allowed for modules
b"def" => {
let (stmt, err) = parse_def(working_set, &pipeline.commands[0].parts);
if err.is_none() {
let decl_name =
working_set.get_span_contents(pipeline.commands[0].parts[1]);
let decl_id = working_set
.find_decl(decl_name)
.expect("internal error: failed to find added declaration");
// TODO: Later, we want to put this behind 'export'
exports.push((decl_name.into(), decl_id));
}
(stmt, err)
}
_ => (
garbage_statement(&pipeline.commands[0].parts),
Some(ParseError::Expected(
"def".into(),
pipeline.commands[0].parts[0],
)),
),
};
if error.is_none() {
error = err;
}
stmt
} else {
error = Some(ParseError::Expected("not a pipeline".into(), block_span));
garbage_statement(spans)
}
})
.into();
let block = block.with_exports(exports);
working_set.exit_scope();
let block_id = working_set.add_module(&module_name, block);
let block_expr = Expression {
expr: Expr::Block(block_id),
span: block_span,
ty: Type::Block,
custom_completion: None,
};
let module_decl_id = working_set
.find_decl(b"module")
.expect("internal error: missing module command");
let call = Box::new(Call {
head: spans[0],
decl_id: module_decl_id,
positional: vec![module_name_expr, block_expr],
named: vec![],
});
(
Statement::Pipeline(Pipeline::from_vec(vec![Expression {
expr: Expr::Call(call),
span: span(spans),
ty: Type::Unknown,
custom_completion: None,
}])),
error,
)
} else {
(
garbage_statement(spans),
Some(ParseError::UnknownState(
"Expected structure: module <name> {}".into(),
span(spans),
)),
)
}
}
pub fn parse_use(
working_set: &mut StateWorkingSet,
spans: &[Span],
) -> (Statement, Option<ParseError>) {
let mut error = None;
let bytes = working_set.get_span_contents(spans[0]);
// TODO: Currently, this directly imports the module's definitions into the current scope.
// Later, we want to put them behind the module's name and add selective importing
if bytes == b"use" && spans.len() >= 2 {
let (module_name_expr, err) = parse_string(working_set, spans[1]);
error = error.or(err);
let (import_pattern, err) = parse_import_pattern(working_set, spans[1]);
error = error.or(err);
let exports = if let Some(block_id) = working_set.find_module(&import_pattern.head) {
// TODO: Since we don't use the Block at all, we might just as well create a separate
// Module that holds only the exports, without having Blocks in the way.
working_set.get_block(block_id).exports.clone()
} else {
return (
garbage_statement(spans),
Some(ParseError::ModuleNotFound(spans[1])),
);
};
let exports = if import_pattern.members.is_empty() {
exports
.into_iter()
.map(|(name, id)| {
let mut new_name = import_pattern.head.to_vec();
new_name.push(b'.');
new_name.extend(&name);
(new_name, id)
})
.collect()
} else {
match &import_pattern.members[0] {
ImportPatternMember::Glob { .. } => exports,
ImportPatternMember::Name { name, span } => {
let new_exports: Vec<(Vec<u8>, usize)> =
exports.into_iter().filter(|x| &x.0 == name).collect();
if new_exports.is_empty() {
error = error.or(Some(ParseError::ExportNotFound(*span)))
}
new_exports
}
ImportPatternMember::List { names } => {
let mut output = vec![];
for (name, span) in names {
let mut new_exports: Vec<(Vec<u8>, usize)> = exports
.iter()
.filter_map(|x| if &x.0 == name { Some(x.clone()) } else { None })
.collect();
if new_exports.is_empty() {
error = error.or(Some(ParseError::ExportNotFound(*span)))
} else {
output.append(&mut new_exports)
}
}
output
}
}
};
// Extend the current scope with the module's exports
working_set.activate_overlay(exports);
// Create the Use command call
let use_decl_id = working_set
.find_decl(b"use")
.expect("internal error: missing use command");
let call = Box::new(Call {
head: spans[0],
decl_id: use_decl_id,
positional: vec![module_name_expr],
named: vec![],
});
(
Statement::Pipeline(Pipeline::from_vec(vec![Expression {
expr: Expr::Call(call),
span: span(spans),
ty: Type::Unknown,
custom_completion: None,
}])),
error,
)
} else {
(
garbage_statement(spans),
Some(ParseError::UnknownState(
"Expected structure: use <name>".into(),
span(spans),
)),
)
}
}
pub fn parse_let(
working_set: &mut StateWorkingSet,
spans: &[Span],
) -> (Statement, Option<ParseError>) {
let name = working_set.get_span_contents(spans[0]);
if name == b"let" {
if let Some((span, err)) = check_name(working_set, spans) {
return (
Statement::Pipeline(Pipeline::from_vec(vec![garbage(*span)])),
Some(err),
);
}
if let Some(decl_id) = working_set.find_decl(b"let") {
let (call, call_span, err) =
parse_internal_call(working_set, spans[0], &spans[1..], decl_id);
// Update the variable to the known type if we can.
if err.is_none() {
let var_id = call.positional[0]
.as_var()
.expect("internal error: expected variable");
let rhs_type = call.positional[1].ty.clone();
working_set.set_variable_type(var_id, rhs_type);
}
return (
Statement::Pipeline(Pipeline::from_vec(vec![Expression {
expr: Expr::Call(call),
span: call_span,
ty: Type::Unknown,
custom_completion: None,
}])),
err,
);
}
}
(
garbage_statement(spans),
Some(ParseError::UnknownState(
"internal error: let statement unparseable".into(),
span(spans),
)),
)
}
pub fn parse_source(
working_set: &mut StateWorkingSet,
spans: &[Span],
) -> (Statement, Option<ParseError>) {
let name = working_set.get_span_contents(spans[0]);
if name == b"source" {
if let Some(decl_id) = working_set.find_decl(b"source") {
let (call, call_span, _) =
parse_internal_call(working_set, spans[0], &spans[1..], decl_id);
// Command and one file name
if spans.len() >= 2 {
let name_expr = working_set.get_span_contents(spans[1]);
if let Ok(filename) = String::from_utf8(name_expr.to_vec()) {
let source_file = Path::new(&filename);
let path = source_file;
let contents = std::fs::read(path);
if let Ok(contents) = contents {
// This will load the defs from the file into the
// working set, if it was a successful parse.
let (block, err) = parse(
working_set,
path.file_name().and_then(|x| x.to_str()),
&contents,
false,
);
if let Some(_) = err {
// Unsuccessful parse of file
// return (
// Statement::Pipeline(Pipeline::from_vec(vec![Expression {
// expr: Expr::Call(call),
// span: call_span,
// ty: Type::Unknown,
// }])),
// None,
// );
}
}
}
}
return (
Statement::Pipeline(Pipeline::from_vec(vec![Expression {
expr: Expr::Call(call),
span: call_span,
ty: Type::Unknown,
custom_completion: None,
}])),
None,
);
}
}
(
garbage_statement(spans),
Some(ParseError::UnknownState(
"internal error: let statement unparseable".into(),
span(spans),
)),
)
}

File diff suppressed because it is too large Load Diff

View File

@ -20,6 +20,7 @@ pub fn math_result_type(
op: &mut Expression,
rhs: &mut Expression,
) -> (Type, Option<ParseError>) {
//println!("checking: {:?} {:?} {:?}", lhs, op, rhs);
match &op.expr {
Expr::Operator(operator) => match operator {
Operator::Plus => match (&lhs.ty, &rhs.ty) {
@ -31,6 +32,7 @@ pub fn math_result_type(
(Type::Unknown, _) => (Type::Unknown, None),
(_, Type::Unknown) => (Type::Unknown, None),
(Type::Int, _) => {
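// capture the right-hand type before `rhs` is replaced with garbage below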
let ty = rhs.ty.clone();
*rhs = Expression::garbage(rhs.span);
(
Type::Unknown,
@ -39,7 +41,7 @@ pub fn math_result_type(
lhs.span,
lhs.ty.clone(),
rhs.span,
rhs.ty.clone(),
ty,
)),
)
}

View File

@ -2,10 +2,43 @@ use nu_parser::ParseError;
use nu_parser::*;
use nu_protocol::{
ast::{Expr, Expression, Pipeline, Statement},
engine::{EngineState, StateWorkingSet},
engine::{Command, EngineState, StateWorkingSet},
Signature, SyntaxShape,
};
#[cfg(test)]
pub struct Let;
#[cfg(test)]
impl Command for Let {
fn name(&self) -> &str {
"let"
}
fn usage(&self) -> &str {
"Create a variable and give it a value."
}
fn signature(&self) -> nu_protocol::Signature {
Signature::build("let")
.required("var_name", SyntaxShape::VarWithOptType, "variable name")
.required(
"initial_value",
SyntaxShape::Keyword(b"=".to_vec(), Box::new(SyntaxShape::Expression)),
"equals sign followed by value",
)
}
fn run(
&self,
_context: &nu_protocol::engine::EvaluationContext,
_call: &nu_protocol::ast::Call,
_input: nu_protocol::Value,
) -> Result<nu_protocol::Value, nu_protocol::ShellError> {
todo!()
}
}
#[test]
pub fn parse_int() {
let engine_state = EngineState::new();
@ -164,6 +197,7 @@ mod range {
Expression {
expr: Expr::Range(
Some(_),
None,
Some(_),
RangeOperator {
inclusion: RangeInclusion::Inclusive,
@ -195,6 +229,7 @@ mod range {
Expression {
expr: Expr::Range(
Some(_),
None,
Some(_),
RangeOperator {
inclusion: RangeInclusion::RightExclusive,
@ -209,6 +244,38 @@ mod range {
}
}
#[test]
fn parse_reverse_range() {
let engine_state = EngineState::new();
let mut working_set = StateWorkingSet::new(&engine_state);
let (block, err) = parse(&mut working_set, None, b"10..0", true);
assert!(err.is_none());
assert!(block.len() == 1);
match &block[0] {
Statement::Pipeline(Pipeline { expressions }) => {
assert!(expressions.len() == 1);
assert!(matches!(
expressions[0],
Expression {
expr: Expr::Range(
Some(_),
None,
Some(_),
RangeOperator {
inclusion: RangeInclusion::Inclusive,
..
}
),
..
}
))
}
_ => panic!("No match"),
}
}
#[test]
fn parse_subexpression_range() {
let engine_state = EngineState::new();
@ -226,6 +293,7 @@ mod range {
Expression {
expr: Expr::Range(
Some(_),
None,
Some(_),
RangeOperator {
inclusion: RangeInclusion::RightExclusive,
@ -245,6 +313,8 @@ mod range {
let engine_state = EngineState::new();
let mut working_set = StateWorkingSet::new(&engine_state);
working_set.add_decl(Box::new(Let));
let (block, err) = parse(&mut working_set, None, b"let a = 2; $a..10", true);
assert!(err.is_none());
@ -257,6 +327,7 @@ mod range {
Expression {
expr: Expr::Range(
Some(_),
None,
Some(_),
RangeOperator {
inclusion: RangeInclusion::Inclusive,
@ -276,6 +347,8 @@ mod range {
let engine_state = EngineState::new();
let mut working_set = StateWorkingSet::new(&engine_state);
working_set.add_decl(Box::new(Let));
let (block, err) = parse(&mut working_set, None, b"let a = 2; $a..<($a + 10)", true);
assert!(err.is_none());
@ -288,6 +361,7 @@ mod range {
Expression {
expr: Expr::Range(
Some(_),
None,
Some(_),
RangeOperator {
inclusion: RangeInclusion::RightExclusive,
@ -320,6 +394,39 @@ mod range {
expr: Expr::Range(
Some(_),
None,
None,
RangeOperator {
inclusion: RangeInclusion::Inclusive,
..
}
),
..
}
))
}
_ => panic!("No match"),
}
}
#[test]
fn parse_left_unbounded_range() {
let engine_state = EngineState::new();
let mut working_set = StateWorkingSet::new(&engine_state);
let (block, err) = parse(&mut working_set, None, b"..10", true);
assert!(err.is_none());
assert!(block.len() == 1);
match &block[0] {
Statement::Pipeline(Pipeline { expressions }) => {
assert!(expressions.len() == 1);
assert!(matches!(
expressions[0],
Expression {
expr: Expr::Range(
None,
None,
Some(_),
RangeOperator {
inclusion: RangeInclusion::Inclusive,
..
@ -349,6 +456,39 @@ mod range {
expressions[0],
Expression {
expr: Expr::Range(
Some(_),
None,
Some(_),
RangeOperator {
inclusion: RangeInclusion::Inclusive,
..
}
),
..
}
))
}
_ => panic!("No match"),
}
}
#[test]
fn parse_float_range() {
let engine_state = EngineState::new();
let mut working_set = StateWorkingSet::new(&engine_state);
let (block, err) = parse(&mut working_set, None, b"2.0..4.0..10.0", true);
assert!(err.is_none());
assert!(block.len() == 1);
match &block[0] {
Statement::Pipeline(Pipeline { expressions }) => {
assert!(expressions.len() == 1);
assert!(matches!(
expressions[0],
Expression {
expr: Expr::Range(
Some(_),
Some(_),
Some(_),
RangeOperator {

crates/nu-path/Cargo.toml

@ -0,0 +1,12 @@
[package]
authors = ["The Nu Project Contributors"]
description = "Path handling library for Nushell"
edition = "2018"
license = "MIT"
name = "nu-path"
version = "0.37.1"
[dependencies]
dirs-next = "2.0.0"
dunce = "1.0.1"

crates/nu-path/README.md

@ -0,0 +1,3 @@
# nu-path
This crate takes care of path handling in Nushell, such as canonicalization and component expansion, as well as other path-related utilities.

crates/nu-path/src/dots.rs

@ -0,0 +1,259 @@
use std::path::{is_separator, Component, Path, PathBuf};
const EXPAND_STR: &str = if cfg!(windows) { r"..\" } else { "../" };
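/// Helper for expand_ndots: pushes `count - 1` levels of `..` (joined by the
/// platform separator) onto `string`; a single dot is passed through unchanged.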
fn handle_dots_push(string: &mut String, count: u8) {
if count < 1 {
return;
}
if count == 1 {
string.push('.');
return;
}
for _ in 0..(count - 1) {
string.push_str(EXPAND_STR);
}
string.pop(); // remove last '/'
}
/// Expands any occurrence of more than two dots into a sequence of ../ (or ..\ on Windows), e.g.,
/// "..." into "../..", "...." into "../../..", etc.
pub fn expand_ndots(path: impl AsRef<Path>) -> PathBuf {
// Check if path is valid UTF-8 and if not, return it as it is to avoid breaking it via string
// conversion.
let path_str = match path.as_ref().to_str() {
Some(s) => s,
None => return path.as_ref().into(),
};
// find if we need to expand any >2 dot paths and early exit if not
let mut dots_count = 0u8;
let ndots_present = {
for chr in path_str.chars() {
if chr == '.' {
dots_count += 1;
} else {
if is_separator(chr) && (dots_count > 2) {
// this path component had >2 dots
break;
}
dots_count = 0;
}
}
dots_count > 2
};
if !ndots_present {
return path.as_ref().into();
}
let mut dots_count = 0u8;
let mut expanded = String::new();
for chr in path_str.chars() {
if chr == '.' {
dots_count += 1;
} else {
if is_separator(chr) {
// check for dots expansion only at path component boundaries
handle_dots_push(&mut expanded, dots_count);
dots_count = 0;
} else {
// got non-dot within path component => do not expand any dots
while dots_count > 0 {
expanded.push('.');
dots_count -= 1;
}
}
expanded.push(chr);
}
}
handle_dots_push(&mut expanded, dots_count);
expanded.into()
}
/// Expand "." and ".." into nothing and parent directory, respectively.
pub fn expand_dots(path: impl AsRef<Path>) -> PathBuf {
let path = path.as_ref();
// Early-exit if path does not contain '.' or '..'
if !path
.components()
.any(|c| std::matches!(c, Component::CurDir | Component::ParentDir))
{
return path.into();
}
let mut result = PathBuf::with_capacity(path.as_os_str().len());
// Only pop/skip path elements if the previous one was an actual path element
let prev_is_normal = |p: &Path| -> bool {
p.components()
.next_back()
.map(|c| std::matches!(c, Component::Normal(_)))
.unwrap_or(false)
};
path.components().for_each(|component| match component {
Component::ParentDir if prev_is_normal(&result) => {
result.pop();
}
Component::CurDir if prev_is_normal(&result) => {}
_ => result.push(component),
});
dunce::simplified(&result).to_path_buf()
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn expand_two_dots() {
let path = Path::new("/foo/bar/..");
assert_eq!(
PathBuf::from("/foo"), // missing path
expand_dots(path)
);
}
#[test]
fn expand_dots_with_curdir() {
let path = Path::new("/foo/./bar/./baz");
assert_eq!(PathBuf::from("/foo/bar/baz"), expand_dots(path));
}
fn check_ndots_expansion(expected: &str, s: &str) {
let expanded = expand_ndots(Path::new(s));
assert_eq!(Path::new(expected), &expanded);
}
// common tests
#[test]
fn string_without_ndots() {
check_ndots_expansion("../hola", "../hola");
}
#[test]
fn string_with_three_ndots_and_chars() {
check_ndots_expansion("a...b", "a...b");
}
#[test]
fn string_with_two_ndots_and_chars() {
check_ndots_expansion("a..b", "a..b");
}
#[test]
fn string_with_one_dot_and_chars() {
check_ndots_expansion("a.b", "a.b");
}
#[test]
fn expand_dots_double_dots_no_change() {
// Can't resolve this as we don't know our parent dir
assert_eq!(Path::new(".."), expand_dots(Path::new("..")));
}
#[test]
fn expand_dots_single_dot_no_change() {
// Can't resolve this as we don't know our current dir
assert_eq!(Path::new("."), expand_dots(Path::new(".")));
}
#[test]
fn expand_dots_multi_single_dots_no_change() {
assert_eq!(Path::new("././."), expand_dots(Path::new("././.")));
}
#[test]
fn expand_multi_double_dots_no_change() {
assert_eq!(Path::new("../../../"), expand_dots(Path::new("../../../")));
}
#[test]
fn expand_dots_no_change_with_dirs() {
// Can't resolve this as we don't know our parent dir
assert_eq!(
Path::new("../../../dir1/dir2/"),
expand_dots(Path::new("../../../dir1/dir2"))
);
}
#[test]
fn expand_dots_simple() {
assert_eq!(Path::new("/foo"), expand_dots(Path::new("/foo/bar/..")));
}
#[test]
fn expand_dots_complex() {
assert_eq!(
Path::new("/test"),
expand_dots(Path::new("/foo/./bar/../../test/././test2/../"))
);
}
#[cfg(windows)]
mod windows {
use super::*;
#[test]
fn string_with_three_ndots() {
check_ndots_expansion(r"..\..", "...");
}
#[test]
fn string_with_mixed_ndots_and_chars() {
check_ndots_expansion(
r"a...b/./c..d/../e.f/..\..\..//.",
"a...b/./c..d/../e.f/....//.",
);
}
#[test]
fn string_with_three_ndots_and_final_slash() {
check_ndots_expansion(r"..\../", ".../");
}
#[test]
fn string_with_three_ndots_and_garbage() {
check_ndots_expansion(r"ls ..\../ garbage.*[", "ls .../ garbage.*[");
}
}
#[cfg(not(windows))]
mod non_windows {
use super::*;
#[test]
fn string_with_three_ndots() {
check_ndots_expansion(r"../..", "...");
}
#[test]
fn string_with_mixed_ndots_and_chars() {
check_ndots_expansion(
"a...b/./c..d/../e.f/../../..//.",
"a...b/./c..d/../e.f/....//.",
);
}
#[test]
fn string_with_three_ndots_and_final_slash() {
check_ndots_expansion("../../", ".../");
}
#[test]
fn string_with_three_ndots_and_garbage() {
check_ndots_expansion("ls ../../ garbage.*[", "ls .../ garbage.*[");
}
}
}


@ -0,0 +1,75 @@
use std::io;
use std::path::{Path, PathBuf};
use super::dots::{expand_dots, expand_ndots};
use super::tilde::expand_tilde;
// Join a path relative to another path. Paths starting with a tilde are considered absolute.
fn join_path_relative<P, Q>(path: P, relative_to: Q) -> PathBuf
where
P: AsRef<Path>,
Q: AsRef<Path>,
{
let path = path.as_ref();
let relative_to = relative_to.as_ref();
if path == Path::new(".") {
// Joining a Path with '.' appends a '.' at the end, making the prompt
// uglier, so we don't join anything, which should result in an equal
// path on all supported systems.
relative_to.into()
} else if path.starts_with("~") {
// do not end up with "/some/path/~"
path.into()
} else {
relative_to.join(path)
}
}
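// For example: join_path_relative("foo", "/base") yields "/base/foo",
// join_path_relative(".", "/base") yields "/base", and
// join_path_relative("~/foo", "/base") yields "~/foo" unchanged.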
/// Resolve all symbolic links and all components (tilde, ., .., ...+) and return the path in its
/// absolute form.
///
/// Fails under the same conditions as
/// [std::fs::canonicalize](https://doc.rust-lang.org/std/fs/fn.canonicalize.html).
pub fn canonicalize(path: impl AsRef<Path>) -> io::Result<PathBuf> {
let path = expand_tilde(path);
let path = expand_ndots(path);
dunce::canonicalize(path)
}
/// Same as canonicalize() but the input path is specified relative to another path
pub fn canonicalize_with<P, Q>(path: P, relative_to: Q) -> io::Result<PathBuf>
where
P: AsRef<Path>,
Q: AsRef<Path>,
{
let path = join_path_relative(path, relative_to);
canonicalize(path)
}
/// Resolve only path components (tilde, ., .., ...+), if possible.
///
/// The function works in a "best effort" mode: it does not fail but rather returns the unexpanded
/// version if the expansion is not possible.
///
/// Furthermore, unlike canonicalize(), it does not use system calls (such as readlink).
///
/// Does not convert to absolute form nor does it resolve symlinks.
pub fn expand_path(path: impl AsRef<Path>) -> PathBuf {
let path = expand_tilde(path);
let path = expand_ndots(path);
expand_dots(path)
}
/// Same as expand_path() but the input path is specified relative to another path
pub fn expand_path_with<P, Q>(path: P, relative_to: Q) -> PathBuf
where
P: AsRef<Path>,
Q: AsRef<Path>,
{
let path = join_path_relative(path, relative_to);
expand_path(path)
}
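// A rough usage sketch contrasting the two entry points above (paths illustrative):
//
//     let logical = expand_path("~/foo/.././bar");   // pure path arithmetic, no I/O
//     let physical = canonicalize("~/foo/.././bar"); // io::Result: resolves symlinks
//
// expand_path never fails and never touches the filesystem; canonicalize resolves
// symlinks and can fail.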


@ -0,0 +1,8 @@
mod dots;
mod expansions;
mod tilde;
mod util;
pub use expansions::{canonicalize, canonicalize_with, expand_path, expand_path_with};
pub use tilde::expand_tilde;
pub use util::trim_trailing_slash;


@ -0,0 +1,85 @@
use std::path::{Path, PathBuf};
fn expand_tilde_with(path: impl AsRef<Path>, home: Option<PathBuf>) -> PathBuf {
let path = path.as_ref();
if !path.starts_with("~") {
return path.into();
}
match home {
None => path.into(),
Some(mut h) => {
if h == Path::new("/") {
// Corner case: `h` is a root directory;
// don't prepend extra `/`, just drop the tilde.
path.strip_prefix("~").unwrap_or(path).into()
} else {
if let Ok(p) = path.strip_prefix("~/") {
h.push(p)
}
h
}
}
}
}
/// Expand tilde ("~") into a home directory if it is the first path component
pub fn expand_tilde(path: impl AsRef<Path>) -> PathBuf {
// TODO: Extend this to work with "~user" style of home paths
expand_tilde_with(path, dirs_next::home_dir())
}
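// For example, with a home directory of /home/alice, "~" expands to /home/alice
// and "~/test" to /home/alice/test; "~user" forms are left untouched for now
// (see the TODO above).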
#[cfg(test)]
mod tests {
use super::*;
fn check_expanded(s: &str) {
let home = Path::new("/home");
let buf = Some(PathBuf::from(home));
assert!(expand_tilde_with(Path::new(s), buf).starts_with(&home));
// Tests the special case in expand_tilde for "/" as home
let home = Path::new("/");
let buf = Some(PathBuf::from(home));
assert!(!expand_tilde_with(Path::new(s), buf).starts_with("//"));
}
fn check_not_expanded(s: &str) {
let home = PathBuf::from("/home");
let expanded = expand_tilde_with(Path::new(s), Some(home));
assert!(expanded == Path::new(s));
}
#[test]
fn string_with_tilde() {
check_expanded("~");
}
#[test]
fn string_with_tilde_forward_slash() {
check_expanded("~/test/");
}
#[test]
fn string_with_tilde_double_forward_slash() {
check_expanded("~//test/");
}
#[test]
fn does_not_expand_tilde_if_tilde_is_not_first_character() {
check_not_expanded("1~1");
}
#[cfg(windows)]
#[test]
fn string_with_tilde_backslash() {
check_expanded("~\\test/test2/test3");
}
#[cfg(windows)]
#[test]
fn string_with_double_tilde_backslash() {
check_expanded("~\\\\test\\test2/test3");
}
}


@ -0,0 +1,4 @@
/// Trim any trailing path separators from a string
pub fn trim_trailing_slash(s: &str) -> &str {
s.trim_end_matches(std::path::is_separator)
}
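// For example, trim_trailing_slash("some/path///") returns "some/path"; since it
// matches std::path::is_separator, it also trims trailing `\` on Windows.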


@ -0,0 +1 @@
mod util;


@ -0,0 +1,45 @@
use nu_path::trim_trailing_slash;
use std::path::MAIN_SEPARATOR;
/// Helper function that joins string literals with '/' or '\', based on the host OS
fn join_path_sep(pieces: &[&str]) -> String {
let sep_string = String::from(MAIN_SEPARATOR);
pieces.join(&sep_string)
}
#[test]
fn trims_trailing_slash_without_trailing_slash() {
let path = join_path_sep(&["some", "path"]);
let actual = trim_trailing_slash(&path);
assert_eq!(actual, &path)
}
#[test]
fn trims_trailing_slash() {
let path = join_path_sep(&["some", "path", ""]);
let actual = trim_trailing_slash(&path);
let expected = join_path_sep(&["some", "path"]);
assert_eq!(actual, &expected)
}
#[test]
fn trims_many_trailing_slashes() {
let path = join_path_sep(&["some", "path", "", "", "", ""]);
let actual = trim_trailing_slash(&path);
let expected = join_path_sep(&["some", "path"]);
assert_eq!(actual, &expected)
}
#[test]
fn trims_trailing_slash_empty() {
let path = String::from(MAIN_SEPARATOR);
let actual = trim_trailing_slash(&path);
assert_eq!(actual, "")
}


@ -6,4 +6,6 @@ edition = "2018"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
codespan-reporting = "0.11.1"
thiserror = "1.0.29"
miette = "3.0.0"
serde = "1.0.130"


@ -1,6 +1,6 @@
use std::ops::{Index, IndexMut};
use crate::Signature;
use crate::{DeclId, Signature};
use super::Statement;
@ -8,6 +8,7 @@ use super::Statement;
pub struct Block {
pub signature: Box<Signature>,
pub stmts: Vec<Statement>,
pub exports: Vec<(Vec<u8>, DeclId)>, // Assuming just defs for now
}
impl Block {
@ -45,6 +46,28 @@ impl Block {
Self {
signature: Box::new(Signature::new("")),
stmts: vec![],
exports: vec![],
}
}
pub fn with_exports(self, exports: Vec<(Vec<u8>, DeclId)>) -> Self {
Self {
signature: self.signature,
stmts: self.stmts,
exports,
}
}
}
impl<T> From<T> for Block
where
T: Iterator<Item = Statement>,
{
fn from(stmts: T) -> Self {
Self {
signature: Box::new(Signature::new("")),
stmts: stmts.collect(),
exports: vec![],
}
}
}


@ -25,4 +25,24 @@ impl Call {
named: vec![],
}
}
pub fn has_flag(&self, flag_name: &str) -> bool {
for name in &self.named {
if flag_name == name.0 {
return true;
}
}
false
}
pub fn get_flag_expr(&self, flag_name: &str) -> Option<Expression> {
for name in &self.named {
if flag_name == name.0 {
return name.1.clone();
}
}
None
}
}
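// A rough usage sketch from inside a command's `run` (flag names illustrative):
//
//     if call.has_flag("raw") {
//         // behave differently when `--raw` was passed
//     }
//     if let Some(expr) = call.get_flag_expr("path") {
//         // inspect the unevaluated expression given to `--path`
//     }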


@ -7,14 +7,16 @@ pub enum Expr {
Int(i64),
Float(f64),
Range(
Option<Box<Expression>>,
Option<Box<Expression>>,
Option<Box<Expression>>, // from
Option<Box<Expression>>, // next value after "from"
Option<Box<Expression>>, // to
RangeOperator,
),
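// For example, `2.0..4.0..10.0` parses as (Some(2.0), Some(4.0), Some(10.0))
// and `..10` parses as (None, None, Some(10)).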
Var(VarId),
Call(Box<Call>),
ExternalCall(Vec<u8>, Vec<Vec<u8>>),
ExternalCall(Span, Vec<Span>),
Operator(Operator),
RowCondition(VarId, Box<Expression>),
BinaryOp(Box<Expression>, Box<Expression>, Box<Expression>), //lhs, op, rhs
Subexpression(BlockId),
Block(BlockId),


@ -6,6 +6,7 @@ pub struct Expression {
pub expr: Expr,
pub span: Span,
pub ty: Type,
pub custom_completion: Option<String>,
}
impl Expression {
@ -14,6 +15,7 @@ impl Expression {
expr: Expr::Garbage,
span,
ty: Type::Unknown,
custom_completion: None,
}
}


@ -0,0 +1,14 @@
use crate::Span;
#[derive(Debug, Clone)]
pub enum ImportPatternMember {
Glob { span: Span },
Name { name: Vec<u8>, span: Span },
List { names: Vec<(Vec<u8>, Span)> },
}
#[derive(Debug, Clone)]
pub struct ImportPattern {
pub head: Vec<u8>,
pub members: Vec<ImportPatternMember>,
}


@ -3,6 +3,7 @@ mod call;
mod cell_path;
mod expr;
mod expression;
mod import_pattern;
mod operator;
mod pipeline;
mod statement;
@ -12,6 +13,7 @@ pub use call::*;
pub use cell_path::*;
pub use expr::*;
pub use expression::*;
pub use import_pattern::*;
pub use operator::*;
pub use pipeline::*;
pub use statement::*;


@ -1,8 +1,9 @@
use crate::Span;
use serde::{Deserialize, Serialize};
use std::fmt::Display;
#[derive(Debug, Clone, PartialEq, Eq)]
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
pub enum Operator {
Equal,
NotEqual,
@ -49,7 +50,7 @@ impl Display for Operator {
}
}
#[derive(Debug, Copy, Clone, PartialEq)]
#[derive(Debug, Copy, Clone, PartialEq, Serialize, Deserialize)]
pub enum RangeInclusion {
Inclusive,
RightExclusive,
@ -59,6 +60,7 @@ pub enum RangeInclusion {
pub struct RangeOperator {
pub inclusion: RangeInclusion,
pub span: Span,
pub next_op_span: Span,
}
impl Display for RangeOperator {


@ -2,7 +2,7 @@ use crate::{ast::Call, value::Value, BlockId, Example, ShellError, Signature};
use super::EvaluationContext;
pub trait Command {
pub trait Command: Send + Sync {
fn name(&self) -> &str;
fn signature(&self) -> Signature {


@ -1,7 +1,7 @@
use super::Command;
use crate::{ast::Block, BlockId, DeclId, Span, Type, VarId};
use crate::{ast::Block, BlockId, DeclId, Signature, Span, Type, VarId};
use core::panic;
use std::{collections::HashMap, ops::Range, slice::Iter};
use std::{collections::HashMap, slice::Iter};
pub struct EngineState {
files: Vec<(String, usize, usize)>,
@ -17,6 +17,7 @@ pub struct ScopeFrame {
vars: HashMap<Vec<u8>, VarId>,
decls: HashMap<Vec<u8>, DeclId>,
aliases: HashMap<Vec<u8>, Vec<Span>>,
modules: HashMap<Vec<u8>, BlockId>,
}
impl ScopeFrame {
@ -25,6 +26,7 @@ impl ScopeFrame {
vars: HashMap::new(),
decls: HashMap::new(),
aliases: HashMap::new(),
modules: HashMap::new(),
}
}
@ -76,6 +78,9 @@ impl EngineState {
for item in first.aliases.into_iter() {
last.aliases.insert(item.0, item.1);
}
for item in first.modules.into_iter() {
last.modules.insert(item.0, item.1);
}
}
}
@ -113,6 +118,11 @@ impl EngineState {
}
}
pub fn print_contents(&self) {
let string = String::from_utf8_lossy(&self.file_contents);
println!("{}", string);
}
pub fn find_decl(&self, name: &[u8]) -> Option<DeclId> {
for scope in self.scope.iter().rev() {
if let Some(decl_id) = scope.decls.get(name) {
@ -123,6 +133,24 @@ impl EngineState {
None
}
pub fn find_commands_by_prefix(&self, name: &[u8]) -> Vec<Vec<u8>> {
let mut output = vec![];
for scope in self.scope.iter().rev() {
for decl in &scope.decls {
if decl.0.starts_with(name) {
output.push(decl.0.clone());
}
}
}
output
}
pub fn get_span_contents(&self, span: &Span) -> &[u8] {
&self.file_contents[span.start..span.end]
}
pub fn get_var(&self, var_id: VarId) -> &Type {
self.vars
.get(var_id)
@ -136,6 +164,21 @@ impl EngineState {
.expect("internal error: missing declaration")
}
pub fn get_decls(&self) -> Vec<Signature> {
let mut output = vec![];
for decl in self.decls.iter() {
if decl.get_block_id().is_none() {
let mut signature = (*decl).signature();
signature.usage = decl.usage().to_string();
signature.extra_usage = decl.extra_usage().to_string();
output.push(signature);
}
}
output
}
pub fn get_block(&self, block_id: BlockId) -> &Block {
self.blocks
.get(block_id)
@ -272,6 +315,37 @@ impl<'a> StateWorkingSet<'a> {
self.num_blocks() - 1
}
pub fn add_module(&mut self, name: &str, block: Block) -> BlockId {
let name = name.as_bytes().to_vec();
self.delta.blocks.push(block);
let block_id = self.num_blocks() - 1;
let scope_frame = self
.delta
.scope
.last_mut()
.expect("internal error: missing required scope frame");
scope_frame.modules.insert(name, block_id);
block_id
}
pub fn activate_overlay(&mut self, overlay: Vec<(Vec<u8>, DeclId)>) {
// TODO: This will overwrite all existing definitions in a scope. When we add deactivate,
// we need to re-think how to make it recoverable.
let scope_frame = self
.delta
.scope
.last_mut()
.expect("internal error: missing required scope frame");
for (name, decl_id) in overlay {
scope_frame.decls.insert(name, decl_id);
}
}
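// A hedged usage sketch: a module's exports (see Block::exports) could be spliced
// into the current scope like this, assuming `block_id` names a parsed module:
//
//     let overlay = working_set.get_block(block_id).exports.clone();
//     working_set.activate_overlay(overlay);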
pub fn next_span_start(&self) -> usize {
self.permanent_state.next_span_start() + self.delta.file_contents.len()
}
@ -357,6 +431,22 @@ impl<'a> StateWorkingSet<'a> {
None
}
pub fn find_module(&self, name: &[u8]) -> Option<BlockId> {
for scope in self.delta.scope.iter().rev() {
if let Some(block_id) = scope.modules.get(name) {
return Some(*block_id);
}
}
for scope in self.permanent_state.scope.iter().rev() {
if let Some(block_id) = scope.modules.get(name) {
return Some(*block_id);
}
}
None
}
// pub fn update_decl(&mut self, decl_id: usize, block: Option<BlockId>) {
// let decl = self.get_decl_mut(decl_id);
// decl.body = block;
@ -496,6 +586,24 @@ impl<'a> StateWorkingSet<'a> {
}
}
pub fn find_commands_by_prefix(&self, name: &[u8]) -> Vec<Vec<u8>> {
let mut output = vec![];
for scope in self.delta.scope.iter().rev() {
for decl in &scope.decls {
if decl.0.starts_with(name) {
output.push(decl.0.clone());
}
}
}
let mut permanent = self.permanent_state.find_commands_by_prefix(name);
output.append(&mut permanent);
output
}
pub fn get_block(&self, block_id: BlockId) -> &Block {
let num_permanent_blocks = self.permanent_state.num_blocks();
if block_id < num_permanent_blocks {
@ -513,95 +621,83 @@ impl<'a> StateWorkingSet<'a> {
}
}
impl<'a> codespan_reporting::files::Files<'a> for StateWorkingSet<'a> {
type FileId = usize;
type Name = String;
type Source = String;
fn name(&'a self, id: Self::FileId) -> Result<Self::Name, codespan_reporting::files::Error> {
Ok(self.get_filename(id))
}
fn source(
&'a self,
id: Self::FileId,
) -> Result<Self::Source, codespan_reporting::files::Error> {
Ok(self.get_file_source(id))
}
fn line_index(
&'a self,
id: Self::FileId,
byte_index: usize,
) -> Result<usize, codespan_reporting::files::Error> {
let source = self.get_file_source(id);
let mut count = 0;
for byte in source.bytes().enumerate() {
if byte.0 == byte_index {
// println!("count: {} for file: {} index: {}", count, id, byte_index);
return Ok(count);
}
if byte.1 == b'\n' {
count += 1;
}
impl<'a> miette::SourceCode for &StateWorkingSet<'a> {
fn read_span<'b>(
&'b self,
span: &miette::SourceSpan,
context_lines_before: usize,
context_lines_after: usize,
) -> Result<Box<dyn miette::SpanContents + 'b>, miette::MietteError> {
let debugging = std::env::var("MIETTE_DEBUG").is_ok();
if debugging {
let finding_span = "Finding span in StateWorkingSet";
dbg!(finding_span, span);
}
// println!("count: {} for file: {} index: {}", count, id, byte_index);
Ok(count)
}
fn line_range(
&'a self,
id: Self::FileId,
line_index: usize,
) -> Result<Range<usize>, codespan_reporting::files::Error> {
let source = self.get_file_source(id);
let mut count = 0;
let mut start = Some(0);
let mut end = None;
for byte in source.bytes().enumerate() {
#[allow(clippy::comparison_chain)]
if count > line_index {
let start = start.expect("internal error: couldn't find line");
let end = end.expect("internal error: couldn't find line");
// println!(
// "Span: {}..{} for fileid: {} index: {}",
// start, end, id, line_index
// );
return Ok(start..end);
} else if count == line_index {
end = Some(byte.0 + 1);
for (filename, start, end) in self.files() {
if debugging {
dbg!(&filename, start, end);
}
if span.offset() >= *start && span.offset() + span.len() <= *end {
if debugging {
let found_file = "Found matching file";
dbg!(found_file);
}
let our_span = Span {
start: *start,
end: *end,
};
// We need to move to a local span because we're only reading
// the specific file contents via self.get_span_contents.
let local_span = (span.offset() - *start, span.len()).into();
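// For example, if this file's contents begin at global offset 100 and the
// requested span starts at offset 105 with length 3, the local span is (5, 3).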
if debugging {
dbg!(&local_span);
}
let span_contents = self.get_span_contents(our_span);
if debugging {
dbg!(String::from_utf8_lossy(span_contents));
}
let span_contents = span_contents.read_span(
&local_span,
context_lines_before,
context_lines_after,
)?;
let content_span = span_contents.span();
// Back to "global" indexing
let retranslated = (content_span.offset() + start, content_span.len()).into();
if debugging {
dbg!(&retranslated);
}
#[allow(clippy::comparison_chain)]
if byte.1 == b'\n' {
count += 1;
if count > line_index {
break;
} else if count == line_index {
start = Some(byte.0 + 1);
let data = span_contents.data();
if filename == "<cli>" {
if debugging {
let success_cli = "Successfully read CLI span";
dbg!(success_cli, String::from_utf8_lossy(data));
}
return Ok(Box::new(miette::MietteSpanContents::new(
data,
retranslated,
span_contents.line(),
span_contents.column(),
span_contents.line_count(),
)));
} else {
if debugging {
let success_file = "Successfully read file span";
dbg!(success_file);
}
return Ok(Box::new(miette::MietteSpanContents::new_named(
filename.clone(),
data,
retranslated,
span_contents.line(),
span_contents.column(),
span_contents.line_count(),
)));
}
}
}
match (start, end) {
(Some(start), Some(end)) => {
// println!(
// "Span: {}..{} for fileid: {} index: {}",
// start, end, id, line_index
// );
Ok(start..end)
}
_ => Err(codespan_reporting::files::Error::FileMissing),
}
Err(miette::MietteError::OutOfBounds)
}
}


@ -1,7 +1,7 @@
use super::EngineState;
use std::{cell::RefCell, collections::HashMap, rc::Rc};
use crate::{ShellError, Value, VarId};
use crate::{ShellError, Signature, Value, VarId};
#[derive(Clone)]
pub struct EvaluationContext {
@ -46,6 +46,10 @@ impl EvaluationContext {
pub fn print_stack(&self) {
self.stack.print_stack();
}
pub fn get_commands_info(&self) -> Vec<Signature> {
self.engine_state.borrow().get_decls()
}
}
#[derive(Debug)]
@ -104,6 +108,10 @@ impl Stack {
})))
}
pub fn get_env_vars(&self) -> HashMap<String, String> {
self.0.borrow().env_vars.clone()
}
pub fn print_stack(&self) {
println!("===frame===");
println!("vars:");


@ -1,24 +1,81 @@
use miette::Diagnostic;
use serde::{Deserialize, Serialize};
use thiserror::Error;
use crate::{ast::Operator, Span, Type};
#[derive(Debug, Clone)]
#[derive(Debug, Clone, Error, Diagnostic, Serialize, Deserialize)]
pub enum ShellError {
#[error("Type mismatch during operation.")]
#[diagnostic(code(nu::shell::type_mismatch), url(docsrs))]
OperatorMismatch {
#[label = "type mismatch for operator"]
op_span: Span,
lhs_ty: Type,
#[label("{lhs_ty}")]
lhs_span: Span,
rhs_ty: Type,
#[label("{rhs_ty}")]
rhs_span: Span,
},
UnsupportedOperator(Operator, Span),
UnknownOperator(String, Span),
ExternalNotSupported(Span),
#[error("Unsupported operator: {0}.")]
#[diagnostic(code(nu::shell::unsupported_operator), url(docsrs))]
UnsupportedOperator(Operator, #[label = "unsupported operator"] Span),
#[error("Unsupported operator: {0}.")]
#[diagnostic(code(nu::shell::unknown_operator), url(docsrs))]
UnknownOperator(String, #[label = "unsupported operator"] Span),
#[error("External commands not yet supported")]
#[diagnostic(code(nu::shell::external_commands), url(docsrs))]
ExternalNotSupported(#[label = "external not supported"] Span),
#[error("Internal error: {0}.")]
#[diagnostic(code(nu::shell::internal_error), url(docsrs))]
InternalError(String),
VariableNotFoundAtRuntime(Span),
CantConvert(String, Span),
DivisionByZero(Span),
CannotCreateRange(Span),
AccessBeyondEnd(usize, Span),
AccessBeyondEndOfStream(Span),
IncompatiblePathAccess(String, Span),
CantFindColumn(Span),
#[error("Variable not found")]
#[diagnostic(code(nu::shell::variable_not_found), url(docsrs))]
VariableNotFoundAtRuntime(#[label = "variable not found"] Span),
#[error("Can't convert to {0}.")]
#[diagnostic(code(nu::shell::cant_convert), url(docsrs))]
CantConvert(String, #[label("can't convert to {0}")] Span),
#[error("Division by zero.")]
#[diagnostic(code(nu::shell::division_by_zero), url(docsrs))]
DivisionByZero(#[label("division by zero")] Span),
#[error("Can't convert range to countable values")]
#[diagnostic(code(nu::shell::range_to_countable), url(docsrs))]
CannotCreateRange(#[label = "can't convert to countable values"] Span),
#[error("Row number too large (max: {0}).")]
#[diagnostic(code(nu::shell::access_beyond_end), url(docsrs))]
AccessBeyondEnd(usize, #[label = "too large"] Span),
#[error("Row number too large.")]
#[diagnostic(code(nu::shell::access_beyond_end_of_stream), url(docsrs))]
AccessBeyondEndOfStream(#[label = "too large"] Span),
#[error("Data cannot be accessed with a cell path")]
#[diagnostic(code(nu::shell::incompatible_path_access), url(docsrs))]
IncompatiblePathAccess(String, #[label("{0} doesn't support cell paths")] Span),
#[error("Cannot find column")]
#[diagnostic(code(nu::shell::column_not_found), url(docsrs))]
CantFindColumn(#[label = "cannot find column"] Span),
#[error("External command")]
#[diagnostic(code(nu::shell::external_command), url(docsrs))]
ExternalCommand(String, #[label("{0}")] Span),
#[error("Unsupported input")]
#[diagnostic(code(nu::shell::unsupported_input), url(docsrs))]
UnsupportedInput(String, #[label("{0}")] Span),
#[error("Flag not found")]
#[diagnostic(code(nu::shell::flag_not_found), url(docsrs))]
FlagNotFound(String, #[label("{0} not found")] Span),
}


@ -1,9 +1,23 @@
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
use miette::SourceSpan;
use serde::{Deserialize, Serialize};
pub struct Spanned<T> {
pub item: T,
pub span: Span,
}
#[derive(Clone, Copy, Debug, PartialEq, Eq, Serialize, Deserialize)]
pub struct Span {
pub start: usize,
pub end: usize,
}
impl From<Span> for SourceSpan {
fn from(s: Span) -> Self {
Self::new(s.start.into(), (s.end - s.start).into())
}
}
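// For example, Span { start: 10, end: 14 } maps to a SourceSpan at offset 10 with length 4.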
impl Span {
pub fn new(start: usize, end: usize) -> Span {
Span { start, end }


@ -33,8 +33,11 @@ pub enum SyntaxShape {
/// A glob pattern is allowed, eg `foo*`
GlobPattern,
/// A module path pattern used for imports
ImportPattern,
/// A block is allowed, eg `{start this thing}`
Block,
Block(Option<Vec<SyntaxShape>>),
/// A table is allowed, eg `[[first, second]; [1, 2]]`
Table,
@ -69,20 +72,25 @@ pub enum SyntaxShape {
/// A general expression, eg `1 + 2` or `foo --bar`
Expression,
/// A custom shape with custom completion logic
Custom(Box<SyntaxShape>, String),
}
impl SyntaxShape {
pub fn to_type(&self) -> Type {
match self {
SyntaxShape::Any => Type::Unknown,
SyntaxShape::Block => Type::Block,
SyntaxShape::Block(_) => Type::Block,
SyntaxShape::CellPath => Type::Unknown,
SyntaxShape::Custom(custom, _) => custom.to_type(),
SyntaxShape::Duration => Type::Duration,
SyntaxShape::Expression => Type::Unknown,
SyntaxShape::FilePath => Type::FilePath,
SyntaxShape::Filesize => Type::Filesize,
SyntaxShape::FullCellPath => Type::Unknown,
SyntaxShape::GlobPattern => Type::String,
SyntaxShape::ImportPattern => Type::Unknown,
SyntaxShape::Int => Type::Int,
SyntaxShape::List(x) => {
let contents = x.to_type();


@ -1,6 +1,8 @@
use serde::{Deserialize, Serialize};
use std::fmt::Display;
#[derive(Clone, Debug, PartialEq, Eq)]
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
pub enum Type {
Int,
Float,
@ -16,9 +18,11 @@ pub enum Type {
Number,
Nothing,
Record(Vec<String>, Vec<Type>),
Table,
ValueStream,
Unknown,
Error,
Binary,
}
impl Display for Type {
@ -34,6 +38,7 @@ impl Display for Type {
Type::Int => write!(f, "int"),
Type::Range => write!(f, "range"),
Type::Record(cols, vals) => write!(f, "record<{}, {:?}>", cols.join(", "), vals),
Type::Table => write!(f, "table"),
Type::List(l) => write!(f, "list<{}>", l),
Type::Nothing => write!(f, "nothing"),
Type::Number => write!(f, "number"),
@ -41,6 +46,7 @@ impl Display for Type {
Type::ValueStream => write!(f, "value stream"),
Type::Unknown => write!(f, "unknown"),
Type::Error => write!(f, "error"),
Type::Binary => write!(f, "binary"),
}
}
}


@ -4,17 +4,18 @@ mod stream;
pub use range::*;
pub use row::*;
use serde::{Deserialize, Serialize};
pub use stream::*;
use std::fmt::Debug;
use crate::ast::{PathMember, RangeInclusion};
use crate::ast::PathMember;
use crate::{span, BlockId, Span, Type};
use crate::ShellError;
/// Core structured values that pass through the pipeline in engine-q
#[derive(Debug, Clone)]
#[derive(Debug, Clone, Serialize, Deserialize)]
pub enum Value {
Bool {
val: bool,
@ -24,6 +25,14 @@ pub enum Value {
val: i64,
span: Span,
},
Filesize {
val: u64,
span: Span,
},
Duration {
val: u64,
span: Span,
},
Range {
val: Box<Range>,
span: Span,
@ -59,6 +68,10 @@ pub enum Value {
Error {
error: ShellError,
},
Binary {
val: Vec<u8>,
span: Span,
},
}
impl Value {
@ -76,6 +89,8 @@ impl Value {
Value::Bool { span, .. } => *span,
Value::Int { span, .. } => *span,
Value::Float { span, .. } => *span,
Value::Filesize { span, .. } => *span,
Value::Duration { span, .. } => *span,
Value::Range { span, .. } => *span,
Value::String { span, .. } => *span,
Value::Record { span, .. } => *span,
@ -83,6 +98,7 @@ impl Value {
Value::Block { span, .. } => *span,
Value::Stream { span, .. } => *span,
Value::Nothing { span, .. } => *span,
Value::Binary { span, .. } => *span,
}
}
@ -92,6 +108,8 @@ impl Value {
Value::Bool { span, .. } => *span = new_span,
Value::Int { span, .. } => *span = new_span,
Value::Float { span, .. } => *span = new_span,
Value::Filesize { span, .. } => *span = new_span,
Value::Duration { span, .. } => *span = new_span,
Value::Range { span, .. } => *span = new_span,
Value::String { span, .. } => *span = new_span,
Value::Record { span, .. } => *span = new_span,
@ -100,6 +118,7 @@ impl Value {
Value::Block { span, .. } => *span = new_span,
Value::Nothing { span, .. } => *span = new_span,
Value::Error { .. } => {}
Value::Binary { span, .. } => *span = new_span,
}
self
@ -111,6 +130,8 @@ impl Value {
Value::Bool { .. } => Type::Bool,
Value::Int { .. } => Type::Int,
Value::Float { .. } => Type::Float,
Value::Filesize { .. } => Type::Filesize,
Value::Duration { .. } => Type::Duration,
Value::Range { .. } => Type::Range,
Value::String { .. } => Type::String,
Value::Record { cols, vals, .. } => {
@ -121,6 +142,7 @@ impl Value {
Value::Block { .. } => Type::Block,
Value::Stream { .. } => Type::ValueStream,
Value::Error { .. } => Type::Error,
Value::Binary { .. } => Type::Binary,
}
}
@ -130,21 +152,13 @@ impl Value {
Value::Bool { val, .. } => val.to_string(),
Value::Int { val, .. } => val.to_string(),
Value::Float { val, .. } => val.to_string(),
Value::Filesize { val, .. } => format!("{} bytes", val),
Value::Duration { val, .. } => format!("{} ns", val),
Value::Range { val, .. } => {
let vals: Vec<i64> = match (&val.from, &val.to) {
(Value::Int { val: from, .. }, Value::Int { val: to, .. }) => {
match val.inclusion {
RangeInclusion::Inclusive => (*from..=*to).collect(),
RangeInclusion::RightExclusive => (*from..*to).collect(),
}
}
_ => Vec::new(),
};
format!(
"range: [{}]",
vals.iter()
.map(|x| x.to_string())
val.into_iter()
.map(|x| x.into_string())
.collect::<Vec<String>>()
.join(", ")
)
@ -169,6 +183,38 @@ impl Value {
Value::Block { val, .. } => format!("<Block {}>", val),
Value::Nothing { .. } => String::new(),
Value::Error { error } => format!("{:?}", error),
Value::Binary { val, .. } => format!("{:?}", val),
}
}
pub fn collect_string(self) -> String {
match self {
Value::Bool { val, .. } => val.to_string(),
Value::Int { val, .. } => val.to_string(),
Value::Float { val, .. } => val.to_string(),
Value::Filesize { val, .. } => format!("{} bytes", val),
Value::Duration { val, .. } => format!("{} ns", val),
Value::Range { val, .. } => val
.into_iter()
.map(|x| x.into_string())
.collect::<Vec<String>>()
.join(", "),
Value::String { val, .. } => val,
Value::Stream { stream, .. } => stream.collect_string(),
Value::List { vals: val, .. } => val
.into_iter()
.map(|x| x.collect_string())
.collect::<Vec<_>>()
.join("\n"),
Value::Record { vals, .. } => vals
.into_iter()
.map(|y| y.collect_string())
.collect::<Vec<_>>()
.join("\n"),
Value::Block { val, .. } => format!("<Block {}>", val),
Value::Nothing { .. } => String::new(),
Value::Error { error } => format!("{:?}", error),
Value::Binary { val, .. } => format!("{:?}", val),
}
}
@ -180,9 +226,9 @@ impl Value {
}
/// Follow a given column path into the value: for example accessing nth elements in a stream or list
pub fn follow_cell_path(self, column_path: &[PathMember]) -> Result<Value, ShellError> {
pub fn follow_cell_path(self, cell_path: &[PathMember]) -> Result<Value, ShellError> {
let mut current = self;
for member in column_path {
for member in cell_path {
// FIXME: this uses a few extra clones for simplicity, but there may be a way
// to traverse the path without them
match member {
@ -278,6 +324,24 @@ impl Value {
Ok(current)
}
pub fn string(s: &str, span: Span) -> Value {
Value::String {
val: s.into(),
span,
}
}
pub fn is_true(&self) -> bool {
matches!(self, Value::Bool { val: true, .. })
}
pub fn columns(&self) -> Vec<String> {
match self {
Value::Record { cols, .. } => cols.clone(),
_ => vec![],
}
}
}
impl PartialEq for Value {

Some files were not shown because too many files have changed in this diff