diff --git a/.azure/azure-pipelines.yml b/.azure/azure-pipelines.yml
index e1f9b9368..2ab7e05c4 100644
--- a/.azure/azure-pipelines.yml
+++ b/.azure/azure-pipelines.yml
@@ -5,10 +5,25 @@ strategy:
   matrix:
     linux-nightly:
       image: ubuntu-16.04
+      style: 'unflagged'
     macos-nightly:
       image: macos-10.14
+      style: 'unflagged'
     windows-nightly:
       image: vs2017-win2016
+      style: 'unflagged'
+    linux-nightly-canary:
+      image: ubuntu-16.04
+      style: 'canary'
+    macos-nightly-canary:
+      image: macos-10.14
+      style: 'canary'
+    windows-nightly-canary:
+      image: vs2017-win2016
+      style: 'canary'
+    fmt:
+      image: ubuntu-16.04
+      style: 'fmt'
 
 pool:
   vmImage: $(image)
@@ -27,6 +42,11 @@ steps:
       rustup component add rustfmt --toolchain `cat rust-toolchain`
     displayName: Install Rust
   - bash: RUSTFLAGS="-D warnings" cargo test --all-features
+    condition: eq(variables['style'], 'unflagged')
+    displayName: Run tests
+  - bash: NUSHELL_ENABLE_ALL_FLAGS=1 RUSTFLAGS="-D warnings" cargo test --all-features
+    condition: eq(variables['style'], 'canary')
     displayName: Run tests
   - bash: cargo fmt --all -- --check
+    condition: eq(variables['style'], 'fmt')
     displayName: Lint
diff --git a/Cargo.lock b/Cargo.lock index 852fbd610..da4718920 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2,7 +2,7 @@ # It is not intended for manual editing. [[package]] name = "adler32" -version = "1.0.3" +version = "1.0.4" source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] @@ -47,10 +47,10 @@ source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] name = "arrayvec" -version = "0.4.11" +version = "0.4.12" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "nodrop 0.1.13 (registry+https://github.com/rust-lang/crates.io-index)", + "nodrop 0.1.14 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -67,9 +67,9 @@ name = "async-stream-impl" version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "proc-macro2 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)", + "proc-macro2 1.0.5 (registry+https://github.com/rust-lang/crates.io-index)", "quote 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)", - "syn 1.0.4 (registry+https://github.com/rust-lang/crates.io-index)", + "syn 1.0.5 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -77,24 +77,24 @@ name = "atty" version = "0.2.13" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", "winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "autocfg" -version = "0.1.5" +version = "0.1.6" source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] name = "backtrace" -version = "0.3.34" +version = "0.3.38" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "backtrace-sys 0.1.31 (registry+https://github.com/rust-lang/crates.io-index)", - "cfg-if 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", - "rustc-demangle 0.1.15 (registry+https://github.com/rust-lang/crates.io-index)", + "cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", + "rustc-demangle 0.1.16 (registry+https://github.com/rust-lang/crates.io-index)", 
] [[package]] @@ -103,7 +103,7 @@ version = "0.1.31" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "cc 1.0.45 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -119,10 +119,10 @@ name = "battery" version = "0.7.4" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "cfg-if 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)", + "cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", "core-foundation 0.6.4 (registry+https://github.com/rust-lang/crates.io-index)", "lazycell 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", "mach 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)", "nix 0.14.1 (registry+https://github.com/rust-lang/crates.io-index)", "num-traits 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)", @@ -138,32 +138,32 @@ dependencies = [ "num-bigint 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)", "num-integer 0.1.41 (registry+https://github.com/rust-lang/crates.io-index)", "num-traits 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)", - "serde 1.0.100 (registry+https://github.com/rust-lang/crates.io-index)", + "serde 1.0.101 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "bincode" -version = "1.1.4" +version = "1.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "autocfg 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)", + "autocfg 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)", "byteorder 1.3.2 (registry+https://github.com/rust-lang/crates.io-index)", - "serde 1.0.100 (registry+https://github.com/rust-lang/crates.io-index)", + "serde 1.0.101 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "bitflags" -version = "1.1.0" +version = "1.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] name = "blake2b_simd" -version = "0.5.6" +version = "0.5.8" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "arrayref 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)", - "arrayvec 0.4.11 (registry+https://github.com/rust-lang/crates.io-index)", - "constant_time_eq 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)", + "arrayvec 0.4.12 (registry+https://github.com/rust-lang/crates.io-index)", + "constant_time_eq 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -180,34 +180,34 @@ dependencies = [ "chrono 0.4.9 (registry+https://github.com/rust-lang/crates.io-index)", "decimal 2.0.4 (registry+https://github.com/rust-lang/crates.io-index)", "hex 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", "linked-hash-map 0.5.2 (registry+https://github.com/rust-lang/crates.io-index)", "md5 0.6.1 (registry+https://github.com/rust-lang/crates.io-index)", - "rand 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)", - "serde 1.0.100 (registry+https://github.com/rust-lang/crates.io-index)", - "serde_json 1.0.40 
(registry+https://github.com/rust-lang/crates.io-index)", + "rand 0.7.2 (registry+https://github.com/rust-lang/crates.io-index)", + "serde 1.0.101 (registry+https://github.com/rust-lang/crates.io-index)", + "serde_json 1.0.41 (registry+https://github.com/rust-lang/crates.io-index)", "time 0.1.42 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "bstr" -version = "0.2.6" +version = "0.2.8" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "lazy_static 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)", + "lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)", "memchr 2.2.1 (registry+https://github.com/rust-lang/crates.io-index)", "regex-automata 0.1.8 (registry+https://github.com/rust-lang/crates.io-index)", - "serde 1.0.100 (registry+https://github.com/rust-lang/crates.io-index)", + "serde 1.0.101 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "bumpalo" -version = "2.5.0" +version = "2.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] name = "byte-unit" -version = "3.0.1" +version = "3.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] @@ -226,7 +226,7 @@ version = "0.4.12" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "byteorder 1.3.2 (registry+https://github.com/rust-lang/crates.io-index)", - "iovec 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)", + "iovec 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -234,7 +234,7 @@ name = "c2-chacha" version = "0.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "lazy_static 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)", + "lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)", "ppv-lite86 0.2.5 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -243,13 +243,13 @@ name = "cc" version = "1.0.45" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "jobserver 0.1.16 (registry+https://github.com/rust-lang/crates.io-index)", + "jobserver 0.1.17 (registry+https://github.com/rust-lang/crates.io-index)", "num_cpus 1.10.1 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "cfg-if" -version = "0.1.9" +version = "0.1.10" source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] @@ -257,10 +257,10 @@ name = "chrono" version = "0.4.9" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", "num-integer 0.1.41 (registry+https://github.com/rust-lang/crates.io-index)", "num-traits 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)", - "serde 1.0.100 (registry+https://github.com/rust-lang/crates.io-index)", + "serde 1.0.101 (registry+https://github.com/rust-lang/crates.io-index)", "time 0.1.42 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -279,7 +279,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "ansi_term 0.11.0 (registry+https://github.com/rust-lang/crates.io-index)", "atty 0.2.13 (registry+https://github.com/rust-lang/crates.io-index)", - "bitflags 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)", + "bitflags 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)", 
"strsim 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)", "textwrap 0.11.0 (registry+https://github.com/rust-lang/crates.io-index)", "unicode-width 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)", @@ -311,7 +311,7 @@ name = "cloudabi" version = "0.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "bitflags 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)", + "bitflags 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -319,19 +319,19 @@ name = "config" version = "0.9.3" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "lazy_static 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)", + "lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)", "nom 4.2.3 (registry+https://github.com/rust-lang/crates.io-index)", "rust-ini 0.13.0 (registry+https://github.com/rust-lang/crates.io-index)", - "serde 1.0.100 (registry+https://github.com/rust-lang/crates.io-index)", + "serde 1.0.101 (registry+https://github.com/rust-lang/crates.io-index)", "serde-hjson 0.8.2 (registry+https://github.com/rust-lang/crates.io-index)", - "serde_json 1.0.40 (registry+https://github.com/rust-lang/crates.io-index)", + "serde_json 1.0.41 (registry+https://github.com/rust-lang/crates.io-index)", "toml 0.4.10 (registry+https://github.com/rust-lang/crates.io-index)", "yaml-rust 0.4.3 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "constant_time_eq" -version = "0.1.3" +version = "0.1.4" source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] @@ -340,7 +340,7 @@ version = "0.6.4" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "core-foundation-sys 0.6.2 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -353,7 +353,7 @@ name = "crc32fast" version = "1.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "cfg-if 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)", + "cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -374,8 +374,8 @@ name = "crossbeam-utils" version = "0.6.6" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "cfg-if 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)", - "lazy_static 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)", + "cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", + "lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -409,7 +409,7 @@ dependencies = [ "crossterm_screen 0.2.5 (registry+https://github.com/rust-lang/crates.io-index)", "crossterm_utils 0.2.4 (registry+https://github.com/rust-lang/crates.io-index)", "crossterm_winapi 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", "winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -441,7 +441,7 @@ dependencies = [ "crossterm_cursor 0.2.6 (registry+https://github.com/rust-lang/crates.io-index)", "crossterm_utils 0.2.4 (registry+https://github.com/rust-lang/crates.io-index)", "crossterm_winapi 0.1.5 
(registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -450,7 +450,7 @@ version = "0.2.4" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "crossterm_winapi 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", "winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -467,11 +467,11 @@ name = "csv" version = "1.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "bstr 0.2.6 (registry+https://github.com/rust-lang/crates.io-index)", + "bstr 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)", "csv-core 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)", "itoa 0.4.4 (registry+https://github.com/rust-lang/crates.io-index)", - "ryu 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)", - "serde 1.0.100 (registry+https://github.com/rust-lang/crates.io-index)", + "ryu 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)", + "serde 1.0.101 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -484,11 +484,11 @@ dependencies = [ [[package]] name = "ctor" -version = "0.1.9" +version = "0.1.12" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "quote 0.6.13 (registry+https://github.com/rust-lang/crates.io-index)", - "syn 0.15.43 (registry+https://github.com/rust-lang/crates.io-index)", + "quote 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)", + "syn 1.0.5 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -502,30 +502,29 @@ dependencies = [ [[package]] name = "curl" -version = "0.4.22" +version = "0.4.25" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "curl-sys 0.4.20 (registry+https://github.com/rust-lang/crates.io-index)", - "kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", + "curl-sys 0.4.23 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", "openssl-probe 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)", - "openssl-sys 0.9.49 (registry+https://github.com/rust-lang/crates.io-index)", - "schannel 0.1.15 (registry+https://github.com/rust-lang/crates.io-index)", + "openssl-sys 0.9.51 (registry+https://github.com/rust-lang/crates.io-index)", + "schannel 0.1.16 (registry+https://github.com/rust-lang/crates.io-index)", "socket2 0.3.11 (registry+https://github.com/rust-lang/crates.io-index)", - "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)", + "winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "curl-sys" -version = "0.4.20" +version = "0.4.23" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "cc 1.0.45 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", "libnghttp2-sys 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)", "libz-sys 1.0.25 
(registry+https://github.com/rust-lang/crates.io-index)", - "openssl-sys 0.9.49 (registry+https://github.com/rust-lang/crates.io-index)", - "pkg-config 0.3.15 (registry+https://github.com/rust-lang/crates.io-index)", + "openssl-sys 0.9.51 (registry+https://github.com/rust-lang/crates.io-index)", + "pkg-config 0.3.16 (registry+https://github.com/rust-lang/crates.io-index)", "vcpkg 0.2.7 (registry+https://github.com/rust-lang/crates.io-index)", "winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -536,7 +535,7 @@ version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "darwin-libproc-sys 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", "memchr 2.2.1 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -545,7 +544,7 @@ name = "darwin-libproc-sys" version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -553,12 +552,12 @@ name = "decimal" version = "2.0.4" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "bitflags 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)", + "bitflags 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)", "cc 1.0.45 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", "ord_subset 3.1.1 (registry+https://github.com/rust-lang/crates.io-index)", "rustc-serialize 0.3.24 (registry+https://github.com/rust-lang/crates.io-index)", - "serde 1.0.100 (registry+https://github.com/rust-lang/crates.io-index)", + "serde 1.0.101 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -566,7 +565,7 @@ name = "deflate" version = "0.7.20" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "adler32 1.0.3 (registry+https://github.com/rust-lang/crates.io-index)", + "adler32 1.0.4 (registry+https://github.com/rust-lang/crates.io-index)", "byteorder 1.3.2 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -575,9 +574,9 @@ name = "derive-new" version = "0.5.8" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "proc-macro2 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)", + "proc-macro2 1.0.5 (registry+https://github.com/rust-lang/crates.io-index)", "quote 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)", - "syn 1.0.4 (registry+https://github.com/rust-lang/crates.io-index)", + "syn 1.0.5 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -590,7 +589,7 @@ name = "directories" version = "1.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", "winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -599,7 +598,7 @@ name = "dirs" version = "1.0.5" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.62 
(registry+https://github.com/rust-lang/crates.io-index)", "redox_users 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", "winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -609,7 +608,7 @@ name = "dirs" version = "2.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "cfg-if 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)", + "cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", "dirs-sys 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -618,8 +617,8 @@ name = "dirs-sys" version = "0.3.4" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "cfg-if 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", + "cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", "redox_users 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", "winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -636,66 +635,44 @@ source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] name = "either" -version = "1.5.2" +version = "1.5.3" source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] name = "encode_unicode" -version = "0.3.5" +version = "0.3.6" source = "registry+https://github.com/rust-lang/crates.io-index" -[[package]] -name = "enum-utils" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "enum-utils-from-str 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", - "failure 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)", - "proc-macro2 0.4.30 (registry+https://github.com/rust-lang/crates.io-index)", - "quote 0.6.13 (registry+https://github.com/rust-lang/crates.io-index)", - "serde_derive_internals 0.24.1 (registry+https://github.com/rust-lang/crates.io-index)", - "syn 0.15.43 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "enum-utils-from-str" -version = "0.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "proc-macro2 0.4.30 (registry+https://github.com/rust-lang/crates.io-index)", - "quote 0.6.13 (registry+https://github.com/rust-lang/crates.io-index)", -] - [[package]] name = "env_logger" version = "0.6.2" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "atty 0.2.13 (registry+https://github.com/rust-lang/crates.io-index)", - "humantime 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)", + "humantime 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)", - "regex 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)", + "regex 1.3.1 (registry+https://github.com/rust-lang/crates.io-index)", "termcolor 1.0.5 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "failure" -version = "0.1.5" +version = "0.1.6" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "backtrace 0.3.34 (registry+https://github.com/rust-lang/crates.io-index)", - "failure_derive 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)", + "backtrace 0.3.38 (registry+https://github.com/rust-lang/crates.io-index)", + "failure_derive 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)", 
] [[package]] name = "failure_derive" -version = "0.1.5" +version = "0.1.6" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "proc-macro2 0.4.30 (registry+https://github.com/rust-lang/crates.io-index)", - "quote 0.6.13 (registry+https://github.com/rust-lang/crates.io-index)", - "syn 0.15.43 (registry+https://github.com/rust-lang/crates.io-index)", - "synstructure 0.10.2 (registry+https://github.com/rust-lang/crates.io-index)", + "proc-macro2 1.0.5 (registry+https://github.com/rust-lang/crates.io-index)", + "quote 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)", + "syn 1.0.5 (registry+https://github.com/rust-lang/crates.io-index)", + "synstructure 0.12.1 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -715,13 +692,13 @@ source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] name = "flate2" -version = "1.0.9" +version = "1.0.12" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ + "cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", "crc32fast 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", - "miniz-sys 0.1.12 (registry+https://github.com/rust-lang/crates.io-index)", - "miniz_oxide_c_api 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", + "miniz_oxide 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -736,7 +713,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] name = "futures" -version = "0.1.28" +version = "0.1.29" source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] @@ -804,7 +781,7 @@ name = "futures-util-preview" version = "0.3.0-alpha.18" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "futures 0.1.28 (registry+https://github.com/rust-lang/crates.io-index)", + "futures 0.1.29 (registry+https://github.com/rust-lang/crates.io-index)", "futures-channel-preview 0.3.0-alpha.18 (registry+https://github.com/rust-lang/crates.io-index)", "futures-core-preview 0.3.0-alpha.18 (registry+https://github.com/rust-lang/crates.io-index)", "futures-io-preview 0.3.0-alpha.18 (registry+https://github.com/rust-lang/crates.io-index)", @@ -826,11 +803,12 @@ dependencies = [ [[package]] name = "getrandom" -version = "0.1.8" +version = "0.1.12" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "cfg-if 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", + "cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", + "wasi 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -838,9 +816,9 @@ name = "getset" version = "0.0.8" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "proc-macro2 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)", + "proc-macro2 1.0.5 (registry+https://github.com/rust-lang/crates.io-index)", "quote 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)", - "syn 1.0.4 (registry+https://github.com/rust-lang/crates.io-index)", + "syn 1.0.5 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -848,8 +826,8 @@ name = "git2" version = "0.10.1" source = 
"registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "bitflags 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", + "bitflags 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", "libgit2-sys 0.9.1 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)", "url 2.1.0 (registry+https://github.com/rust-lang/crates.io-index)", @@ -870,33 +848,33 @@ dependencies = [ [[package]] name = "heim" -version = "0.0.8-alpha.1" +version = "0.0.8" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "heim-common 0.0.8-alpha.1 (registry+https://github.com/rust-lang/crates.io-index)", - "heim-cpu 0.0.8-alpha.1 (registry+https://github.com/rust-lang/crates.io-index)", - "heim-derive 0.0.8-alpha.1 (registry+https://github.com/rust-lang/crates.io-index)", - "heim-disk 0.0.8-alpha.1 (registry+https://github.com/rust-lang/crates.io-index)", - "heim-host 0.0.8-alpha.1 (registry+https://github.com/rust-lang/crates.io-index)", - "heim-memory 0.0.8-alpha.1 (registry+https://github.com/rust-lang/crates.io-index)", - "heim-net 0.0.8-alpha.1 (registry+https://github.com/rust-lang/crates.io-index)", - "heim-process 0.0.8-alpha.1 (registry+https://github.com/rust-lang/crates.io-index)", - "heim-runtime 0.0.4-alpha.1 (registry+https://github.com/rust-lang/crates.io-index)", - "heim-sensors 0.0.3-alpha.1 (registry+https://github.com/rust-lang/crates.io-index)", - "heim-virt 0.0.8-alpha.1 (registry+https://github.com/rust-lang/crates.io-index)", + "heim-common 0.0.8 (registry+https://github.com/rust-lang/crates.io-index)", + "heim-cpu 0.0.8 (registry+https://github.com/rust-lang/crates.io-index)", + "heim-derive 0.0.8 (registry+https://github.com/rust-lang/crates.io-index)", + "heim-disk 0.0.8 (registry+https://github.com/rust-lang/crates.io-index)", + "heim-host 0.0.8 (registry+https://github.com/rust-lang/crates.io-index)", + "heim-memory 0.0.8 (registry+https://github.com/rust-lang/crates.io-index)", + "heim-net 0.0.8 (registry+https://github.com/rust-lang/crates.io-index)", + "heim-process 0.0.8 (registry+https://github.com/rust-lang/crates.io-index)", + "heim-runtime 0.0.4 (registry+https://github.com/rust-lang/crates.io-index)", + "heim-sensors 0.0.3 (registry+https://github.com/rust-lang/crates.io-index)", + "heim-virt 0.0.8 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "heim-common" -version = "0.0.8-alpha.1" +version = "0.0.8" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "cfg-if 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)", + "cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", "core-foundation 0.6.4 (registry+https://github.com/rust-lang/crates.io-index)", "futures-core-preview 0.3.0-alpha.18 (registry+https://github.com/rust-lang/crates.io-index)", "futures-util-preview 0.3.0-alpha.18 (registry+https://github.com/rust-lang/crates.io-index)", - "lazy_static 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", + "lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", "mach 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)", "nix 
0.15.0 (registry+https://github.com/rust-lang/crates.io-index)", "pin-utils 0.1.0-alpha.4 (registry+https://github.com/rust-lang/crates.io-index)", @@ -906,41 +884,41 @@ dependencies = [ [[package]] name = "heim-cpu" -version = "0.0.8-alpha.1" +version = "0.0.8" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "cfg-if 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)", - "heim-common 0.0.8-alpha.1 (registry+https://github.com/rust-lang/crates.io-index)", - "heim-derive 0.0.8-alpha.1 (registry+https://github.com/rust-lang/crates.io-index)", - "heim-runtime 0.0.4-alpha.1 (registry+https://github.com/rust-lang/crates.io-index)", - "lazy_static 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", + "cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", + "heim-common 0.0.8 (registry+https://github.com/rust-lang/crates.io-index)", + "heim-derive 0.0.8 (registry+https://github.com/rust-lang/crates.io-index)", + "heim-runtime 0.0.4 (registry+https://github.com/rust-lang/crates.io-index)", + "lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", "mach 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)", "winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "heim-derive" -version = "0.0.8-alpha.1" +version = "0.0.8" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "proc-macro2 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)", + "proc-macro2 1.0.5 (registry+https://github.com/rust-lang/crates.io-index)", "quote 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)", - "syn 1.0.4 (registry+https://github.com/rust-lang/crates.io-index)", + "syn 1.0.5 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "heim-disk" -version = "0.0.8-alpha.1" +version = "0.0.8" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "bitflags 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)", - "cfg-if 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)", + "bitflags 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)", + "cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", "core-foundation 0.6.4 (registry+https://github.com/rust-lang/crates.io-index)", - "heim-common 0.0.8-alpha.1 (registry+https://github.com/rust-lang/crates.io-index)", - "heim-derive 0.0.8-alpha.1 (registry+https://github.com/rust-lang/crates.io-index)", - "heim-runtime 0.0.4-alpha.1 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", + "heim-common 0.0.8 (registry+https://github.com/rust-lang/crates.io-index)", + "heim-derive 0.0.8 (registry+https://github.com/rust-lang/crates.io-index)", + "heim-runtime 0.0.4 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", "mach 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)", "widestring 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)", "winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", @@ -948,66 +926,66 @@ dependencies = [ [[package]] name = "heim-host" -version = "0.0.8-alpha.1" +version = "0.0.8" source = 
"registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "cfg-if 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)", - "heim-common 0.0.8-alpha.1 (registry+https://github.com/rust-lang/crates.io-index)", - "heim-derive 0.0.8-alpha.1 (registry+https://github.com/rust-lang/crates.io-index)", - "heim-runtime 0.0.4-alpha.1 (registry+https://github.com/rust-lang/crates.io-index)", - "lazy_static 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", + "cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", + "heim-common 0.0.8 (registry+https://github.com/rust-lang/crates.io-index)", + "heim-derive 0.0.8 (registry+https://github.com/rust-lang/crates.io-index)", + "heim-runtime 0.0.4 (registry+https://github.com/rust-lang/crates.io-index)", + "lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", "mach 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)", - "platforms 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)", + "platforms 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)", "winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "heim-memory" -version = "0.0.8-alpha.1" +version = "0.0.8" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "cfg-if 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)", - "heim-common 0.0.8-alpha.1 (registry+https://github.com/rust-lang/crates.io-index)", - "heim-derive 0.0.8-alpha.1 (registry+https://github.com/rust-lang/crates.io-index)", - "heim-runtime 0.0.4-alpha.1 (registry+https://github.com/rust-lang/crates.io-index)", - "lazy_static 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", + "cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", + "heim-common 0.0.8 (registry+https://github.com/rust-lang/crates.io-index)", + "heim-derive 0.0.8 (registry+https://github.com/rust-lang/crates.io-index)", + "heim-runtime 0.0.4 (registry+https://github.com/rust-lang/crates.io-index)", + "lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", "mach 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)", "winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "heim-net" -version = "0.0.8-alpha.1" +version = "0.0.8" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "bitflags 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)", - "cfg-if 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)", - "heim-common 0.0.8-alpha.1 (registry+https://github.com/rust-lang/crates.io-index)", - "heim-derive 0.0.8-alpha.1 (registry+https://github.com/rust-lang/crates.io-index)", - "heim-runtime 0.0.4-alpha.1 (registry+https://github.com/rust-lang/crates.io-index)", + "bitflags 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)", + "cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", + "heim-common 0.0.8 (registry+https://github.com/rust-lang/crates.io-index)", + "heim-derive 0.0.8 (registry+https://github.com/rust-lang/crates.io-index)", + "heim-runtime 0.0.4 (registry+https://github.com/rust-lang/crates.io-index)", "hex 0.4.0 
(registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", "macaddr 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", "nix 0.15.0 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "heim-process" -version = "0.0.8-alpha.1" +version = "0.0.8" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "cfg-if 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)", + "cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", "darwin-libproc 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", - "heim-common 0.0.8-alpha.1 (registry+https://github.com/rust-lang/crates.io-index)", - "heim-cpu 0.0.8-alpha.1 (registry+https://github.com/rust-lang/crates.io-index)", - "heim-derive 0.0.8-alpha.1 (registry+https://github.com/rust-lang/crates.io-index)", - "heim-host 0.0.8-alpha.1 (registry+https://github.com/rust-lang/crates.io-index)", - "heim-net 0.0.8-alpha.1 (registry+https://github.com/rust-lang/crates.io-index)", - "heim-runtime 0.0.4-alpha.1 (registry+https://github.com/rust-lang/crates.io-index)", - "lazy_static 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", + "heim-common 0.0.8 (registry+https://github.com/rust-lang/crates.io-index)", + "heim-cpu 0.0.8 (registry+https://github.com/rust-lang/crates.io-index)", + "heim-derive 0.0.8 (registry+https://github.com/rust-lang/crates.io-index)", + "heim-host 0.0.8 (registry+https://github.com/rust-lang/crates.io-index)", + "heim-net 0.0.8 (registry+https://github.com/rust-lang/crates.io-index)", + "heim-runtime 0.0.4 (registry+https://github.com/rust-lang/crates.io-index)", + "lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", "mach 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)", "memchr 2.2.1 (registry+https://github.com/rust-lang/crates.io-index)", "ntapi 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)", @@ -1017,35 +995,35 @@ dependencies = [ [[package]] name = "heim-runtime" -version = "0.0.4-alpha.1" +version = "0.0.4" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "cfg-if 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)", + "cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", "futures-channel-preview 0.3.0-alpha.18 (registry+https://github.com/rust-lang/crates.io-index)", - "heim-common 0.0.8-alpha.1 (registry+https://github.com/rust-lang/crates.io-index)", - "lazy_static 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)", + "heim-common 0.0.8 (registry+https://github.com/rust-lang/crates.io-index)", + "lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)", "threadpool 1.7.1 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "heim-sensors" -version = "0.0.3-alpha.1" +version = "0.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "cfg-if 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)", - "heim-common 0.0.8-alpha.1 (registry+https://github.com/rust-lang/crates.io-index)", - "heim-derive 0.0.8-alpha.1 (registry+https://github.com/rust-lang/crates.io-index)", - "heim-runtime 0.0.4-alpha.1 
(registry+https://github.com/rust-lang/crates.io-index)", + "cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", + "heim-common 0.0.8 (registry+https://github.com/rust-lang/crates.io-index)", + "heim-derive 0.0.8 (registry+https://github.com/rust-lang/crates.io-index)", + "heim-runtime 0.0.4 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "heim-virt" -version = "0.0.8-alpha.1" +version = "0.0.8" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "cfg-if 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)", - "heim-common 0.0.8-alpha.1 (registry+https://github.com/rust-lang/crates.io-index)", - "heim-runtime 0.0.4-alpha.1 (registry+https://github.com/rust-lang/crates.io-index)", + "cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", + "heim-common 0.0.8 (registry+https://github.com/rust-lang/crates.io-index)", + "heim-runtime 0.0.4 (registry+https://github.com/rust-lang/crates.io-index)", "raw-cpuid 7.0.3 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -1071,7 +1049,7 @@ dependencies = [ [[package]] name = "humantime" -version = "1.2.0" +version = "1.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "quick-error 1.2.2 (registry+https://github.com/rust-lang/crates.io-index)", @@ -1089,11 +1067,11 @@ dependencies = [ [[package]] name = "image" -version = "0.22.2" +version = "0.22.3" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "byteorder 1.3.2 (registry+https://github.com/rust-lang/crates.io-index)", - "jpeg-decoder 0.1.15 (registry+https://github.com/rust-lang/crates.io-index)", + "jpeg-decoder 0.1.16 (registry+https://github.com/rust-lang/crates.io-index)", "num-iter 0.1.39 (registry+https://github.com/rust-lang/crates.io-index)", "num-rational 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)", "num-traits 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)", @@ -1105,7 +1083,7 @@ name = "indexmap" version = "1.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "serde 1.0.100 (registry+https://github.com/rust-lang/crates.io-index)", + "serde 1.0.101 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -1113,35 +1091,34 @@ name = "inflate" version = "0.4.5" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "adler32 1.0.3 (registry+https://github.com/rust-lang/crates.io-index)", + "adler32 1.0.4 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "iovec" -version = "0.1.2" +version = "0.1.4" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", - "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "isahc" -version = "0.7.1" +version = "0.7.4" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "bytes 0.4.12 (registry+https://github.com/rust-lang/crates.io-index)", "crossbeam-channel 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)", "crossbeam-utils 0.6.6 (registry+https://github.com/rust-lang/crates.io-index)", - "curl 0.4.22 (registry+https://github.com/rust-lang/crates.io-index)", - "curl-sys 0.4.20 (registry+https://github.com/rust-lang/crates.io-index)", + "curl 0.4.25 
(registry+https://github.com/rust-lang/crates.io-index)", + "curl-sys 0.4.23 (registry+https://github.com/rust-lang/crates.io-index)", "futures-io-preview 0.3.0-alpha.18 (registry+https://github.com/rust-lang/crates.io-index)", "futures-util-preview 0.3.0-alpha.18 (registry+https://github.com/rust-lang/crates.io-index)", "http 0.1.18 (registry+https://github.com/rust-lang/crates.io-index)", - "lazy_static 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)", + "lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)", "slab 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)", - "sluice 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)", + "sluice 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -1149,8 +1126,8 @@ name = "isatty" version = "0.1.9" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "cfg-if 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", + "cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", "redox_syscall 0.1.56 (registry+https://github.com/rust-lang/crates.io-index)", "winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -1160,7 +1137,7 @@ name = "itertools" version = "0.7.11" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "either 1.5.2 (registry+https://github.com/rust-lang/crates.io-index)", + "either 1.5.3 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -1168,7 +1145,7 @@ name = "itertools" version = "0.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "either 1.5.2 (registry+https://github.com/rust-lang/crates.io-index)", + "either 1.5.3 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -1178,17 +1155,17 @@ source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] name = "jobserver" -version = "0.1.16" +version = "0.1.17" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", + "getrandom 0.1.12 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)", - "rand 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "jpeg-decoder" -version = "0.1.15" +version = "0.1.16" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "byteorder 1.3.2 (registry+https://github.com/rust-lang/crates.io-index)", @@ -1196,10 +1173,10 @@ dependencies = [ [[package]] name = "js-sys" -version = "0.3.27" +version = "0.3.28" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "wasm-bindgen 0.2.50 (registry+https://github.com/rust-lang/crates.io-index)", + "wasm-bindgen 0.2.51 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -1220,8 +1197,8 @@ dependencies = [ "itertools 0.7.11 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)", "render-tree 0.1.1 (git+https://github.com/wycats/language-reporting)", - "serde 1.0.100 
(registry+https://github.com/rust-lang/crates.io-index)", - "serde_derive 1.0.98 (registry+https://github.com/rust-lang/crates.io-index)", + "serde 1.0.101 (registry+https://github.com/rust-lang/crates.io-index)", + "serde_derive 1.0.101 (registry+https://github.com/rust-lang/crates.io-index)", "termcolor 1.0.5 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -1232,7 +1209,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] name = "lazy_static" -version = "1.3.0" +version = "1.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] @@ -1242,19 +1219,19 @@ source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] name = "lexical-core" -version = "0.4.3" +version = "0.4.6" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "cfg-if 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)", + "arrayvec 0.4.12 (registry+https://github.com/rust-lang/crates.io-index)", + "cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", "rustc_version 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)", - "ryu 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)", - "stackvector 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)", - "static_assertions 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)", + "ryu 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)", + "static_assertions 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "libc" -version = "0.2.60" +version = "0.2.62" source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] @@ -1263,9 +1240,9 @@ version = "0.9.1" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "cc 1.0.45 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", "libz-sys 1.0.25 (registry+https://github.com/rust-lang/crates.io-index)", - "pkg-config 0.3.15 (registry+https://github.com/rust-lang/crates.io-index)", + "pkg-config 0.3.16 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -1274,7 +1251,7 @@ version = "0.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "cc 1.0.45 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -1283,7 +1260,7 @@ version = "0.16.0" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "cc 1.0.45 (registry+https://github.com/rust-lang/crates.io-index)", - "pkg-config 0.3.15 (registry+https://github.com/rust-lang/crates.io-index)", + "pkg-config 0.3.16 (registry+https://github.com/rust-lang/crates.io-index)", "vcpkg 0.2.7 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -1293,8 +1270,8 @@ version = "1.0.25" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "cc 1.0.45 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", - "pkg-config 0.3.15 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", + "pkg-config 0.3.16 (registry+https://github.com/rust-lang/crates.io-index)", "vcpkg 
0.2.7 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -1303,7 +1280,7 @@ name = "line-wrap" version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "safemem 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", + "safemem 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -1325,7 +1302,7 @@ name = "log" version = "0.4.8" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "cfg-if 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)", + "cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -1346,7 +1323,7 @@ name = "mach" version = "0.2.3" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -1354,7 +1331,7 @@ name = "mach" version = "0.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -1362,7 +1339,7 @@ name = "malloc_buf" version = "0.0.6" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -1380,7 +1357,7 @@ name = "memchr" version = "2.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -1394,35 +1371,15 @@ version = "2.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "mime 0.3.14 (registry+https://github.com/rust-lang/crates.io-index)", - "unicase 2.4.0 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "miniz-sys" -version = "0.1.12" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "cc 1.0.45 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", + "unicase 2.5.1 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "miniz_oxide" -version = "0.3.1" +version = "0.3.3" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "adler32 1.0.3 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "miniz_oxide_c_api" -version = "0.2.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "cc 1.0.45 (registry+https://github.com/rust-lang/crates.io-index)", - "crc32fast 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", - "miniz_oxide 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", + "adler32 1.0.4 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -1435,12 +1392,12 @@ name = "neso" version = "0.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "bincode 1.1.4 (registry+https://github.com/rust-lang/crates.io-index)", - "cfg-if 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)", + 
"bincode 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)", + "cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)", - "serde 1.0.100 (registry+https://github.com/rust-lang/crates.io-index)", - "serde_derive 1.0.98 (registry+https://github.com/rust-lang/crates.io-index)", - "wasm-bindgen 0.2.50 (registry+https://github.com/rust-lang/crates.io-index)", + "serde 1.0.101 (registry+https://github.com/rust-lang/crates.io-index)", + "serde_derive 1.0.101 (registry+https://github.com/rust-lang/crates.io-index)", + "wasm-bindgen 0.2.51 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -1448,10 +1405,10 @@ name = "nix" version = "0.14.1" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "bitflags 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)", + "bitflags 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)", "cc 1.0.45 (registry+https://github.com/rust-lang/crates.io-index)", - "cfg-if 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", + "cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", "void 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -1460,16 +1417,16 @@ name = "nix" version = "0.15.0" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "bitflags 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)", + "bitflags 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)", "cc 1.0.45 (registry+https://github.com/rust-lang/crates.io-index)", - "cfg-if 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", + "cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", "void 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "nodrop" -version = "0.1.13" +version = "0.1.14" source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] @@ -1483,14 +1440,33 @@ dependencies = [ [[package]] name = "nom" -version = "5.0.0" +version = "5.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "lexical-core 0.4.3 (registry+https://github.com/rust-lang/crates.io-index)", + "lexical-core 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)", "memchr 2.2.1 (registry+https://github.com/rust-lang/crates.io-index)", "version_check 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)", ] +[[package]] +name = "nom-tracable" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "nom 5.0.1 (registry+https://github.com/rust-lang/crates.io-index)", + "nom-tracable-macros 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)", + "nom_locate 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "nom-tracable-macros" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "quote 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)", + "syn 1.0.5 (registry+https://github.com/rust-lang/crates.io-index)", +] + [[package]] name = "nom_locate" version = "1.0.0" @@ -1498,7 +1474,7 @@ 
source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "bytecount 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)", "memchr 2.2.1 (registry+https://github.com/rust-lang/crates.io-index)", - "nom 5.0.0 (registry+https://github.com/rust-lang/crates.io-index)", + "nom 5.0.1 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -1520,7 +1496,7 @@ dependencies = [ "battery 0.7.4 (registry+https://github.com/rust-lang/crates.io-index)", "bigdecimal 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", "bson 0.14.0 (registry+https://github.com/rust-lang/crates.io-index)", - "byte-unit 3.0.1 (registry+https://github.com/rust-lang/crates.io-index)", + "byte-unit 3.0.3 (registry+https://github.com/rust-lang/crates.io-index)", "bytes 0.4.12 (registry+https://github.com/rust-lang/crates.io-index)", "chrono 0.4.9 (registry+https://github.com/rust-lang/crates.io-index)", "chrono-humanize 0.0.11 (registry+https://github.com/rust-lang/crates.io-index)", @@ -1532,16 +1508,15 @@ dependencies = [ "derive-new 0.5.8 (registry+https://github.com/rust-lang/crates.io-index)", "dirs 2.0.2 (registry+https://github.com/rust-lang/crates.io-index)", "dunce 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)", - "enum-utils 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", "futures-preview 0.3.0-alpha.18 (registry+https://github.com/rust-lang/crates.io-index)", "futures-timer 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)", "futures_codec 0.2.5 (registry+https://github.com/rust-lang/crates.io-index)", "getset 0.0.8 (registry+https://github.com/rust-lang/crates.io-index)", "git2 0.10.1 (registry+https://github.com/rust-lang/crates.io-index)", "glob 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)", - "heim 0.0.8-alpha.1 (registry+https://github.com/rust-lang/crates.io-index)", + "heim 0.0.8 (registry+https://github.com/rust-lang/crates.io-index)", "hex 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)", - "image 0.22.2 (registry+https://github.com/rust-lang/crates.io-index)", + "image 0.22.3 (registry+https://github.com/rust-lang/crates.io-index)", "indexmap 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)", "itertools 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)", "language-reporting 0.3.1 (git+https://github.com/wycats/language-reporting)", @@ -1549,30 +1524,31 @@ dependencies = [ "mime 0.3.14 (registry+https://github.com/rust-lang/crates.io-index)", "natural 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)", "neso 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)", - "nom 5.0.0 (registry+https://github.com/rust-lang/crates.io-index)", + "nom 5.0.1 (registry+https://github.com/rust-lang/crates.io-index)", + "nom-tracable 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)", "nom_locate 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)", "num-bigint 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)", "num-traits 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)", "onig_sys 69.1.0 (registry+https://github.com/rust-lang/crates.io-index)", "pin-utils 0.1.0-alpha.4 (registry+https://github.com/rust-lang/crates.io-index)", - "pretty-hex 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", + "pretty-hex 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", "pretty_assertions 0.6.1 (registry+https://github.com/rust-lang/crates.io-index)", "pretty_env_logger 0.3.1 
(registry+https://github.com/rust-lang/crates.io-index)", "prettytable-rs 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)", "ptree 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)", "rawkey 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)", - "regex 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)", - "roxmltree 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)", + "regex 1.3.1 (registry+https://github.com/rust-lang/crates.io-index)", + "roxmltree 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)", "rusqlite 0.20.0 (registry+https://github.com/rust-lang/crates.io-index)", "rustyline 5.0.3 (registry+https://github.com/rust-lang/crates.io-index)", "semver 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)", - "serde 1.0.100 (registry+https://github.com/rust-lang/crates.io-index)", + "serde 1.0.101 (registry+https://github.com/rust-lang/crates.io-index)", "serde-hjson 0.9.1 (registry+https://github.com/rust-lang/crates.io-index)", "serde_bytes 0.11.2 (registry+https://github.com/rust-lang/crates.io-index)", "serde_ini 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)", - "serde_json 1.0.40 (registry+https://github.com/rust-lang/crates.io-index)", + "serde_json 1.0.41 (registry+https://github.com/rust-lang/crates.io-index)", "serde_urlencoded 0.6.1 (registry+https://github.com/rust-lang/crates.io-index)", - "serde_yaml 0.8.9 (registry+https://github.com/rust-lang/crates.io-index)", + "serde_yaml 0.8.11 (registry+https://github.com/rust-lang/crates.io-index)", "shellexpand 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)", "sublime_fuzzy 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)", "subprocess 0.1.18 (registry+https://github.com/rust-lang/crates.io-index)", @@ -1584,7 +1560,6 @@ dependencies = [ "toml 0.5.3 (registry+https://github.com/rust-lang/crates.io-index)", "unicode-xid 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)", "url 2.1.0 (registry+https://github.com/rust-lang/crates.io-index)", - "uuid 0.7.4 (registry+https://github.com/rust-lang/crates.io-index)", "which 2.0.1 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -1593,10 +1568,10 @@ name = "num-bigint" version = "0.2.3" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "autocfg 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)", + "autocfg 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)", "num-integer 0.1.41 (registry+https://github.com/rust-lang/crates.io-index)", "num-traits 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)", - "serde 1.0.100 (registry+https://github.com/rust-lang/crates.io-index)", + "serde 1.0.101 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -1604,7 +1579,7 @@ name = "num-integer" version = "0.1.41" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "autocfg 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)", + "autocfg 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)", "num-traits 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -1613,7 +1588,7 @@ name = "num-iter" version = "0.1.39" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "autocfg 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)", + "autocfg 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)", "num-integer 0.1.41 
(registry+https://github.com/rust-lang/crates.io-index)", "num-traits 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -1623,7 +1598,7 @@ name = "num-rational" version = "0.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "autocfg 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)", + "autocfg 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)", "num-integer 0.1.41 (registry+https://github.com/rust-lang/crates.io-index)", "num-traits 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -1641,7 +1616,7 @@ name = "num-traits" version = "0.2.8" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "autocfg 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)", + "autocfg 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -1649,7 +1624,7 @@ name = "num_cpus" version = "1.10.1" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -1689,12 +1664,12 @@ dependencies = [ [[package]] name = "onig" -version = "4.3.2" +version = "4.3.3" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "bitflags 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)", - "lazy_static 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", + "bitflags 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)", + "lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", "onig_sys 69.1.0 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -1704,7 +1679,7 @@ version = "69.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "cc 1.0.45 (registry+https://github.com/rust-lang/crates.io-index)", - "pkg-config 0.3.15 (registry+https://github.com/rust-lang/crates.io-index)", + "pkg-config 0.3.16 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -1714,13 +1689,13 @@ source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] name = "openssl-sys" -version = "0.9.49" +version = "0.9.51" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "autocfg 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)", + "autocfg 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)", "cc 1.0.45 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", - "pkg-config 0.3.15 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", + "pkg-config 0.3.16 (registry+https://github.com/rust-lang/crates.io-index)", "vcpkg 0.2.7 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -1771,12 +1746,12 @@ source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] name = "pkg-config" -version = "0.3.15" +version = "0.3.16" source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] name = "platforms" -version = "0.2.0" +version = "0.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] @@ -1786,9 +1761,9 @@ source = 
"registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "base64 0.10.1 (registry+https://github.com/rust-lang/crates.io-index)", "byteorder 1.3.2 (registry+https://github.com/rust-lang/crates.io-index)", - "humantime 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)", + "humantime 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)", "line-wrap 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", - "serde 1.0.100 (registry+https://github.com/rust-lang/crates.io-index)", + "serde 1.0.101 (registry+https://github.com/rust-lang/crates.io-index)", "xml-rs 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -1797,7 +1772,7 @@ name = "png" version = "0.15.0" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "bitflags 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)", + "bitflags 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)", "crc32fast 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)", "deflate 0.7.20 (registry+https://github.com/rust-lang/crates.io-index)", "inflate 0.4.5 (registry+https://github.com/rust-lang/crates.io-index)", @@ -1810,7 +1785,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] name = "pretty-hex" -version = "0.1.0" +version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] @@ -1819,7 +1794,7 @@ version = "0.6.1" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "ansi_term 0.11.0 (registry+https://github.com/rust-lang/crates.io-index)", - "ctor 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)", + "ctor 0.1.12 (registry+https://github.com/rust-lang/crates.io-index)", "difference 2.0.0 (registry+https://github.com/rust-lang/crates.io-index)", "output_vt100 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -1841,23 +1816,15 @@ source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "atty 0.2.13 (registry+https://github.com/rust-lang/crates.io-index)", "csv 1.1.1 (registry+https://github.com/rust-lang/crates.io-index)", - "encode_unicode 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)", - "lazy_static 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)", + "encode_unicode 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)", + "lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)", "term 0.5.2 (registry+https://github.com/rust-lang/crates.io-index)", "unicode-width 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "proc-macro2" -version = "0.4.30" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "unicode-xid 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "proc-macro2" -version = "1.0.1" +version = "1.0.5" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "unicode-xid 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)", @@ -1873,9 +1840,9 @@ dependencies = [ "directories 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)", "isatty 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)", "petgraph 0.4.13 (registry+https://github.com/rust-lang/crates.io-index)", - "serde 1.0.100 (registry+https://github.com/rust-lang/crates.io-index)", + "serde 1.0.101 (registry+https://github.com/rust-lang/crates.io-index)", "serde-value 0.5.3 
(registry+https://github.com/rust-lang/crates.io-index)", - "serde_derive 1.0.98 (registry+https://github.com/rust-lang/crates.io-index)", + "serde_derive 1.0.101 (registry+https://github.com/rust-lang/crates.io-index)", "tint 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -1884,68 +1851,33 @@ name = "quick-error" version = "1.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" -[[package]] -name = "quote" -version = "0.6.13" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "proc-macro2 0.4.30 (registry+https://github.com/rust-lang/crates.io-index)", -] - [[package]] name = "quote" version = "1.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "proc-macro2 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)", + "proc-macro2 1.0.5 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "rand" -version = "0.6.5" +version = "0.7.2" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "autocfg 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", - "rand_chacha 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", - "rand_core 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)", - "rand_hc 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", - "rand_isaac 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", - "rand_jitter 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)", - "rand_os 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)", - "rand_pcg 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)", - "rand_xorshift 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", - "winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "rand" -version = "0.7.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "getrandom 0.1.8 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", + "getrandom 0.1.12 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", "rand_chacha 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)", - "rand_core 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)", + "rand_core 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)", "rand_hc 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)", ] -[[package]] -name = "rand_chacha" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "autocfg 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)", - "rand_core 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", -] - [[package]] name = "rand_chacha" version = "0.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "c2-chacha 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)", - "rand_core 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)", + "rand_core 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -1963,18 +1895,10 @@ source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] name = "rand_core" -version = "0.5.0" +version = "0.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" 
dependencies = [ - "getrandom 0.1.8 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "rand_hc" -version = "0.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "rand_core 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", + "getrandom 0.1.12 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -1982,25 +1906,7 @@ name = "rand_hc" version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "rand_core 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "rand_isaac" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "rand_core 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "rand_jitter" -version = "0.1.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", - "rand_core 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)", - "winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", + "rand_core 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -2010,35 +1916,18 @@ source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "cloudabi 0.0.3 (registry+https://github.com/rust-lang/crates.io-index)", "fuchsia-cprng 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", "rand_core 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)", "rdrand 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)", "winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", ] -[[package]] -name = "rand_pcg" -version = "0.1.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "autocfg 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)", - "rand_core 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "rand_xorshift" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "rand_core 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", -] - [[package]] name = "raw-cpuid" version = "7.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "bitflags 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)", + "bitflags 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)", "cc 1.0.45 (registry+https://github.com/rust-lang/crates.io-index)", "rustc_version 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -2077,7 +1966,7 @@ name = "redox_users" version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "failure 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)", + "failure 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)", "rand_os 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)", "redox_syscall 0.1.56 (registry+https://github.com/rust-lang/crates.io-index)", "rust-argon2 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)", @@ -2085,12 +1974,12 @@ dependencies = [ [[package]] name = "regex" -version = "1.2.1" +version = "1.3.1" source = 
"registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "aho-corasick 0.7.6 (registry+https://github.com/rust-lang/crates.io-index)", "memchr 2.2.1 (registry+https://github.com/rust-lang/crates.io-index)", - "regex-syntax 0.6.11 (registry+https://github.com/rust-lang/crates.io-index)", + "regex-syntax 0.6.12 (registry+https://github.com/rust-lang/crates.io-index)", "thread_local 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -2104,7 +1993,7 @@ dependencies = [ [[package]] name = "regex-syntax" -version = "0.6.11" +version = "0.6.12" source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] @@ -2132,10 +2021,10 @@ source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] name = "roxmltree" -version = "0.7.0" +version = "0.7.1" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "xmlparser 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)", + "xmlparser 0.10.0 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -2143,7 +2032,7 @@ name = "rusqlite" version = "0.20.0" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "bitflags 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)", + "bitflags 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)", "fallible-iterator 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)", "fallible-streaming-iterator 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)", "libsqlite3-sys 0.16.0 (registry+https://github.com/rust-lang/crates.io-index)", @@ -2158,7 +2047,7 @@ version = "0.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "base64 0.10.1 (registry+https://github.com/rust-lang/crates.io-index)", - "blake2b_simd 0.5.6 (registry+https://github.com/rust-lang/crates.io-index)", + "blake2b_simd 0.5.8 (registry+https://github.com/rust-lang/crates.io-index)", "crossbeam-utils 0.6.6 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -2169,7 +2058,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] name = "rustc-demangle" -version = "0.1.15" +version = "0.1.16" source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] @@ -2191,7 +2080,7 @@ version = "5.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "dirs 2.0.2 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)", "memchr 2.2.1 (registry+https://github.com/rust-lang/crates.io-index)", "nix 0.14.1 (registry+https://github.com/rust-lang/crates.io-index)", @@ -2203,12 +2092,12 @@ dependencies = [ [[package]] name = "ryu" -version = "1.0.0" +version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] name = "safemem" -version = "0.3.1" +version = "0.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] @@ -2221,10 +2110,10 @@ dependencies = [ [[package]] name = "schannel" -version = "0.1.15" +version = "0.1.16" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "lazy_static 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)", + "lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)", "winapi 0.3.8 
(registry+https://github.com/rust-lang/crates.io-index)", ] @@ -2248,10 +2137,10 @@ source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] name = "serde" -version = "1.0.100" +version = "1.0.101" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "serde_derive 1.0.98 (registry+https://github.com/rust-lang/crates.io-index)", + "serde_derive 1.0.101 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -2262,7 +2151,7 @@ dependencies = [ "lazy_static 0.2.11 (registry+https://github.com/rust-lang/crates.io-index)", "linked-hash-map 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)", "num-traits 0.1.43 (registry+https://github.com/rust-lang/crates.io-index)", - "regex 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)", + "regex 1.3.1 (registry+https://github.com/rust-lang/crates.io-index)", "serde 0.8.23 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -2271,10 +2160,10 @@ name = "serde-hjson" version = "0.9.1" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "lazy_static 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)", + "lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)", "linked-hash-map 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)", "num-traits 0.1.43 (registry+https://github.com/rust-lang/crates.io-index)", - "regex 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)", + "regex 1.3.1 (registry+https://github.com/rust-lang/crates.io-index)", "serde 0.8.23 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -2284,7 +2173,7 @@ version = "0.5.3" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "ordered-float 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)", - "serde 1.0.100 (registry+https://github.com/rust-lang/crates.io-index)", + "serde 1.0.101 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -2292,26 +2181,17 @@ name = "serde_bytes" version = "0.11.2" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "serde 1.0.100 (registry+https://github.com/rust-lang/crates.io-index)", + "serde 1.0.101 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "serde_derive" -version = "1.0.98" +version = "1.0.101" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "proc-macro2 0.4.30 (registry+https://github.com/rust-lang/crates.io-index)", - "quote 0.6.13 (registry+https://github.com/rust-lang/crates.io-index)", - "syn 0.15.43 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "serde_derive_internals" -version = "0.24.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "proc-macro2 0.4.30 (registry+https://github.com/rust-lang/crates.io-index)", - "syn 0.15.43 (registry+https://github.com/rust-lang/crates.io-index)", + "proc-macro2 1.0.5 (registry+https://github.com/rust-lang/crates.io-index)", + "quote 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)", + "syn 1.0.5 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -2320,19 +2200,19 @@ version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "result 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)", - "serde 1.0.100 (registry+https://github.com/rust-lang/crates.io-index)", + "serde 1.0.101 
(registry+https://github.com/rust-lang/crates.io-index)", "void 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "serde_json" -version = "1.0.40" +version = "1.0.41" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "indexmap 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)", "itoa 0.4.4 (registry+https://github.com/rust-lang/crates.io-index)", - "ryu 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)", - "serde 1.0.100 (registry+https://github.com/rust-lang/crates.io-index)", + "ryu 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)", + "serde 1.0.101 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -2350,18 +2230,18 @@ source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "dtoa 0.4.4 (registry+https://github.com/rust-lang/crates.io-index)", "itoa 0.4.4 (registry+https://github.com/rust-lang/crates.io-index)", - "serde 1.0.100 (registry+https://github.com/rust-lang/crates.io-index)", + "serde 1.0.101 (registry+https://github.com/rust-lang/crates.io-index)", "url 2.1.0 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "serde_yaml" -version = "0.8.9" +version = "0.8.11" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "dtoa 0.4.4 (registry+https://github.com/rust-lang/crates.io-index)", "linked-hash-map 0.5.2 (registry+https://github.com/rust-lang/crates.io-index)", - "serde 1.0.100 (registry+https://github.com/rust-lang/crates.io-index)", + "serde 1.0.101 (registry+https://github.com/rust-lang/crates.io-index)", "yaml-rust 0.4.3 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -2386,7 +2266,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] name = "sluice" -version = "0.4.1" +version = "0.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "futures-channel-preview 0.3.0-alpha.18 (registry+https://github.com/rust-lang/crates.io-index)", @@ -2404,8 +2284,8 @@ name = "socket2" version = "0.3.11" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "cfg-if 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", + "cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", "redox_syscall 0.1.56 (registry+https://github.com/rust-lang/crates.io-index)", "winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -2415,18 +2295,9 @@ name = "sourcefile" version = "0.1.4" source = "registry+https://github.com/rust-lang/crates.io-index" -[[package]] -name = "stackvector" -version = "1.0.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "rustc_version 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)", - "unreachable 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)", -] - [[package]] name = "static_assertions" -version = "0.3.3" +version = "0.3.4" source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] @@ -2445,7 +2316,7 @@ version = "0.1.18" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "crossbeam-utils 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.62 
(registry+https://github.com/rust-lang/crates.io-index)", "winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -2456,49 +2327,39 @@ source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "futures-preview 0.3.0-alpha.18 (registry+https://github.com/rust-lang/crates.io-index)", "http 0.1.18 (registry+https://github.com/rust-lang/crates.io-index)", - "isahc 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)", - "js-sys 0.3.27 (registry+https://github.com/rust-lang/crates.io-index)", + "isahc 0.7.4 (registry+https://github.com/rust-lang/crates.io-index)", + "js-sys 0.3.28 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)", "mime 0.3.14 (registry+https://github.com/rust-lang/crates.io-index)", "mime_guess 2.0.1 (registry+https://github.com/rust-lang/crates.io-index)", - "serde 1.0.100 (registry+https://github.com/rust-lang/crates.io-index)", - "serde_json 1.0.40 (registry+https://github.com/rust-lang/crates.io-index)", + "serde 1.0.101 (registry+https://github.com/rust-lang/crates.io-index)", + "serde_json 1.0.41 (registry+https://github.com/rust-lang/crates.io-index)", "serde_urlencoded 0.6.1 (registry+https://github.com/rust-lang/crates.io-index)", "url 2.1.0 (registry+https://github.com/rust-lang/crates.io-index)", - "wasm-bindgen 0.2.50 (registry+https://github.com/rust-lang/crates.io-index)", - "wasm-bindgen-futures 0.3.25 (registry+https://github.com/rust-lang/crates.io-index)", - "web-sys 0.3.27 (registry+https://github.com/rust-lang/crates.io-index)", + "wasm-bindgen 0.2.51 (registry+https://github.com/rust-lang/crates.io-index)", + "wasm-bindgen-futures 0.3.27 (registry+https://github.com/rust-lang/crates.io-index)", + "web-sys 0.3.28 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "syn" -version = "0.15.43" +version = "1.0.5" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "proc-macro2 0.4.30 (registry+https://github.com/rust-lang/crates.io-index)", - "quote 0.6.13 (registry+https://github.com/rust-lang/crates.io-index)", - "unicode-xid 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "syn" -version = "1.0.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "proc-macro2 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)", + "proc-macro2 1.0.5 (registry+https://github.com/rust-lang/crates.io-index)", "quote 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)", "unicode-xid 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "synstructure" -version = "0.10.2" +version = "0.12.1" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "proc-macro2 0.4.30 (registry+https://github.com/rust-lang/crates.io-index)", - "quote 0.6.13 (registry+https://github.com/rust-lang/crates.io-index)", - "syn 0.15.43 (registry+https://github.com/rust-lang/crates.io-index)", - "unicode-xid 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", + "proc-macro2 1.0.5 (registry+https://github.com/rust-lang/crates.io-index)", + "quote 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)", + "syn 1.0.5 (registry+https://github.com/rust-lang/crates.io-index)", + "unicode-xid 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -2506,18 +2367,18 @@ name = "syntect" version = "3.2.0" source = 
"registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "bincode 1.1.4 (registry+https://github.com/rust-lang/crates.io-index)", - "bitflags 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)", - "flate2 1.0.9 (registry+https://github.com/rust-lang/crates.io-index)", + "bincode 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)", + "bitflags 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)", + "flate2 1.0.12 (registry+https://github.com/rust-lang/crates.io-index)", "fnv 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)", - "lazy_static 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)", + "lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)", "lazycell 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)", - "onig 4.3.2 (registry+https://github.com/rust-lang/crates.io-index)", + "onig 4.3.3 (registry+https://github.com/rust-lang/crates.io-index)", "plist 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)", - "regex-syntax 0.6.11 (registry+https://github.com/rust-lang/crates.io-index)", - "serde 1.0.100 (registry+https://github.com/rust-lang/crates.io-index)", - "serde_derive 1.0.98 (registry+https://github.com/rust-lang/crates.io-index)", - "serde_json 1.0.40 (registry+https://github.com/rust-lang/crates.io-index)", + "regex-syntax 0.6.12 (registry+https://github.com/rust-lang/crates.io-index)", + "serde 1.0.101 (registry+https://github.com/rust-lang/crates.io-index)", + "serde_derive 1.0.101 (registry+https://github.com/rust-lang/crates.io-index)", + "serde_json 1.0.41 (registry+https://github.com/rust-lang/crates.io-index)", "walkdir 2.2.9 (registry+https://github.com/rust-lang/crates.io-index)", "yaml-rust 0.4.3 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -2527,9 +2388,9 @@ name = "tempfile" version = "3.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "cfg-if 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", - "rand 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)", + "cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", + "rand 0.7.2 (registry+https://github.com/rust-lang/crates.io-index)", "redox_syscall 0.1.56 (registry+https://github.com/rust-lang/crates.io-index)", "remove_dir_all 0.5.2 (registry+https://github.com/rust-lang/crates.io-index)", "winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", @@ -2551,7 +2412,7 @@ version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -2560,7 +2421,7 @@ name = "termcolor" version = "1.0.5" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "wincolor 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)", + "wincolor 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -2577,7 +2438,7 @@ name = "thread_local" version = "0.3.6" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "lazy_static 1.3.0 
(registry+https://github.com/rust-lang/crates.io-index)", + "lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -2593,7 +2454,7 @@ name = "time" version = "0.1.42" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", "redox_syscall 0.1.56 (registry+https://github.com/rust-lang/crates.io-index)", "winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -2612,7 +2473,7 @@ version = "0.1.12" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "bytes 0.4.12 (registry+https://github.com/rust-lang/crates.io-index)", - "futures 0.1.28 (registry+https://github.com/rust-lang/crates.io-index)", + "futures 0.1.29 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -2621,7 +2482,7 @@ name = "toml" version = "0.4.10" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "serde 1.0.100 (registry+https://github.com/rust-lang/crates.io-index)", + "serde 1.0.101 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -2629,17 +2490,17 @@ name = "toml" version = "0.5.3" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "serde 1.0.100 (registry+https://github.com/rust-lang/crates.io-index)", + "serde 1.0.101 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "typenum" -version = "1.10.0" +version = "1.11.2" source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] name = "unicase" -version = "2.4.0" +version = "2.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "version_check 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)", @@ -2671,31 +2532,18 @@ name = "unicode-width" version = "0.1.6" source = "registry+https://github.com/rust-lang/crates.io-index" -[[package]] -name = "unicode-xid" -version = "0.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" - [[package]] name = "unicode-xid" version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -[[package]] -name = "unreachable" -version = "1.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "void 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)", -] - [[package]] name = "uom" version = "0.23.1" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "num-traits 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)", - "typenum 1.10.0 (registry+https://github.com/rust-lang/crates.io-index)", + "typenum 1.11.2 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -2705,7 +2553,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "num-rational 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)", "num-traits 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)", - "typenum 1.10.0 (registry+https://github.com/rust-lang/crates.io-index)", + "typenum 1.11.2 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -2732,15 +2580,6 @@ name = "utf8parse" version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -[[package]] -name = "uuid" -version = "0.7.4" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "rand 0.6.5 (registry+https://github.com/rust-lang/crates.io-index)", - "serde 1.0.100 (registry+https://github.com/rust-lang/crates.io-index)", -] - [[package]] name = "vcpkg" version = "0.2.7" @@ -2771,93 +2610,100 @@ dependencies = [ "winapi-util 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)", ] +[[package]] +name = "wasi" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" + [[package]] name = "wasm-bindgen" -version = "0.2.50" +version = "0.2.51" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "cfg-if 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)", - "wasm-bindgen-macro 0.2.50 (registry+https://github.com/rust-lang/crates.io-index)", + "cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", + "wasm-bindgen-macro 0.2.51 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "wasm-bindgen-backend" -version = "0.2.50" +version = "0.2.51" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "bumpalo 2.5.0 (registry+https://github.com/rust-lang/crates.io-index)", - "lazy_static 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)", + "bumpalo 2.6.0 (registry+https://github.com/rust-lang/crates.io-index)", + "lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)", - "proc-macro2 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)", + "proc-macro2 1.0.5 (registry+https://github.com/rust-lang/crates.io-index)", "quote 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)", - "syn 1.0.4 (registry+https://github.com/rust-lang/crates.io-index)", - "wasm-bindgen-shared 0.2.50 (registry+https://github.com/rust-lang/crates.io-index)", + "syn 1.0.5 (registry+https://github.com/rust-lang/crates.io-index)", + "wasm-bindgen-shared 0.2.51 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "wasm-bindgen-futures" -version = "0.3.25" +version = "0.3.27" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "futures 0.1.28 (registry+https://github.com/rust-lang/crates.io-index)", + "cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", + "futures 0.1.29 (registry+https://github.com/rust-lang/crates.io-index)", "futures-channel-preview 0.3.0-alpha.18 (registry+https://github.com/rust-lang/crates.io-index)", "futures-util-preview 0.3.0-alpha.18 (registry+https://github.com/rust-lang/crates.io-index)", - "js-sys 0.3.27 (registry+https://github.com/rust-lang/crates.io-index)", - "lazy_static 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)", - "wasm-bindgen 0.2.50 (registry+https://github.com/rust-lang/crates.io-index)", + "js-sys 0.3.28 (registry+https://github.com/rust-lang/crates.io-index)", + "lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)", + "wasm-bindgen 0.2.51 (registry+https://github.com/rust-lang/crates.io-index)", + "web-sys 0.3.28 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "wasm-bindgen-macro" -version = "0.2.50" +version = "0.2.51" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "quote 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)", - "wasm-bindgen-macro-support 0.2.50 (registry+https://github.com/rust-lang/crates.io-index)", + 
"wasm-bindgen-macro-support 0.2.51 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "wasm-bindgen-macro-support" -version = "0.2.50" +version = "0.2.51" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "proc-macro2 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)", + "proc-macro2 1.0.5 (registry+https://github.com/rust-lang/crates.io-index)", "quote 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)", - "syn 1.0.4 (registry+https://github.com/rust-lang/crates.io-index)", - "wasm-bindgen-backend 0.2.50 (registry+https://github.com/rust-lang/crates.io-index)", - "wasm-bindgen-shared 0.2.50 (registry+https://github.com/rust-lang/crates.io-index)", + "syn 1.0.5 (registry+https://github.com/rust-lang/crates.io-index)", + "wasm-bindgen-backend 0.2.51 (registry+https://github.com/rust-lang/crates.io-index)", + "wasm-bindgen-shared 0.2.51 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "wasm-bindgen-shared" -version = "0.2.50" +version = "0.2.51" source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] name = "wasm-bindgen-webidl" -version = "0.2.50" +version = "0.2.51" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "failure 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)", + "failure 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)", "heck 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)", - "proc-macro2 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)", + "proc-macro2 1.0.5 (registry+https://github.com/rust-lang/crates.io-index)", "quote 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)", - "syn 1.0.4 (registry+https://github.com/rust-lang/crates.io-index)", - "wasm-bindgen-backend 0.2.50 (registry+https://github.com/rust-lang/crates.io-index)", + "syn 1.0.5 (registry+https://github.com/rust-lang/crates.io-index)", + "wasm-bindgen-backend 0.2.51 (registry+https://github.com/rust-lang/crates.io-index)", "weedle 0.10.0 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "web-sys" -version = "0.3.27" +version = "0.3.28" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "failure 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)", - "js-sys 0.3.27 (registry+https://github.com/rust-lang/crates.io-index)", + "failure 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)", + "js-sys 0.3.28 (registry+https://github.com/rust-lang/crates.io-index)", "sourcefile 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)", - "wasm-bindgen 0.2.50 (registry+https://github.com/rust-lang/crates.io-index)", - "wasm-bindgen-webidl 0.2.50 (registry+https://github.com/rust-lang/crates.io-index)", + "wasm-bindgen 0.2.51 (registry+https://github.com/rust-lang/crates.io-index)", + "wasm-bindgen-webidl 0.2.51 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -2873,8 +2719,8 @@ name = "which" version = "2.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "failure 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", + "failure 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -2921,7 
+2767,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] name = "wincolor" -version = "1.0.1" +version = "1.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", @@ -2933,8 +2779,8 @@ name = "x11" version = "2.18.1" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", - "pkg-config 0.3.15 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", + "pkg-config 0.3.16 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -2950,7 +2796,7 @@ name = "xcb" version = "0.8.2" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -2966,7 +2812,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] name = "xmlparser" -version = "0.9.0" +version = "0.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] @@ -2978,36 +2824,36 @@ dependencies = [ ] [metadata] -"checksum adler32 1.0.3 (registry+https://github.com/rust-lang/crates.io-index)" = "7e522997b529f05601e05166c07ed17789691f562762c7f3b987263d2dedee5c" +"checksum adler32 1.0.4 (registry+https://github.com/rust-lang/crates.io-index)" = "5d2e7343e7fc9de883d1b0341e0b13970f764c14101234857d2ddafa1cb1cac2" "checksum aho-corasick 0.7.6 (registry+https://github.com/rust-lang/crates.io-index)" = "58fb5e95d83b38284460a5fda7d6470aa0b8844d283a0b614b8535e880800d2d" "checksum ansi_term 0.11.0 (registry+https://github.com/rust-lang/crates.io-index)" = "ee49baf6cb617b853aa8d93bf420db2383fab46d314482ca2803b40d5fde979b" "checksum ansi_term 0.12.1 (registry+https://github.com/rust-lang/crates.io-index)" = "d52a9bb7ec0cf484c551830a7ce27bd20d67eac647e1befb56b0be4ee39a55d2" "checksum app_dirs 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "e73a24bad9bd6a94d6395382a6c69fe071708ae4409f763c5475e14ee896313d" "checksum arrayref 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)" = "0d382e583f07208808f6b1249e60848879ba3543f57c32277bf52d69c2f0f0ee" -"checksum arrayvec 0.4.11 (registry+https://github.com/rust-lang/crates.io-index)" = "b8d73f9beda665eaa98ab9e4f7442bd4e7de6652587de55b2525e52e29c1b0ba" +"checksum arrayvec 0.4.12 (registry+https://github.com/rust-lang/crates.io-index)" = "cd9fd44efafa8690358b7408d253adf110036b88f55672a933f01d616ad9b1b9" "checksum async-stream 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "650be9b667e47506c42ee53034fb1935443cb2447a3a5c0a75e303d2e756fa73" "checksum async-stream-impl 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "4f0d8c5b411e36dcfb04388bacfec54795726b1f0148adcb0f377a96d6747e0e" "checksum atty 0.2.13 (registry+https://github.com/rust-lang/crates.io-index)" = "1803c647a3ec87095e7ae7acfca019e98de5ec9a7d01343f611cf3152ed71a90" -"checksum autocfg 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)" = "22130e92352b948e7e82a49cdb0aa94f2211761117f29e052dd397c1ac33542b" -"checksum backtrace 0.3.34 (registry+https://github.com/rust-lang/crates.io-index)" = "b5164d292487f037ece34ec0de2fcede2faa162f085dd96d2385ab81b12765ba" +"checksum autocfg 
0.1.6 (registry+https://github.com/rust-lang/crates.io-index)" = "b671c8fb71b457dd4ae18c4ba1e59aa81793daacc361d82fcd410cef0d491875" +"checksum backtrace 0.3.38 (registry+https://github.com/rust-lang/crates.io-index)" = "690a62be8920ccf773ee00ef0968649b0e724cda8bd5b12286302b4ae955fdf5" "checksum backtrace-sys 0.1.31 (registry+https://github.com/rust-lang/crates.io-index)" = "82a830b4ef2d1124a711c71d263c5abdc710ef8e907bd508c88be475cebc422b" "checksum base64 0.10.1 (registry+https://github.com/rust-lang/crates.io-index)" = "0b25d992356d2eb0ed82172f5248873db5560c4721f564b13cb5193bda5e668e" "checksum battery 0.7.4 (registry+https://github.com/rust-lang/crates.io-index)" = "7a6d6fe5630049e900227cd89afce4c1204b88ec8e61a2581bb96fcce26f047b" "checksum bigdecimal 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "460825c9e21708024d67c07057cd5560e5acdccac85de0de624a81d3de51bacb" -"checksum bincode 1.1.4 (registry+https://github.com/rust-lang/crates.io-index)" = "9f04a5e50dc80b3d5d35320889053637d15011aed5e66b66b37ae798c65da6f7" -"checksum bitflags 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "3d155346769a6855b86399e9bc3814ab343cd3d62c7e985113d46a0ec3c281fd" -"checksum blake2b_simd 0.5.6 (registry+https://github.com/rust-lang/crates.io-index)" = "461f4b879a8eb70c1debf7d0788a9a5ff15f1ea9d25925fea264ef4258bed6b2" +"checksum bincode 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "b8ab639324e3ee8774d296864fbc0dbbb256cf1a41c490b94cba90c082915f92" +"checksum bitflags 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "8a606a02debe2813760609f57a64a2ffd27d9fdf5b2f133eaca0b248dd92cdd2" +"checksum blake2b_simd 0.5.8 (registry+https://github.com/rust-lang/crates.io-index)" = "5850aeee1552f495dd0250014cf64b82b7c8879a89d83b33bbdace2cc4f63182" "checksum block 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)" = "0d8c1fef690941d3e7788d328517591fecc684c084084702d6ff1641e993699a" "checksum bson 0.14.0 (registry+https://github.com/rust-lang/crates.io-index)" = "d61895d21e2194d1ce1d434cff69025daac1e49a8b4698eb04b05722dbc08b33" -"checksum bstr 0.2.6 (registry+https://github.com/rust-lang/crates.io-index)" = "e0a692f1c740e7e821ca71a22cf99b9b2322dfa94d10f71443befb1797b3946a" -"checksum bumpalo 2.5.0 (registry+https://github.com/rust-lang/crates.io-index)" = "2cd43d82f27d68911e6ee11ee791fb248f138f5d69424dc02e098d4f152b0b05" -"checksum byte-unit 3.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = "90139954ec9776c4832d44f212e558ccdacbe915a881bf3de3a1a487fa8d1e87" +"checksum bstr 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)" = "8d6c2c5b58ab920a4f5aeaaca34b4488074e8cc7596af94e6f8c6ff247c60245" +"checksum bumpalo 2.6.0 (registry+https://github.com/rust-lang/crates.io-index)" = "ad807f2fc2bf185eeb98ff3a901bd46dc5ad58163d0fa4577ba0d25674d71708" +"checksum byte-unit 3.0.3 (registry+https://github.com/rust-lang/crates.io-index)" = "6894a79550807490d9f19a138a6da0f8830e70c83e83402dd23f16fd6c479056" "checksum bytecount 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)" = "f861d9ce359f56dbcb6e0c2a1cb84e52ad732cadb57b806adeb3c7668caccbd8" "checksum byteorder 1.3.2 (registry+https://github.com/rust-lang/crates.io-index)" = "a7c3dd8985a7111efc5c80b44e23ecdd8c007de8ade3b96595387e812b957cf5" "checksum bytes 0.4.12 (registry+https://github.com/rust-lang/crates.io-index)" = "206fdffcfa2df7cbe15601ef46c813fce0965eb3286db6b56c583b814b51c81c" "checksum c2-chacha 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = 
"7d64d04786e0f528460fc884753cf8dddcc466be308f6026f8e355c41a0e4101" "checksum cc 1.0.45 (registry+https://github.com/rust-lang/crates.io-index)" = "4fc9a35e1f4290eb9e5fc54ba6cf40671ed2a2514c3eeb2b2a908dda2ea5a1be" -"checksum cfg-if 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)" = "b486ce3ccf7ffd79fdeb678eac06a9e6c09fc88d33836340becb8fffe87c5e33" +"checksum cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)" = "4785bdd1c96b2a846b2bd7cc02e86b6b3dbf14e7e53446c4f54c92a361040822" "checksum chrono 0.4.9 (registry+https://github.com/rust-lang/crates.io-index)" = "e8493056968583b0193c1bb04d6f7684586f3726992d6c573261941a895dbd68" "checksum chrono-humanize 0.0.11 (registry+https://github.com/rust-lang/crates.io-index)" = "eb2ff48a655fe8d2dae9a39e66af7fd8ff32a879e8c4e27422c25596a8b5e90d" "checksum clap 2.33.0 (registry+https://github.com/rust-lang/crates.io-index)" = "5067f5bb2d80ef5d68b4c87db81601f0b75bca627bc2ef76b141d7b846a3c6d9" @@ -3015,7 +2861,7 @@ dependencies = [ "checksum clipboard-win 2.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "e3a093d6fed558e5fe24c3dfc85a68bb68f1c824f440d3ba5aca189e2998786b" "checksum cloudabi 0.0.3 (registry+https://github.com/rust-lang/crates.io-index)" = "ddfc5b9aa5d4507acaf872de71051dfd0e309860e88966e1051e462a077aac4f" "checksum config 0.9.3 (registry+https://github.com/rust-lang/crates.io-index)" = "f9107d78ed62b3fa5a86e7d18e647abed48cfd8f8fab6c72f4cdb982d196f7e6" -"checksum constant_time_eq 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)" = "8ff012e225ce166d4422e0e78419d901719760f62ae2b7969ca6b564d1b54a9e" +"checksum constant_time_eq 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)" = "995a44c877f9212528ccc74b21a232f66ad69001e40ede5bcee2ac9ef2657120" "checksum core-foundation 0.6.4 (registry+https://github.com/rust-lang/crates.io-index)" = "25b9e03f145fd4f2bf705e07b900cd41fc636598fe5dc452fd0db1441c3f496d" "checksum core-foundation-sys 0.6.2 (registry+https://github.com/rust-lang/crates.io-index)" = "e7ca8a5221364ef15ce201e8ed2f609fc312682a8f4e0e3d4aa5879764e0fa3b" "checksum crc32fast 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "ba125de2af0df55319f41944744ad91c71113bf74a4646efff39afe1f6842db1" @@ -3032,10 +2878,10 @@ dependencies = [ "checksum crossterm_winapi 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)" = "b055e7cc627c452e6a9b977022f48a2db6f0ff73df446ca970f95eef9c381d45" "checksum csv 1.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "37519ccdfd73a75821cac9319d4fce15a81b9fcf75f951df5b9988aa3a0af87d" "checksum csv-core 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)" = "9b5cadb6b25c77aeff80ba701712494213f4a8418fcda2ee11b6560c3ad0bf4c" -"checksum ctor 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)" = "3b4c17619643c1252b5f690084b82639dd7fac141c57c8e77a00e0148132092c" +"checksum ctor 0.1.12 (registry+https://github.com/rust-lang/crates.io-index)" = "cd8ce37ad4184ab2ce004c33bf6379185d3b1c95801cab51026bd271bf68eedc" "checksum ctrlc 3.1.3 (registry+https://github.com/rust-lang/crates.io-index)" = "c7dfd2d8b4c82121dfdff120f818e09fc4380b0b7e17a742081a89b94853e87f" -"checksum curl 0.4.22 (registry+https://github.com/rust-lang/crates.io-index)" = "f8ed9a22aa8c4e49ac0c896279ef532a43a7df2f54fcd19fa36960de029f965f" -"checksum curl-sys 0.4.20 (registry+https://github.com/rust-lang/crates.io-index)" = "5e90ae10f635645cba9cad1023535f54915a95c58c44751c6ed70dbaeb17a408" +"checksum curl 0.4.25 
(registry+https://github.com/rust-lang/crates.io-index)" = "06aa71e9208a54def20792d877bc663d6aae0732b9852e612c4a933177c31283" +"checksum curl-sys 0.4.23 (registry+https://github.com/rust-lang/crates.io-index)" = "f71cd2dbddb49c744c1c9e0b96106f50a634e8759ec51bcd5399a578700a3ab3" "checksum darwin-libproc 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "ade5a88af8d9646bf770687321a9488a0f2b4610aa08b0373016cd1af37f0a31" "checksum darwin-libproc-sys 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "c30d1a078d74da1183b02fed8a8b07afc412d3998334b53b750d0ed03b031541" "checksum decimal 2.0.4 (registry+https://github.com/rust-lang/crates.io-index)" = "e6458723bc760383275fbc02f4c769b2e5f3de782abaf5e7e0b9b7f0368a63ed" @@ -3048,20 +2894,18 @@ dependencies = [ "checksum dirs-sys 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)" = "afa0b23de8fd801745c471deffa6e12d248f962c9fd4b4c33787b055599bde7b" "checksum dtoa 0.4.4 (registry+https://github.com/rust-lang/crates.io-index)" = "ea57b42383d091c85abcc2706240b94ab2a8fa1fc81c10ff23c4de06e2a90b5e" "checksum dunce 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "d0ad6bf6a88548d1126045c413548df1453d9be094a8ab9fd59bf1fdd338da4f" -"checksum either 1.5.2 (registry+https://github.com/rust-lang/crates.io-index)" = "5527cfe0d098f36e3f8839852688e63c8fff1c90b2b405aef730615f9a7bcf7b" -"checksum encode_unicode 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)" = "90b2c9496c001e8cb61827acdefad780795c42264c137744cae6f7d9e3450abd" -"checksum enum-utils 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "1f1ae672d9891879fb93e17ab6015c4e3bbe63fbeb23a41b9ac39ffa845b8836" -"checksum enum-utils-from-str 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "6b5669381f76d7320e122abdd4a8307f986634f6d067fb69e31179422175801a" +"checksum either 1.5.3 (registry+https://github.com/rust-lang/crates.io-index)" = "bb1f6b1ce1c140482ea30ddd3335fc0024ac7ee112895426e0a629a6c20adfe3" +"checksum encode_unicode 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)" = "a357d28ed41a50f9c765dbfe56cbc04a64e53e5fc58ba79fbc34c10ef3df831f" "checksum env_logger 0.6.2 (registry+https://github.com/rust-lang/crates.io-index)" = "aafcde04e90a5226a6443b7aabdb016ba2f8307c847d524724bd9b346dd1a2d3" -"checksum failure 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)" = "795bd83d3abeb9220f257e597aa0080a508b27533824adf336529648f6abf7e2" -"checksum failure_derive 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)" = "ea1063915fd7ef4309e222a5a07cf9c319fb9c7836b1f89b85458672dbb127e1" +"checksum failure 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)" = "f8273f13c977665c5db7eb2b99ae520952fe5ac831ae4cd09d80c4c7042b5ed9" +"checksum failure_derive 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)" = "0bc225b78e0391e4b8683440bf2e63c2deeeb2ce5189eab46e2b68c6d3725d08" "checksum fallible-iterator 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "4443176a9f2c162692bd3d352d745ef9413eec5782a80d8fd6f8a1ac692a07f7" "checksum fallible-streaming-iterator 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)" = "7360491ce676a36bf9bb3c56c1aa791658183a54d2744120f27285738d90465a" "checksum fixedbitset 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)" = "86d4de0081402f5e88cdac65c8dcdcc73118c1a7a465e2a05f0da05843a8ea33" -"checksum flate2 1.0.9 (registry+https://github.com/rust-lang/crates.io-index)" = 
"550934ad4808d5d39365e5d61727309bf18b3b02c6c56b729cb92e7dd84bc3d8" +"checksum flate2 1.0.12 (registry+https://github.com/rust-lang/crates.io-index)" = "ad3c5233c9a940c8719031b423d7e6c16af66e031cb0420b0896f5245bf181d3" "checksum fnv 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)" = "2fad85553e09a6f881f739c29f0b00b0f01357c743266d478b68951ce23285f3" "checksum fuchsia-cprng 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "a06f77d526c1a601b7c4cdd98f54b5eaabffc14d5f2f0296febdc7f357c6d3ba" -"checksum futures 0.1.28 (registry+https://github.com/rust-lang/crates.io-index)" = "45dc39533a6cae6da2b56da48edae506bb767ec07370f86f70fc062e9d435869" +"checksum futures 0.1.29 (registry+https://github.com/rust-lang/crates.io-index)" = "1b980f2816d6ee8673b6517b52cb0e808a180efc92e5c19d02cdda79066703ef" "checksum futures-channel-preview 0.3.0-alpha.18 (registry+https://github.com/rust-lang/crates.io-index)" = "f477fd0292c4a4ae77044454e7f2b413207942ad405f759bb0b4698b7ace5b12" "checksum futures-core-preview 0.3.0-alpha.18 (registry+https://github.com/rust-lang/crates.io-index)" = "4a2f26f774b81b3847dcda0c81bd4b6313acfb4f69e5a0390c7cb12c058953e9" "checksum futures-executor-preview 0.3.0-alpha.18 (registry+https://github.com/rust-lang/crates.io-index)" = "80705612926df8a1bc05f0057e77460e29318801f988bf7d803a734cf54e7528" @@ -3071,47 +2915,47 @@ dependencies = [ "checksum futures-timer 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "878f1d2fc31355fa02ed2372e741b0c17e58373341e6a122569b4623a14a7d33" "checksum futures-util-preview 0.3.0-alpha.18 (registry+https://github.com/rust-lang/crates.io-index)" = "7df53daff1e98cc024bf2720f3ceb0414d96fbb0a94f3cad3a5c3bf3be1d261c" "checksum futures_codec 0.2.5 (registry+https://github.com/rust-lang/crates.io-index)" = "36552cd31353fd135114510d53b8d120758120c36aa636a9341970f9efb1e4a0" -"checksum getrandom 0.1.8 (registry+https://github.com/rust-lang/crates.io-index)" = "34f33de6f0ae7c9cb5e574502a562e2b512799e32abb801cd1e79ad952b62b49" +"checksum getrandom 0.1.12 (registry+https://github.com/rust-lang/crates.io-index)" = "473a1265acc8ff1e808cd0a1af8cee3c2ee5200916058a2ca113c29f2d903571" "checksum getset 0.0.8 (registry+https://github.com/rust-lang/crates.io-index)" = "117a5b13aecd4e10161bb3feb22dda898e8552836c2391d8e4645d5e703ab866" "checksum git2 0.10.1 (registry+https://github.com/rust-lang/crates.io-index)" = "39f27186fbb5ec67ece9a56990292bc5aed3c3fc51b9b07b0b52446b1dfb4a82" "checksum glob 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "9b919933a397b79c37e33b77bb2aa3dc8eb6e165ad809e58ff75bc7db2e34574" "checksum heck 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "20564e78d53d2bb135c343b3f47714a56af2061f1c928fdb541dc7b9fdd94205" -"checksum heim 0.0.8-alpha.1 (registry+https://github.com/rust-lang/crates.io-index)" = "02692a4aa3bed77933da9ae7915aef7fcceb65eff9d9251be189b1acc0b77f65" -"checksum heim-common 0.0.8-alpha.1 (registry+https://github.com/rust-lang/crates.io-index)" = "559807533108e09863125eeccb38a7213cef5a7a7deadd3fac2674e1f8d3db70" -"checksum heim-cpu 0.0.8-alpha.1 (registry+https://github.com/rust-lang/crates.io-index)" = "60c237652eaa091b39f996deb41aa7baae67cab5f25204154c14414f46ef69c1" -"checksum heim-derive 0.0.8-alpha.1 (registry+https://github.com/rust-lang/crates.io-index)" = "b3f326db96a03106afcea6839b13f7d95b09cffd063eaa94ef0fd3e796214a66" -"checksum heim-disk 0.0.8-alpha.1 (registry+https://github.com/rust-lang/crates.io-index)" = 
"bd75c64f2d054ce1297ad08f2ca41bf7db7e9ca868221b2fb7427210579e85a1" -"checksum heim-host 0.0.8-alpha.1 (registry+https://github.com/rust-lang/crates.io-index)" = "6401c858723568a09e0f09e09bda833e0019c34aa512ccdeba236fce45e4eeb1" -"checksum heim-memory 0.0.8-alpha.1 (registry+https://github.com/rust-lang/crates.io-index)" = "424a549b6c3faecc2492cd3d49f1f89ed9f191c7995741b89e674b85a262e303" -"checksum heim-net 0.0.8-alpha.1 (registry+https://github.com/rust-lang/crates.io-index)" = "d0ebbcbabe86dbc1c8713ecc1f54630549f82fa07520083cf9a0edcdd77d329a" -"checksum heim-process 0.0.8-alpha.1 (registry+https://github.com/rust-lang/crates.io-index)" = "564f0d9d123c708688721fb2c2aacc198bd5eec3d995eb8c25d369500c66ca7d" -"checksum heim-runtime 0.0.4-alpha.1 (registry+https://github.com/rust-lang/crates.io-index)" = "df59b2a6e00b7f4532dc00736d74bf721a4587d4dbf90793c524ed0a7eddfa19" -"checksum heim-sensors 0.0.3-alpha.1 (registry+https://github.com/rust-lang/crates.io-index)" = "512afc3c0562aa26ae4e236a4b371901fbf7ddac843c961b2ef201936e79a7cd" -"checksum heim-virt 0.0.8-alpha.1 (registry+https://github.com/rust-lang/crates.io-index)" = "95372a84d2a0a5709899449fbb8ed296a9ce5b9fc0ba4729f0c26f7d5ebdf155" +"checksum heim 0.0.8 (registry+https://github.com/rust-lang/crates.io-index)" = "de848466ae9659d5ab634615bdd0b7d558a41ae524ee4d59c880d12499af5b77" +"checksum heim-common 0.0.8 (registry+https://github.com/rust-lang/crates.io-index)" = "63f408c31e695732096a0383df16cd3efee4adb32ba3ad086fb85a7dc8f53100" +"checksum heim-cpu 0.0.8 (registry+https://github.com/rust-lang/crates.io-index)" = "5785004dfdbd68a814d504b27b8ddc16c748a856835dfb6e65b15142090664ef" +"checksum heim-derive 0.0.8 (registry+https://github.com/rust-lang/crates.io-index)" = "9573bedf4673c1b254bce7f1521559329d2b27995b693b695fa13be2b15c188b" +"checksum heim-disk 0.0.8 (registry+https://github.com/rust-lang/crates.io-index)" = "c84980e62564828ae4ca70a8bfbdb0f139cc89abb6c91b8b4809518346a72366" +"checksum heim-host 0.0.8 (registry+https://github.com/rust-lang/crates.io-index)" = "1de019d5969f6bab766311be378788bd1bb068b59c4f3861c539a420fc258ed3" +"checksum heim-memory 0.0.8 (registry+https://github.com/rust-lang/crates.io-index)" = "a9cdbe6433197da8387dcd0cf1afd9184db4385d55f8a76355b28ceabe99cdc5" +"checksum heim-net 0.0.8 (registry+https://github.com/rust-lang/crates.io-index)" = "7b0f5e590eb2f8b23229ff4b06f7e7aee0e229837d3697f362014343682ae073" +"checksum heim-process 0.0.8 (registry+https://github.com/rust-lang/crates.io-index)" = "a64874316339b9c0c7953e7a87d2b32e2400bf6778650ac11b76b05d3c37e121" +"checksum heim-runtime 0.0.4 (registry+https://github.com/rust-lang/crates.io-index)" = "13ef10b5ab5a501e6537b1414db0e3c488425d88bb131bd4e9ff7c0e61e5fbd1" +"checksum heim-sensors 0.0.3 (registry+https://github.com/rust-lang/crates.io-index)" = "8ad8b3c9032bca1a76dd43e1eb5c8044e0c505343cb21949dc7acd1bc55b408b" +"checksum heim-virt 0.0.8 (registry+https://github.com/rust-lang/crates.io-index)" = "bb2dda5314da10a8fbcdf130c065abc65f02c3ace72c6f143ad4537520536e2b" "checksum hex 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)" = "805026a5d0141ffc30abb3be3173848ad46a1b1664fe632428479619a3644d77" "checksum hex 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "023b39be39e3a2da62a94feb433e91e8bcd37676fbc8bea371daf52b7a769a3e" "checksum http 0.1.18 (registry+https://github.com/rust-lang/crates.io-index)" = "372bcb56f939e449117fb0869c2e8fd8753a8223d92a172c6e808cf123a5b6e4" -"checksum humantime 1.2.0 
(registry+https://github.com/rust-lang/crates.io-index)" = "3ca7e5f2e110db35f93b837c81797f3714500b81d517bf20c431b16d3ca4f114" +"checksum humantime 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "df004cfca50ef23c36850aaaa59ad52cc70d0e90243c3c7737a4dd32dc7a3c4f" "checksum idna 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "02e2673c30ee86b5b96a9cb52ad15718aa1f966f5ab9ad54a8b95d5ca33120a9" -"checksum image 0.22.2 (registry+https://github.com/rust-lang/crates.io-index)" = "ee0665404aa0f2ad154021777b785878b0e5b1c1da030455abc3d9ed257c2c67" +"checksum image 0.22.3 (registry+https://github.com/rust-lang/crates.io-index)" = "7b4be8aaefbe7545dc42ae925afb55a0098f226a3fe5ef721872806f44f57826" "checksum indexmap 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "a61202fbe46c4a951e9404a720a0180bcf3212c750d735cb5c4ba4dc551299f3" "checksum inflate 0.4.5 (registry+https://github.com/rust-lang/crates.io-index)" = "1cdb29978cc5797bd8dcc8e5bf7de604891df2a8dc576973d71a281e916db2ff" -"checksum iovec 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "dbe6e417e7d0975db6512b90796e8ce223145ac4e33c377e4a42882a0e88bb08" -"checksum isahc 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)" = "e1b971511b5d8de4a51d4da4bc8e374bf60ce841e91b116f46ae06ae2e2a8e9b" +"checksum iovec 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)" = "b2b3ea6ff95e175473f8ffe6a7eb7c00d054240321b84c57051175fe3c1e075e" +"checksum isahc 0.7.4 (registry+https://github.com/rust-lang/crates.io-index)" = "769f5071e5bf0b45489eefe0ec96b97328675db38d02ea5e923519d52e690cb8" "checksum isatty 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)" = "e31a8281fc93ec9693494da65fbf28c0c2aa60a2eaec25dc58e2f31952e95edc" "checksum itertools 0.7.11 (registry+https://github.com/rust-lang/crates.io-index)" = "0d47946d458e94a1b7bcabbf6521ea7c037062c81f534615abcad76e84d4970d" "checksum itertools 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)" = "5b8467d9c1cebe26feb08c640139247fac215782d35371ade9a2136ed6085358" "checksum itoa 0.4.4 (registry+https://github.com/rust-lang/crates.io-index)" = "501266b7edd0174f8530248f87f99c88fbe60ca4ef3dd486835b8d8d53136f7f" -"checksum jobserver 0.1.16 (registry+https://github.com/rust-lang/crates.io-index)" = "f74e73053eaf95399bf926e48fc7a2a3ce50bd0eaaa2357d391e95b2dcdd4f10" -"checksum jpeg-decoder 0.1.15 (registry+https://github.com/rust-lang/crates.io-index)" = "c8b7d43206b34b3f94ea9445174bda196e772049b9bddbc620c9d29b2d20110d" -"checksum js-sys 0.3.27 (registry+https://github.com/rust-lang/crates.io-index)" = "1efc4f2a556c58e79c5500912e221dd826bec64ff4aabd8ce71ccef6da02d7d4" +"checksum jobserver 0.1.17 (registry+https://github.com/rust-lang/crates.io-index)" = "f2b1d42ef453b30b7387e113da1c83ab1605d90c5b4e0eb8e96d016ed3b8c160" +"checksum jpeg-decoder 0.1.16 (registry+https://github.com/rust-lang/crates.io-index)" = "c1aae18ffeeae409c6622c3b6a7ee49792a7e5a062eea1b135fbb74e301792ba" +"checksum js-sys 0.3.28 (registry+https://github.com/rust-lang/crates.io-index)" = "2cc9a97d7cec30128fd8b28a7c1f9df1c001ceb9b441e2b755e24130a6b43c79" "checksum kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "7507624b29483431c0ba2d82aece8ca6cdba9382bff4ddd0f7490560c056098d" "checksum language-reporting 0.3.1 (git+https://github.com/wycats/language-reporting)" = "" "checksum lazy_static 0.2.11 (registry+https://github.com/rust-lang/crates.io-index)" = "76f033c7ad61445c5b347c7382dd1237847eb1bce590fe50365dcb33d546be73" 
-"checksum lazy_static 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "bc5729f27f159ddd61f4df6228e827e86643d4d3e7c32183cb30a1c08f604a14" +"checksum lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646" "checksum lazycell 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "b294d6fa9ee409a054354afc4352b0b9ef7ca222c69b8812cbea9e7d2bf3783f" -"checksum lexical-core 0.4.3 (registry+https://github.com/rust-lang/crates.io-index)" = "b8b0f90c979adde96d19eb10eb6431ba0c441e2f9e9bdff868b2f6f5114ff519" -"checksum libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)" = "d44e80633f007889c7eff624b709ab43c92d708caad982295768a7b13ca3b5eb" +"checksum lexical-core 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)" = "2304bccb228c4b020f3a4835d247df0a02a7c4686098d4167762cfbbe4c5cb14" +"checksum libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)" = "34fcd2c08d2f832f376f4173a231990fa5aef4e99fb569867318a227ef4c06ba" "checksum libgit2-sys 0.9.1 (registry+https://github.com/rust-lang/crates.io-index)" = "a30f8637eb59616ee3b8a00f6adff781ee4ddd8343a615b8238de756060cc1b3" "checksum libnghttp2-sys 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "02254d44f4435dd79e695f2c2b83cd06a47919adea30216ceaf0c57ca0a72463" "checksum libsqlite3-sys 0.16.0 (registry+https://github.com/rust-lang/crates.io-index)" = "5e5b95e89c330291768dc840238db7f9e204fd208511ab6319b56193a7f2ae25" @@ -3130,16 +2974,16 @@ dependencies = [ "checksum memchr 2.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "88579771288728879b57485cc7d6b07d648c9f0141eb955f8ab7f9d45394468e" "checksum mime 0.3.14 (registry+https://github.com/rust-lang/crates.io-index)" = "dd1d63acd1b78403cc0c325605908475dd9b9a3acbf65ed8bcab97e27014afcf" "checksum mime_guess 2.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = "1a0ed03949aef72dbdf3116a383d7b38b4768e6f960528cd6a6044aa9ed68599" -"checksum miniz-sys 0.1.12 (registry+https://github.com/rust-lang/crates.io-index)" = "1e9e3ae51cea1576ceba0dde3d484d30e6e5b86dee0b2d412fe3a16a15c98202" -"checksum miniz_oxide 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "fe2959c5a0747a8d7a56b4444c252ffd2dda5d452cfd147cdfdda73b1c3ece5b" -"checksum miniz_oxide_c_api 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)" = "6c675792957b0d19933816c4e1d56663c341dd9bfa31cb2140ff2267c1d8ecf4" +"checksum miniz_oxide 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "304f66c19be2afa56530fa7c39796192eef38618da8d19df725ad7c6d6b2aaae" "checksum natural 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "fd659d7d6b4554da2c0e7a486d5952b24dfce0e0bac88ab53b270f4efe1010a6" "checksum neso 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)" = "6b3c31defbcb081163db18437fd88c2a267cb3e26f7bd5e4b186e4b1b38fe8c8" "checksum nix 0.14.1 (registry+https://github.com/rust-lang/crates.io-index)" = "6c722bee1037d430d0f8e687bbdbf222f27cc6e4e68d5caf630857bb2b6dbdce" "checksum nix 0.15.0 (registry+https://github.com/rust-lang/crates.io-index)" = "3b2e0b4f3320ed72aaedb9a5ac838690a8047c7b275da22711fddff4f8a14229" -"checksum nodrop 0.1.13 (registry+https://github.com/rust-lang/crates.io-index)" = "2f9667ddcc6cc8a43afc9b7917599d7216aa09c463919ea32c59ed6cac8bc945" +"checksum nodrop 0.1.14 (registry+https://github.com/rust-lang/crates.io-index)" = "72ef4a56884ca558e5ddb05a1d1e7e1bfd9a68d9ed024c21704cc98872dae1bb" 
"checksum nom 4.2.3 (registry+https://github.com/rust-lang/crates.io-index)" = "2ad2a91a8e869eeb30b9cb3119ae87773a8f4ae617f41b1eb9c154b2905f7bd6" -"checksum nom 5.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "e9761d859320e381010a4f7f8ed425f2c924de33ad121ace447367c713ad561b" +"checksum nom 5.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = "c618b63422da4401283884e6668d39f819a106ef51f5f59b81add00075da35ca" +"checksum nom-tracable 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "edaa64ad2837d831d4a17966c9a83aa5101cc320730f5b724811c8f7442a2528" +"checksum nom-tracable-macros 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "fd25f70877a9fe68bd406b3dd3ff99e94ce9de776cf2a96e0d99de90b53d4765" "checksum nom_locate 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "f932834fd8e391fc7710e2ba17e8f9f8645d846b55aa63207e17e110a1e1ce35" "checksum ntapi 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "f26e041cd983acbc087e30fcba770380cfa352d0e392e175b2344ebaf7ea0602" "checksum num-bigint 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)" = "f9c3f34cdd24f334cb265d9bf8bfa8a241920d026916785747a92f0e55541a1a" @@ -3153,10 +2997,10 @@ dependencies = [ "checksum objc-foundation 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "1add1b659e36c9607c7aab864a76c7a4c2760cd0cd2e120f3fb8b952c7e22bf9" "checksum objc_id 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "c92d4ddb4bd7b50d730c215ff871754d0da6b2178849f8a2a2ab69712d0c073b" "checksum ole32-sys 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "5d2c49021782e5233cd243168edfa8037574afed4eba4bbaf538b3d8d1789d8c" -"checksum onig 4.3.2 (registry+https://github.com/rust-lang/crates.io-index)" = "a646989adad8a19f49be2090374712931c3a59835cb5277b4530f48b417f26e7" +"checksum onig 4.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "8518fcb2b1b8c2f45f0ad499df4fda6087fc3475ca69a185c173b8315d2fb383" "checksum onig_sys 69.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "388410bf5fa341f10e58e6db3975f4bea1ac30247dd79d37a9e5ced3cb4cc3b0" "checksum openssl-probe 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "77af24da69f9d9341038eba93a073b1fdaaa1b788221b00a69bce9e762cb32de" -"checksum openssl-sys 0.9.49 (registry+https://github.com/rust-lang/crates.io-index)" = "f4fad9e54bd23bd4cbbe48fdc08a1b8091707ac869ef8508edea2fec77dcc884" +"checksum openssl-sys 0.9.51 (registry+https://github.com/rust-lang/crates.io-index)" = "ba24190c8f0805d3bd2ce028f439fe5af1d55882bbe6261bed1dbc93b50dd6b1" "checksum ord_subset 3.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "d7ce14664caf5b27f5656ff727defd68ae1eb75ef3c4d95259361df1eb376bef" "checksum ordered-float 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)" = "18869315e81473c951eb56ad5558bbc56978562d3ecfb87abb7a1e944cea4518" "checksum ordermap 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)" = "a86ed3f5f244b372d6b1a00b72ef7f8876d0bc6a78a4c9985c53614041512063" @@ -3164,90 +3008,78 @@ dependencies = [ "checksum percent-encoding 2.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "d4fd5641d01c8f18a23da7b6fe29298ff4b55afcccdf78973b24cf3175fee32e" "checksum petgraph 0.4.13 (registry+https://github.com/rust-lang/crates.io-index)" = "9c3659d1ee90221741f65dd128d9998311b0e40c5d3c23a62445938214abce4f" "checksum pin-utils 0.1.0-alpha.4 (registry+https://github.com/rust-lang/crates.io-index)" = 
"5894c618ce612a3fa23881b152b608bafb8c56cfc22f434a3ba3120b40f7b587" -"checksum pkg-config 0.3.15 (registry+https://github.com/rust-lang/crates.io-index)" = "a7c1d2cfa5a714db3b5f24f0915e74fcdf91d09d496ba61329705dda7774d2af" -"checksum platforms 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "6cfec0daac55b13af394ceaaad095d17c790f77bdc9329264f06e49d6cd3206c" +"checksum pkg-config 0.3.16 (registry+https://github.com/rust-lang/crates.io-index)" = "72d5370d90f49f70bd033c3d75e87fc529fbfff9d6f7cccef07d6170079d91ea" +"checksum platforms 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "feb3b2b1033b8a60b4da6ee470325f887758c95d5320f52f9ce0df055a55940e" "checksum plist 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)" = "5f2a9f075f6394100e7c105ed1af73fb1859d6fd14e49d4290d578120beb167f" "checksum png 0.15.0 (registry+https://github.com/rust-lang/crates.io-index)" = "8422b27bb2c013dd97b9aef69e161ce262236f49aaf46a0489011c8ff0264602" "checksum ppv-lite86 0.2.5 (registry+https://github.com/rust-lang/crates.io-index)" = "e3cbf9f658cdb5000fcf6f362b8ea2ba154b9f146a61c7a20d647034c6b6561b" -"checksum pretty-hex 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "119929a2a3b731bb3d888f7a1b5dc3c1db28b6c134def5d99f7e16e2da16b8f7" +"checksum pretty-hex 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "be91bcc43e73799dc46a6c194a55e7aae1d86cc867c860fd4a436019af21bd8c" "checksum pretty_assertions 0.6.1 (registry+https://github.com/rust-lang/crates.io-index)" = "3f81e1644e1b54f5a68959a29aa86cde704219254669da328ecfdf6a1f09d427" "checksum pretty_env_logger 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "717ee476b1690853d222af4634056d830b5197ffd747726a9a1eee6da9f49074" "checksum prettytable-rs 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)" = "0fd04b170004fa2daccf418a7f8253aaf033c27760b5f225889024cf66d7ac2e" -"checksum proc-macro2 0.4.30 (registry+https://github.com/rust-lang/crates.io-index)" = "cf3d2011ab5c909338f7887f4fc896d35932e29146c12c8d01da6b22a80ba759" -"checksum proc-macro2 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = "4c5c2380ae88876faae57698be9e9775e3544decad214599c3a6266cca6ac802" +"checksum proc-macro2 1.0.5 (registry+https://github.com/rust-lang/crates.io-index)" = "90cf5f418035b98e655e9cdb225047638296b862b42411c4e45bb88d700f7fc0" "checksum ptree 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "6b0a3be00b19ee7bd33238c1c523a7ab4df697345f6b36f90827a7860ea938d4" "checksum quick-error 1.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "9274b940887ce9addde99c4eee6b5c44cc494b182b97e73dc8ffdcb3397fd3f0" -"checksum quote 0.6.13 (registry+https://github.com/rust-lang/crates.io-index)" = "6ce23b6b870e8f94f81fb0a363d65d86675884b34a09043c81e5562f11c1f8e1" "checksum quote 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)" = "053a8c8bcc71fcce321828dc897a98ab9760bef03a4fc36693c231e5b3216cfe" -"checksum rand 0.6.5 (registry+https://github.com/rust-lang/crates.io-index)" = "6d71dacdc3c88c1fde3885a3be3fbab9f35724e6ce99467f7d9c5026132184ca" -"checksum rand 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "d47eab0e83d9693d40f825f86948aa16eff6750ead4bdffc4ab95b8b3a7f052c" -"checksum rand_chacha 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "556d3a1ca6600bfcbab7c7c91ccb085ac7fbbcd70e008a98742e7847f4f7bcef" +"checksum rand 0.7.2 (registry+https://github.com/rust-lang/crates.io-index)" = 
"3ae1b169243eaf61759b8475a998f0a385e42042370f3a7dbaf35246eacc8412" "checksum rand_chacha 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "03a2a90da8c7523f554344f921aa97283eadf6ac484a6d2a7d0212fa7f8d6853" "checksum rand_core 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "7a6fdeb83b075e8266dcc8762c22776f6877a63111121f5f8c7411e5be7eed4b" "checksum rand_core 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)" = "9c33a3c44ca05fa6f1807d8e6743f3824e8509beca625669633be0acbdf509dc" -"checksum rand_core 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)" = "615e683324e75af5d43d8f7a39ffe3ee4a9dc42c5c701167a71dc59c3a493aca" -"checksum rand_hc 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "7b40677c7be09ae76218dc623efbf7b18e34bced3f38883af07bb75630a21bc4" +"checksum rand_core 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)" = "90bde5296fc891b0cef12a6d03ddccc162ce7b2aff54160af9338f8d40df6d19" "checksum rand_hc 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "ca3129af7b92a17112d59ad498c6f81eaf463253766b90396d39ea7a39d6613c" -"checksum rand_isaac 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "ded997c9d5f13925be2a6fd7e66bf1872597f759fd9dd93513dd7e92e5a5ee08" -"checksum rand_jitter 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)" = "1166d5c91dc97b88d1decc3285bb0a99ed84b05cfd0bc2341bdf2d43fc41e39b" "checksum rand_os 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)" = "7b75f676a1e053fc562eafbb47838d67c84801e38fc1ba459e8f180deabd5071" -"checksum rand_pcg 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "abf9b09b01790cfe0364f52bf32995ea3c39f4d2dd011eac241d2914146d0b44" -"checksum rand_xorshift 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "cbf7e9e623549b0e21f6e97cf8ecf247c1a8fd2e8a992ae265314300b2455d5c" "checksum raw-cpuid 7.0.3 (registry+https://github.com/rust-lang/crates.io-index)" = "b4a349ca83373cfa5d6dbb66fd76e58b2cca08da71a5f6400de0a0a6a9bceeaf" "checksum rawkey 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "33ec17a493dcb820725c002bc253f6f3ba4e4dc635e72c238540691b05e43897" "checksum rdrand 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "678054eb77286b51581ba43620cc911abf02758c91f93f479767aed0f90458b2" "checksum readkey 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)" = "d98db94bb4f3e926c8d8186547cd9366d958d753aff5801214d93d38214e8f0f" "checksum redox_syscall 0.1.56 (registry+https://github.com/rust-lang/crates.io-index)" = "2439c63f3f6139d1b57529d16bc3b8bb855230c8efcc5d3a896c8bea7c3b1e84" "checksum redox_users 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "4ecedbca3bf205f8d8f5c2b44d83cd0690e39ee84b951ed649e9f1841132b66d" -"checksum regex 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "88c3d9193984285d544df4a30c23a4e62ead42edf70a4452ceb76dac1ce05c26" +"checksum regex 1.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "dc220bd33bdce8f093101afe22a037b8eb0e5af33592e6a9caafff0d4cb81cbd" "checksum regex-automata 0.1.8 (registry+https://github.com/rust-lang/crates.io-index)" = "92b73c2a1770c255c240eaa4ee600df1704a38dc3feaa6e949e7fcd4f8dc09f9" -"checksum regex-syntax 0.6.11 (registry+https://github.com/rust-lang/crates.io-index)" = "b143cceb2ca5e56d5671988ef8b15615733e7ee16cd348e064333b251b89343f" +"checksum regex-syntax 0.6.12 (registry+https://github.com/rust-lang/crates.io-index)" = 
"11a7e20d1cce64ef2fed88b66d347f88bd9babb82845b2b858f3edbf59a4f716" "checksum remove_dir_all 0.5.2 (registry+https://github.com/rust-lang/crates.io-index)" = "4a83fa3702a688b9359eccba92d153ac33fd2e8462f9e0e3fdf155239ea7792e" "checksum render-tree 0.1.1 (git+https://github.com/wycats/language-reporting)" = "" "checksum result 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "194d8e591e405d1eecf28819740abed6d719d1a2db87fc0bcdedee9a26d55560" -"checksum roxmltree 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "153c367ce9fb8ef7afe637ef92bd083ba0f88b03ef3fcf0287d40be05ae0a61c" +"checksum roxmltree 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)" = "b1a3193e568c6e262f817fd07af085c7f79241a947aedd3779d47eadc170e174" "checksum rusqlite 0.20.0 (registry+https://github.com/rust-lang/crates.io-index)" = "2a194373ef527035645a1bc21b10dc2125f73497e6e155771233eb187aedd051" "checksum rust-argon2 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)" = "4ca4eaef519b494d1f2848fc602d18816fed808a981aedf4f1f00ceb7c9d32cf" "checksum rust-ini 0.13.0 (registry+https://github.com/rust-lang/crates.io-index)" = "3e52c148ef37f8c375d49d5a73aa70713125b7f19095948a923f80afdeb22ec2" -"checksum rustc-demangle 0.1.15 (registry+https://github.com/rust-lang/crates.io-index)" = "a7f4dccf6f4891ebcc0c39f9b6eb1a83b9bf5d747cb439ec6fba4f3b977038af" +"checksum rustc-demangle 0.1.16 (registry+https://github.com/rust-lang/crates.io-index)" = "4c691c0e608126e00913e33f0ccf3727d5fc84573623b8d65b2df340b5201783" "checksum rustc-serialize 0.3.24 (registry+https://github.com/rust-lang/crates.io-index)" = "dcf128d1287d2ea9d80910b5f1120d0b8eede3fbf1abe91c40d39ea7d51e6fda" "checksum rustc_version 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)" = "138e3e0acb6c9fb258b19b67cb8abd63c00679d2851805ea151465464fe9030a" "checksum rustyline 5.0.3 (registry+https://github.com/rust-lang/crates.io-index)" = "4795e277e6e57dec9df62b515cd4991371daa80e8dc8d80d596e58722b89c417" -"checksum ryu 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "c92464b447c0ee8c4fb3824ecc8383b81717b9f1e74ba2e72540aef7b9f82997" -"checksum safemem 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "e133ccc4f4d1cd4f89cc8a7ff618287d56dc7f638b8e38fc32c5fdcadc339dd5" +"checksum ryu 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = "19d2271fa48eaf61e53cc88b4ad9adcbafa2d512c531e7fadb6dc11a4d3656c5" +"checksum safemem 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)" = "d2b08423011dae9a5ca23f07cf57dac3857f5c885d352b76f6d95f4aea9434d0" "checksum same-file 1.0.5 (registry+https://github.com/rust-lang/crates.io-index)" = "585e8ddcedc187886a30fa705c47985c3fa88d06624095856b36ca0b82ff4421" -"checksum schannel 0.1.15 (registry+https://github.com/rust-lang/crates.io-index)" = "f2f6abf258d99c3c1c5c2131d99d064e94b7b3dd5f416483057f308fea253339" +"checksum schannel 0.1.16 (registry+https://github.com/rust-lang/crates.io-index)" = "87f550b06b6cba9c8b8be3ee73f391990116bf527450d2556e9b9ce263b9a021" "checksum semver 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)" = "1d7eb9ef2c18661902cc47e535f9bc51b78acd254da71d375c2f6720d9a40403" "checksum semver-parser 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "388a1df253eca08550bef6c72392cfe7c30914bf41df5269b68cbd6ff8f570a3" "checksum serde 0.8.23 (registry+https://github.com/rust-lang/crates.io-index)" = "9dad3f759919b92c3068c696c15c3d17238234498bbdcc80f2c469606f948ac8" -"checksum serde 1.0.100 
(registry+https://github.com/rust-lang/crates.io-index)" = "f4473e8506b213730ff2061073b48fa51dcc66349219e2e7c5608f0296a1d95a" +"checksum serde 1.0.101 (registry+https://github.com/rust-lang/crates.io-index)" = "9796c9b7ba2ffe7a9ce53c2287dfc48080f4b2b362fcc245a259b3a7201119dd" "checksum serde-hjson 0.8.2 (registry+https://github.com/rust-lang/crates.io-index)" = "0b833c5ad67d52ced5f5938b2980f32a9c1c5ef047f0b4fb3127e7a423c76153" "checksum serde-hjson 0.9.1 (registry+https://github.com/rust-lang/crates.io-index)" = "6a3a4e0ea8a88553209f6cc6cfe8724ecad22e1acf372793c27d995290fe74f8" "checksum serde-value 0.5.3 (registry+https://github.com/rust-lang/crates.io-index)" = "7a663f873dedc4eac1a559d4c6bc0d0b2c34dc5ac4702e105014b8281489e44f" "checksum serde_bytes 0.11.2 (registry+https://github.com/rust-lang/crates.io-index)" = "45af0182ff64abaeea290235eb67da3825a576c5d53e642c4d5b652e12e6effc" -"checksum serde_derive 1.0.98 (registry+https://github.com/rust-lang/crates.io-index)" = "01e69e1b8a631f245467ee275b8c757b818653c6d704cdbcaeb56b56767b529c" -"checksum serde_derive_internals 0.24.1 (registry+https://github.com/rust-lang/crates.io-index)" = "8a80c6c0b1ebbcea4ec2c7e9e2e9fa197a425d17f1afec8ba79fcd1352b18ffb" +"checksum serde_derive 1.0.101 (registry+https://github.com/rust-lang/crates.io-index)" = "4b133a43a1ecd55d4086bd5b4dc6c1751c68b1bfbeba7a5040442022c7e7c02e" "checksum serde_ini 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "eb236687e2bb073a7521c021949be944641e671b8505a94069ca37b656c81139" -"checksum serde_json 1.0.40 (registry+https://github.com/rust-lang/crates.io-index)" = "051c49229f282f7c6f3813f8286cc1e3323e8051823fce42c7ea80fe13521704" +"checksum serde_json 1.0.41 (registry+https://github.com/rust-lang/crates.io-index)" = "2f72eb2a68a7dc3f9a691bfda9305a1c017a6215e5a4545c258500d2099a37c2" "checksum serde_test 0.8.23 (registry+https://github.com/rust-lang/crates.io-index)" = "110b3dbdf8607ec493c22d5d947753282f3bae73c0f56d322af1e8c78e4c23d5" "checksum serde_urlencoded 0.6.1 (registry+https://github.com/rust-lang/crates.io-index)" = "9ec5d77e2d4c73717816afac02670d5c4f534ea95ed430442cad02e7a6e32c97" -"checksum serde_yaml 0.8.9 (registry+https://github.com/rust-lang/crates.io-index)" = "38b08a9a90e5260fe01c6480ec7c811606df6d3a660415808c3c3fa8ed95b582" +"checksum serde_yaml 0.8.11 (registry+https://github.com/rust-lang/crates.io-index)" = "691b17f19fc1ec9d94ec0b5864859290dff279dbd7b03f017afda54eb36c3c35" "checksum shell32-sys 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "9ee04b46101f57121c9da2b151988283b6beb79b34f5bb29a58ee48cb695122c" "checksum shellexpand 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "de7a5b5a9142fd278a10e0209b021a1b85849352e6951f4f914735c976737564" "checksum slab 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)" = "c111b5bd5695e56cffe5129854aa230b39c93a305372fdbb2668ca2394eea9f8" -"checksum sluice 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)" = "ec70d7c3b17c262d4a18f7291c6ce62bf47170915f3b795434d3c5c49a4e59b7" +"checksum sluice 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)" = "0a7d06dfb3e8743bc19e6de8a302277471d08077d68946b307280496dc5a3531" "checksum smallvec 0.6.10 (registry+https://github.com/rust-lang/crates.io-index)" = "ab606a9c5e214920bb66c458cd7be8ef094f813f20fe77a54cc7dbfff220d4b7" "checksum socket2 0.3.11 (registry+https://github.com/rust-lang/crates.io-index)" = "e8b74de517221a2cb01a53349cf54182acdc31a074727d3079068448c0676d85" "checksum sourcefile 0.1.4 
(registry+https://github.com/rust-lang/crates.io-index)" = "4bf77cb82ba8453b42b6ae1d692e4cdc92f9a47beaf89a847c8be83f4e328ad3" -"checksum stackvector 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)" = "1c4725650978235083241fab0fdc8e694c3de37821524e7534a1a9061d1068af" -"checksum static_assertions 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "b4f8de36da215253eb5f24020bfaa0646613b48bf7ebe36cdfa37c3b3b33b241" +"checksum static_assertions 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)" = "7f3eb36b47e512f8f1c9e3d10c2c1965bc992bd9cdb024fa581e2194501c83d3" "checksum strsim 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)" = "8ea5119cdb4c55b55d432abb513a0429384878c15dde60cc77b1c99de1a95a6a" "checksum sublime_fuzzy 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)" = "97bd7ad698ea493a3a7f60c2ffa117c234f341e09f8cc2d39cef10cdde077acf" "checksum subprocess 0.1.18 (registry+https://github.com/rust-lang/crates.io-index)" = "28fc0f40f0c0da73339d347aa7d6d2b90341a95683a47722bc4eebed71ff3c00" "checksum surf 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)" = "018eed64aede455beb88505d50c5c64882bebbe0996d4b660c272e3d8bb6f883" -"checksum syn 0.15.43 (registry+https://github.com/rust-lang/crates.io-index)" = "ee06ea4b620ab59a2267c6b48be16244a3389f8bfa0986bdd15c35b890b00af3" -"checksum syn 1.0.4 (registry+https://github.com/rust-lang/crates.io-index)" = "c65d951ab12d976b61a41cf9ed4531fc19735c6e6d84a4bb1453711e762ec731" -"checksum synstructure 0.10.2 (registry+https://github.com/rust-lang/crates.io-index)" = "02353edf96d6e4dc81aea2d8490a7e9db177bf8acb0e951c24940bf866cb313f" +"checksum syn 1.0.5 (registry+https://github.com/rust-lang/crates.io-index)" = "66850e97125af79138385e9b88339cbcd037e3f28ceab8c5ad98e64f0f1f80bf" +"checksum synstructure 0.12.1 (registry+https://github.com/rust-lang/crates.io-index)" = "3f085a5855930c0441ca1288cf044ea4aecf4f43a91668abdb870b4ba546a203" "checksum syntect 3.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "e80b8831c5a543192ffc3727f01cf0e57579c6ac15558e3048bfb5708892167b" "checksum tempfile 3.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "7a6e24d9338a0a5be79593e2fa15a648add6138caa803e2d5bc782c371732ca9" "checksum term 0.5.2 (registry+https://github.com/rust-lang/crates.io-index)" = "edd106a334b7657c10b7c540a0106114feadeb4dc314513e97df481d5d966f42" @@ -3261,34 +3093,32 @@ dependencies = [ "checksum tokio-io 0.1.12 (registry+https://github.com/rust-lang/crates.io-index)" = "5090db468dad16e1a7a54c8c67280c5e4b544f3d3e018f0b913b400261f85926" "checksum toml 0.4.10 (registry+https://github.com/rust-lang/crates.io-index)" = "758664fc71a3a69038656bee8b6be6477d2a6c315a6b81f7081f591bffa4111f" "checksum toml 0.5.3 (registry+https://github.com/rust-lang/crates.io-index)" = "c7aabe75941d914b72bf3e5d3932ed92ce0664d49d8432305a8b547c37227724" -"checksum typenum 1.10.0 (registry+https://github.com/rust-lang/crates.io-index)" = "612d636f949607bdf9b123b4a6f6d966dedf3ff669f7f045890d3a4a73948169" -"checksum unicase 2.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "a84e5511b2a947f3ae965dcb29b13b7b1691b6e7332cf5dbc1744138d5acb7f6" +"checksum typenum 1.11.2 (registry+https://github.com/rust-lang/crates.io-index)" = "6d2783fe2d6b8c1101136184eb41be8b1ad379e4657050b8aaff0c79ee7575f9" +"checksum unicase 2.5.1 (registry+https://github.com/rust-lang/crates.io-index)" = "2e2e6bd1e59e56598518beb94fd6db628ded570326f0a98c679a304bd9f00150" "checksum unicode-bidi 0.3.4 
(registry+https://github.com/rust-lang/crates.io-index)" = "49f2bd0c6468a8230e1db229cff8029217cf623c767ea5d60bfbd42729ea54d5" "checksum unicode-normalization 0.1.8 (registry+https://github.com/rust-lang/crates.io-index)" = "141339a08b982d942be2ca06ff8b076563cbe223d1befd5450716790d44e2426" "checksum unicode-segmentation 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "1967f4cdfc355b37fd76d2a954fb2ed3871034eb4f26d60537d88795cfc332a9" "checksum unicode-width 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)" = "7007dbd421b92cc6e28410fe7362e2e0a2503394908f417b68ec8d1c364c4e20" -"checksum unicode-xid 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "fc72304796d0818e357ead4e000d19c9c174ab23dc11093ac919054d20a6a7fc" "checksum unicode-xid 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "826e7639553986605ec5979c7dd957c7895e93eabed50ab2ffa7f6128a75097c" -"checksum unreachable 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "382810877fe448991dfc7f0dd6e3ae5d58088fd0ea5e35189655f84e6814fa56" "checksum uom 0.23.1 (registry+https://github.com/rust-lang/crates.io-index)" = "3ef5bbe8385736e498dbb0033361f764ab43a435192513861447b9f7714b3fec" "checksum uom 0.25.0 (registry+https://github.com/rust-lang/crates.io-index)" = "3198c29f199fa8a23d732f4aa21ddc4f4d0a257cb0c2a44afea30145ce2575c1" "checksum url 2.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "75b414f6c464c879d7f9babf951f23bc3743fb7313c081b2e6ca719067ea9d61" "checksum user32-sys 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "4ef4711d107b21b410a3a974b1204d9accc8b10dad75d8324b5d755de1617d47" "checksum utf8parse 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "8772a4ccbb4e89959023bc5b7cb8623a795caa7092d99f3aa9501b9484d4557d" -"checksum uuid 0.7.4 (registry+https://github.com/rust-lang/crates.io-index)" = "90dbc611eb48397705a6b0f6e917da23ae517e4d127123d2cf7674206627d32a" "checksum vcpkg 0.2.7 (registry+https://github.com/rust-lang/crates.io-index)" = "33dd455d0f96e90a75803cfeb7f948768c08d70a6de9a8d2362461935698bf95" "checksum vec_map 0.8.1 (registry+https://github.com/rust-lang/crates.io-index)" = "05c78687fb1a80548ae3250346c3db86a80a7cdd77bda190189f2d0a0987c81a" "checksum version_check 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)" = "914b1a6776c4c929a602fafd8bc742e06365d4bcbe48c30f9cca5824f70dc9dd" "checksum void 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)" = "6a02e4885ed3bc0f2de90ea6dd45ebcbb66dacffe03547fadbb0eeae2770887d" "checksum walkdir 2.2.9 (registry+https://github.com/rust-lang/crates.io-index)" = "9658c94fa8b940eab2250bd5a457f9c48b748420d71293b165c8cdbe2f55f71e" -"checksum wasm-bindgen 0.2.50 (registry+https://github.com/rust-lang/crates.io-index)" = "dcddca308b16cd93c2b67b126c688e5467e4ef2e28200dc7dfe4ae284f2faefc" -"checksum wasm-bindgen-backend 0.2.50 (registry+https://github.com/rust-lang/crates.io-index)" = "f805d9328b5fc7e5c6399960fd1889271b9b58ae17bdb2417472156cc9fafdd0" -"checksum wasm-bindgen-futures 0.3.25 (registry+https://github.com/rust-lang/crates.io-index)" = "73c25810ee684c909488c214f55abcbc560beb62146d352b9588519e73c2fed9" -"checksum wasm-bindgen-macro 0.2.50 (registry+https://github.com/rust-lang/crates.io-index)" = "3ff88201a482abfc63921621f6cb18eb1efd74f136b05e5841e7f8ca434539e9" -"checksum wasm-bindgen-macro-support 0.2.50 (registry+https://github.com/rust-lang/crates.io-index)" = "6a433d89ecdb9f77d46fcf00c8cf9f3467b7de9954d8710c175f61e2e245bb0e" -"checksum 
wasm-bindgen-shared 0.2.50 (registry+https://github.com/rust-lang/crates.io-index)" = "d41fc1bc3570cdf8d108c15e014045fd45a95bb5eb36605f96a90461fc34027d" -"checksum wasm-bindgen-webidl 0.2.50 (registry+https://github.com/rust-lang/crates.io-index)" = "be53d289bf2fa7645a089cfd5c7a34bf4fe94221f58cf86ee42a7b4bc854ff14" -"checksum web-sys 0.3.27 (registry+https://github.com/rust-lang/crates.io-index)" = "6435c477200ad486089a7a72c2bd6c9bdf9740bd7fff868806076218076d8c51" +"checksum wasi 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "b89c3ce4ce14bdc6fb6beaf9ec7928ca331de5df7e5ea278375642a2f478570d" +"checksum wasm-bindgen 0.2.51 (registry+https://github.com/rust-lang/crates.io-index)" = "cd34c5ba0d228317ce388e87724633c57edca3e7531feb4e25e35aaa07a656af" +"checksum wasm-bindgen-backend 0.2.51 (registry+https://github.com/rust-lang/crates.io-index)" = "927196b315c23eed2748442ba675a4c54a1a079d90d9bdc5ad16ce31cf90b15b" +"checksum wasm-bindgen-futures 0.3.27 (registry+https://github.com/rust-lang/crates.io-index)" = "83420b37346c311b9ed822af41ec2e82839bfe99867ec6c54e2da43b7538771c" +"checksum wasm-bindgen-macro 0.2.51 (registry+https://github.com/rust-lang/crates.io-index)" = "92c2442bf04d89792816650820c3fb407af8da987a9f10028d5317f5b04c2b4a" +"checksum wasm-bindgen-macro-support 0.2.51 (registry+https://github.com/rust-lang/crates.io-index)" = "9c075d27b7991c68ca0f77fe628c3513e64f8c477d422b859e03f28751b46fc5" +"checksum wasm-bindgen-shared 0.2.51 (registry+https://github.com/rust-lang/crates.io-index)" = "83d61fe986a7af038dd8b5ec660e5849cbd9f38e7492b9404cc48b2b4df731d1" +"checksum wasm-bindgen-webidl 0.2.51 (registry+https://github.com/rust-lang/crates.io-index)" = "9b979afb0535fe4749906a674082db1211de8aef466331d43232f63accb7c07c" +"checksum web-sys 0.3.28 (registry+https://github.com/rust-lang/crates.io-index)" = "c84440699cd02ca23bed6f045ffb1497bc18a3c2628bd13e2093186faaaacf6b" "checksum weedle 0.10.0 (registry+https://github.com/rust-lang/crates.io-index)" = "3bb43f70885151e629e2a19ce9e50bd730fd436cfd4b666894c9ce4de9141164" "checksum which 2.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = "b57acb10231b9493c8472b20cb57317d0679a49e0bdbee44b3b803a6473af164" "checksum widestring 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "effc0e4ff8085673ea7b9b2e3c73f6bd4d118810c9009ed8f1e16bd96c331db6" @@ -3298,11 +3128,11 @@ dependencies = [ "checksum winapi-i686-pc-windows-gnu 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" "checksum winapi-util 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "7168bab6e1daee33b4557efd0e95d5ca70a03706d39fa5f3fe7a236f584b03c9" "checksum winapi-x86_64-pc-windows-gnu 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" -"checksum wincolor 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = "561ed901ae465d6185fa7864d63fbd5720d0ef718366c9a4dc83cf6170d7e9ba" +"checksum wincolor 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)" = "96f5016b18804d24db43cebf3c77269e7569b8954a8464501c216cc5e070eaa9" "checksum x11 2.18.1 (registry+https://github.com/rust-lang/crates.io-index)" = "39697e3123f715483d311b5826e254b6f3cfebdd83cf7ef3358f579c3d68e235" "checksum x11-clipboard 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "89bd49c06c9eb5d98e6ba6536cf64ac9f7ee3a009b2f53996d405b3944f6bcea" "checksum xcb 0.8.2 
(registry+https://github.com/rust-lang/crates.io-index)" = "5e917a3f24142e9ff8be2414e36c649d47d6cc2ba81f16201cdef96e533e02de" "checksum xdg 2.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "d089681aa106a86fade1b0128fb5daf07d5867a509ab036d99988dec80429a57" "checksum xml-rs 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)" = "541b12c998c5b56aa2b4e6f18f03664eef9a4fd0a246a55594efae6cc2d964b5" -"checksum xmlparser 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)" = "ecec95f00fb0ff019153e64ea520f87d1409769db3e8f4db3ea588638a3e1cee" +"checksum xmlparser 0.10.0 (registry+https://github.com/rust-lang/crates.io-index)" = "8110496c5bcc0d966b0b2da38d5a791aa139eeb0b80e7840a7463c2b806921eb" "checksum yaml-rust 0.4.3 (registry+https://github.com/rust-lang/crates.io-index)" = "65923dd1784f44da1d2c3dbbc5e822045628c590ba72123e1c73d3c230c4434d" diff --git a/Cargo.toml b/Cargo.toml index f51ea06d8..955beeddf 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -55,7 +55,7 @@ surf = "1.0.2" url = "2.1.0" roxmltree = "0.7.0" nom_locate = "1.0.0" -enum-utils = "0.1.1" +nom-tracable = "0.4.0" unicode-xid = "0.2.0" serde_ini = "0.2.0" subprocess = "0.1.18" @@ -65,7 +65,6 @@ hex = "0.3.2" tempfile = "3.1.0" semver = "0.9.0" which = "2.0.1" -uuid = {version = "0.7.4", features = [ "v4", "serde" ]} textwrap = {version = "0.11.0", features = ["term_size"]} shellexpand = "1.0.0" futures-timer = "0.4.0" @@ -75,13 +74,13 @@ bigdecimal = { version = "0.1.0", features = ["serde"] } natural = "0.3.0" serde_urlencoded = "0.6.1" sublime_fuzzy = "0.5" -regex = "1" +regex = {version = "1", optional = true } neso = { version = "0.5.0", optional = true } crossterm = { version = "0.10.2", optional = true } syntect = {version = "3.2.0", optional = true } onig_sys = {version = "=69.1.0", optional = true } -heim = {version = "0.0.8-alpha.1", optional = true } +heim = {version = "0.0.8", optional = true } battery = {version = "0.7.4", optional = true } rawkey = {version = "0.1.2", optional = true } clipboard = {version = "0.5", optional = true } @@ -95,6 +94,8 @@ textview = ["syntect", "onig_sys", "crossterm"] binaryview = ["image", "crossterm"] sys = ["heim", "battery"] ps = ["heim"] +# trace = ["nom-tracable/trace"] +all = ["raw-key", "textview", "binaryview", "sys", "ps", "clipboard", "ptree"] [dependencies.rusqlite] version = "0.20.0" @@ -103,6 +104,10 @@ features = ["bundled", "blob"] [dev-dependencies] pretty_assertions = "0.6.1" +[build-dependencies] +toml = "0.5.3" +serde = { version = "1.0.101", features = ["derive"] } + [lib] name = "nu" path = "src/lib.rs" @@ -138,6 +143,7 @@ path = "src/plugins/skip.rs" [[bin]] name = "nu_plugin_match" path = "src/plugins/match.rs" +required-features = ["regex"] [[bin]] name = "nu_plugin_sys" diff --git a/build.rs b/build.rs new file mode 100644 index 000000000..44a55f957 --- /dev/null +++ b/build.rs @@ -0,0 +1,39 @@ +use serde::Deserialize; +use std::collections::HashMap; +use std::collections::HashSet; +use std::env; +use std::path::Path; + +#[derive(Deserialize)] +struct Feature { + #[allow(unused)] + description: String, + enabled: bool, +} + +fn main() -> Result<(), Box<dyn std::error::Error>> { + let input = env::var("CARGO_MANIFEST_DIR").unwrap(); + let all_on = env::var("NUSHELL_ENABLE_ALL_FLAGS").is_ok(); + let flags: HashSet<String> = env::var("NUSHELL_ENABLE_FLAGS") + .map(|s| s.split(",").map(|s| s.to_string()).collect()) + .unwrap_or_else(|_| HashSet::new()); + + if all_on && !flags.is_empty() { + println!( + "cargo:warning={}", + "Both NUSHELL_ENABLE_ALL_FLAGS and 
NUSHELL_ENABLE_FLAGS were set. You don't need both." + ); + } + + let path = Path::new(&input).join("features.toml"); + + let toml: HashMap<String, Feature> = toml::from_str(&std::fs::read_to_string(path)?)?; + + for (key, value) in toml.iter() { + if value.enabled == true || all_on || flags.contains(key) { + println!("cargo:rustc-cfg={}", key); + } + } + + Ok(()) +} diff --git a/features.toml b/features.toml new file mode 100644 index 000000000..290f673d2 --- /dev/null +++ b/features.toml @@ -0,0 +1,4 @@ +[hintsv1] + +description = "Adding hints based upon error states in the syntax highlighter" +enabled = false diff --git a/src/cli.rs b/src/cli.rs index 5bfd7ff68..0182ad100 100644 --- a/src/cli.rs +++ b/src/cli.rs @@ -1,4 +1,3 @@ -use crate::commands::autoview; use crate::commands::classified::{ ClassifiedCommand, ClassifiedInputStream, ClassifiedPipeline, ExternalCommand, InternalCommand, StreamNext, @@ -13,7 +12,12 @@ pub(crate) use crate::errors::ShellError; use crate::fuzzysearch::{interactive_fuzzy_search, SelectionResult}; use crate::git::current_branch; use crate::parser::registry::Signature; -use crate::parser::{hir, CallNode, Pipeline, PipelineElement, TokenNode}; +use crate::parser::{ + hir, + hir::syntax_shape::{expand_syntax, PipelineShape}, + hir::{expand_external_tokens::expand_external_tokens, tokens_iterator::TokensIterator}, + TokenNode, +}; use crate::prelude::*; use log::{debug, trace}; @@ -24,7 +28,7 @@ use std::error::Error; use std::io::{BufRead, BufReader, Write}; use std::iter::Iterator; use std::path::PathBuf; -use std::sync::atomic::{AtomicBool, Ordering}; +use std::sync::atomic::Ordering; #[derive(Debug)] pub enum MaybeOwned<'a, T> { @@ -75,7 +79,7 @@ fn load_plugin(path: &std::path::Path, context: &mut Context) -> Result<(), Shel let name = params.name.clone(); let fname = fname.to_string(); - if context.has_command(&name) { + if let Some(_) = context.get_command(&name) { trace!("plugin {:?} already loaded.", &name); } else { if params.is_filter { @@ -94,11 +98,17 @@ fn load_plugin(path: &std::path::Path, context: &mut Context) -> Result<(), Shel }, Err(e) => { trace!("incompatible plugin {:?}", input); - Err(ShellError::string(format!("Error: {:?}", e))) + Err(ShellError::untagged_runtime_error(format!( + "Error: {:?}", + e + ))) } } } - Err(e) => Err(ShellError::string(format!("Error: {:?}", e))), + Err(e) => Err(ShellError::untagged_runtime_error(format!( + "Error: {:?}", + e + ))), }; let _ = child.wait(); @@ -315,6 +325,7 @@ pub async fn cli() -> Result<(), Box<dyn Error>> { )]); } } + let _ = load_plugins(&mut context); let config = Config::builder().color_mode(ColorMode::Forced).build(); @@ -328,24 +339,21 @@ pub async fn cli() -> Result<(), Box<dyn Error>> { // we are ok if history does not exist let _ = rl.load_history(&History::path()); - let ctrl_c = Arc::new(AtomicBool::new(false)); - let cc = ctrl_c.clone(); + let cc = context.ctrl_c.clone(); ctrlc::set_handler(move || { cc.store(true, Ordering::SeqCst); }) .expect("Error setting Ctrl-C handler"); let mut ctrlcbreak = false; loop { - if ctrl_c.load(Ordering::SeqCst) { - ctrl_c.store(false, Ordering::SeqCst); + if context.ctrl_c.load(Ordering::SeqCst) { + context.ctrl_c.store(false, Ordering::SeqCst); continue; } let cwd = context.shell_manager.path(); - rl.set_helper(Some(crate::shell::Helper::new( - context.shell_manager.clone(), - ))); + rl.set_helper(Some(crate::shell::Helper::new(context.clone()))); let edit_mode = config::config(Tag::unknown())? 
.get("edit_mode") @@ -429,21 +437,11 @@ pub async fn cli() -> Result<(), Box> { } } - LineResult::Error(mut line, err) => { + LineResult::Error(line, err) => { rl.add_history_entry(line.clone()); - let diag = err.to_diagnostic(); + context.with_host(|host| { - let writer = host.err_termcolor(); - line.push_str(" "); - let files = crate::parser::Files::new(line); - let _ = std::panic::catch_unwind(move || { - let _ = language_reporting::emit( - &mut writer.lock(), - &files, - &diag, - &language_reporting::DefaultConfig, - ); - }); + print_err(err, host, &Text::from(line)); }) } @@ -460,6 +458,14 @@ pub async fn cli() -> Result<(), Box> { Ok(()) } +fn chomp_newline(s: &str) -> &str { + if s.ends_with('\n') { + &s[..s.len() - 1] + } else { + s + } +} + enum LineResult { Success(String), Error(String, ShellError), @@ -472,9 +478,11 @@ async fn process_line(readline: Result, ctx: &mut Context Ok(line) if line.trim() == "" => LineResult::Success(line.clone()), Ok(line) => { - let result = match crate::parser::parse(&line, uuid::Uuid::nil()) { + let line = chomp_newline(line); + + let result = match crate::parser::parse(&line) { Err(err) => { - return LineResult::Error(line.clone(), err); + return LineResult::Error(line.to_string(), err); } Ok(val) => val, @@ -485,7 +493,7 @@ async fn process_line(readline: Result, ctx: &mut Context let mut pipeline = match classify_pipeline(&result, ctx, &Text::from(line)) { Ok(pipeline) => pipeline, - Err(err) => return LineResult::Error(line.clone(), err), + Err(err) => return LineResult::Error(line.to_string(), err), }; match pipeline.commands.last() { @@ -493,7 +501,7 @@ async fn process_line(readline: Result, ctx: &mut Context _ => pipeline .commands .push(ClassifiedCommand::Internal(InternalCommand { - command: whole_stream_command(autoview::Autoview), + name: "autoview".to_string(), name_tag: Tag::unknown(), args: hir::Call::new( Box::new(hir::Expression::synthetic_string("autoview")), @@ -515,16 +523,24 @@ async fn process_line(readline: Result, ctx: &mut Context input = match (item, next) { (None, _) => break, + (Some(ClassifiedCommand::Dynamic(_)), _) + | (_, Some(ClassifiedCommand::Dynamic(_))) => { + return LineResult::Error( + line.to_string(), + ShellError::unimplemented("Dynamic commands"), + ) + } + (Some(ClassifiedCommand::Expr(_)), _) => { return LineResult::Error( - line.clone(), + line.to_string(), ShellError::unimplemented("Expression-only commands"), ) } (_, Some(ClassifiedCommand::Expr(_))) => { return LineResult::Error( - line.clone(), + line.to_string(), ShellError::unimplemented("Expression-only commands"), ) } @@ -532,31 +548,46 @@ async fn process_line(readline: Result, ctx: &mut Context ( Some(ClassifiedCommand::Internal(left)), Some(ClassifiedCommand::External(_)), - ) => match left - .run(ctx, input, Text::from(line), is_first_command) - .await - { + ) => match left.run(ctx, input, Text::from(line), is_first_command) { Ok(val) => ClassifiedInputStream::from_input_stream(val), - Err(err) => return LineResult::Error(line.clone(), err), + Err(err) => return LineResult::Error(line.to_string(), err), }, (Some(ClassifiedCommand::Internal(left)), Some(_)) => { - match left - .run(ctx, input, Text::from(line), is_first_command) - .await - { + match left.run(ctx, input, Text::from(line), is_first_command) { Ok(val) => ClassifiedInputStream::from_input_stream(val), - Err(err) => return LineResult::Error(line.clone(), err), + Err(err) => return LineResult::Error(line.to_string(), err), } } (Some(ClassifiedCommand::Internal(left)), None) => { 
- match left - .run(ctx, input, Text::from(line), is_first_command) - .await - { - Ok(val) => ClassifiedInputStream::from_input_stream(val), - Err(err) => return LineResult::Error(line.clone(), err), + match left.run(ctx, input, Text::from(line), is_first_command) { + Ok(val) => { + use futures::stream::TryStreamExt; + + let mut output_stream: OutputStream = val.into(); + loop { + match output_stream.try_next().await { + Ok(Some(ReturnSuccess::Value(Tagged { + item: Value::Error(e), + .. + }))) => { + return LineResult::Error(line.to_string(), e); + } + Ok(Some(_item)) => { + if ctx.ctrl_c.load(Ordering::SeqCst) { + break; + } + } + _ => { + break; + } + } + } + + return LineResult::Success(line.to_string()); + } + Err(err) => return LineResult::Error(line.to_string(), err), } } @@ -565,20 +596,20 @@ async fn process_line(readline: Result<String, ReadlineError>, ctx: &mut Context Some(ClassifiedCommand::External(_)), ) => match left.run(ctx, input, StreamNext::External).await { Ok(val) => val, - Err(err) => return LineResult::Error(line.clone(), err), + Err(err) => return LineResult::Error(line.to_string(), err), }, (Some(ClassifiedCommand::External(left)), Some(_)) => { match left.run(ctx, input, StreamNext::Internal).await { Ok(val) => val, - Err(err) => return LineResult::Error(line.clone(), err), + Err(err) => return LineResult::Error(line.to_string(), err), } } (Some(ClassifiedCommand::External(left)), None) => { match left.run(ctx, input, StreamNext::Last).await { Ok(val) => val, - Err(err) => return LineResult::Error(line.clone(), err), + Err(err) => return LineResult::Error(line.to_string(), err), } } }; @@ -586,7 +617,7 @@ async fn process_line(readline: Result<String, ReadlineError>, ctx: &mut Context is_first_command = false; } - LineResult::Success(line.clone()) + LineResult::Success(line.to_string()) } Err(ReadlineError::Interrupted) => LineResult::CtrlC, Err(ReadlineError::Eof) => LineResult::Break, @@ -602,95 +633,52 @@ fn classify_pipeline( context: &Context, source: &Text, ) -> Result<ClassifiedPipeline, ShellError> { - let pipeline = pipeline.as_pipeline()?; + let mut pipeline_list = vec![pipeline.clone()]; + let mut iterator = TokensIterator::all(&mut pipeline_list, pipeline.span()); - let Pipeline { parts, .. 
} = pipeline; - - let commands: Result, ShellError> = parts - .iter() - .map(|item| classify_command(&item, context, &source)) - .collect(); - - Ok(ClassifiedPipeline { - commands: commands?, - }) -} - -fn classify_command( - command: &PipelineElement, - context: &Context, - source: &Text, -) -> Result { - let call = command.call(); - - match call { - // If the command starts with `^`, treat it as an external command no matter what - call if call.head().is_external() => { - let name_tag = call.head().expect_external(); - let name = name_tag.slice(source); - - Ok(external_command(call, source, name.tagged(name_tag))) - } - - // Otherwise, if the command is a bare word, we'll need to triage it - call if call.head().is_bare() => { - let head = call.head(); - let name = head.source(source); - - match context.has_command(name) { - // if the command is in the registry, it's an internal command - true => { - let command = context.get_command(name); - let config = command.signature(); - - trace!(target: "nu::build_pipeline", "classifying {:?}", config); - - let args: hir::Call = config.parse_args(call, &context, source)?; - - trace!(target: "nu::build_pipeline", "args :: {}", args.debug(source)); - - Ok(ClassifiedCommand::Internal(InternalCommand { - command, - name_tag: head.tag(), - args, - })) - } - - // otherwise, it's an external command - false => Ok(external_command(call, source, name.tagged(head.tag()))), - } - } - - // If the command is something else (like a number or a variable), that is currently unsupported. - // We might support `$somevar` as a curried command in the future. - call => Err(ShellError::invalid_command(call.head().tag())), - } + expand_syntax( + &PipelineShape, + &mut iterator, + &context.expand_context(source, pipeline.span()), + ) } // Classify this command as an external command, which doesn't give special meaning // to nu syntactic constructs, and passes all arguments to the external command as // strings. 
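The new `print_err` centralizes what the REPL loop used to do inline: convert the `ShellError` to a diagnostic and emit it, with the emitter wrapped in `std::panic::catch_unwind` so a rendering panic cannot take the shell down. A generic illustration of that guard; the closure below stands in for the `language_reporting::emit` call and is not nu's actual renderer:

```rust
// Run a panic-prone rendering step behind catch_unwind and fall back to a
// plain message if it blows up. The default panic hook still logs the panic;
// the point is only that the caller keeps running.
fn render_diagnostic(msg: &str) {
    // stand-in for a real diagnostic renderer that can panic on bad spans
    if msg.contains("bad span") {
        panic!("renderer bug");
    }
    eprintln!("error: {}", msg);
}

fn main() {
    for msg in ["unexpected end of input", "bad span in source"] {
        let outcome = std::panic::catch_unwind(|| render_diagnostic(msg));
        if outcome.is_err() {
            eprintln!("(could not pretty-print the error: {})", msg);
        }
    }
}
```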
-fn external_command( - call: &Tagged, +pub(crate) fn external_command( + tokens: &mut TokensIterator, source: &Text, name: Tagged<&str>, -) -> ClassifiedCommand { - let arg_list_strings: Vec> = match call.children() { - Some(args) => args +) -> Result { + let arg_list_strings = expand_external_tokens(tokens, source)?; + + Ok(ClassifiedCommand::External(ExternalCommand { + name: name.to_string(), + name_tag: name.tag(), + args: arg_list_strings .iter() - .filter_map(|i| match i { - TokenNode::Whitespace(_) => None, - other => Some(other.as_external_arg(source).tagged(other.tag())), + .map(|x| Tagged { + tag: x.span.into(), + item: x.item.clone(), }) .collect(), - None => vec![], - }; - - let (name, tag) = name.into_parts(); - - ClassifiedCommand::External(ExternalCommand { - name: name.to_string(), - name_tag: tag, - args: arg_list_strings, - }) + })) +} + +pub fn print_err(err: ShellError, host: &dyn Host, source: &Text) { + let diag = err.to_diagnostic(); + + let writer = host.err_termcolor(); + let mut source = source.to_string(); + source.push_str(" "); + let files = crate::parser::Files::new(source); + let _ = std::panic::catch_unwind(move || { + let _ = language_reporting::emit( + &mut writer.lock(), + &files, + &diag, + &language_reporting::DefaultConfig, + ); + }); } diff --git a/src/commands.rs b/src/commands.rs index 93729aef6..61a45dbb3 100644 --- a/src/commands.rs +++ b/src/commands.rs @@ -76,6 +76,7 @@ pub(crate) use command::{ UnevaluatedCallInfo, WholeStreamCommand, }; +pub(crate) use classified::ClassifiedCommand; pub(crate) use config::Config; pub(crate) use cp::Cpy; pub(crate) use date::Date; diff --git a/src/commands/autoview.rs b/src/commands/autoview.rs index a0e7e9a8a..4f7d7172a 100644 --- a/src/commands/autoview.rs +++ b/src/commands/autoview.rs @@ -1,9 +1,14 @@ use crate::commands::{RawCommandArgs, WholeStreamCommand}; use crate::errors::ShellError; +use crate::parser::hir::{Expression, NamedArguments}; use crate::prelude::*; +use futures::stream::TryStreamExt; +use std::sync::atomic::Ordering; pub struct Autoview; +const STREAM_PAGE_SIZE: u64 = 50; + #[derive(Deserialize)] pub struct AutoviewArgs {} @@ -31,61 +36,132 @@ impl WholeStreamCommand for Autoview { pub fn autoview( AutoviewArgs {}: AutoviewArgs, - mut context: RunnableContext, + context: RunnableContext, raw: RawCommandArgs, ) -> Result { - Ok(OutputStream::new(async_stream! { - let input = context.input.drain_vec().await; + let binary = context.get_command("binaryview"); + let text = context.get_command("textview"); + let table = context.get_command("table"); - if input.len() > 0 { - if let Tagged { - item: Value::Primitive(Primitive::Binary(_)), - .. - } = input[0usize] - { - let binary = context.get_command("binaryview"); - if let Some(binary) = binary { - let result = binary.run(raw.with_input(input), &context.commands, false); - result.collect::>().await; - } else { - for i in input { - match i.item { - Value::Primitive(Primitive::Binary(b)) => { - use pretty_hex::*; - println!("{:?}", b.hex_dump()); + Ok(OutputStream::new(async_stream! { + let mut output_stream: OutputStream = context.input.into(); + + match output_stream.try_next().await { + Ok(Some(x)) => { + match output_stream.try_next().await { + Ok(Some(y)) => { + let ctrl_c = context.ctrl_c.clone(); + let stream = async_stream! 
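Autoview now pages streaming output: it pulls up to `STREAM_PAGE_SIZE` rows at a time, renders each page through `table`, and passes a `start_number` named argument so row numbering continues across pages. A reduced sketch of that paging loop; the real patch pulls items manually with `try_next`, whereas here `chunks` and a plain print stand in for the table command:

```rust
use futures::stream::{self, StreamExt};

const STREAM_PAGE_SIZE: usize = 50;

fn render_page(start_number: usize, page: &[String]) {
    // the offset plays the role of the `start_number` argument handed to `table`
    for (offset, row) in page.iter().enumerate() {
        println!("{:>4}  {}", start_number + offset, row);
    }
}

fn main() {
    let rows: Vec<String> = (0..120).map(|i| format!("row {}", i)).collect();

    futures::executor::block_on(async {
        let mut pages = stream::iter(rows).chunks(STREAM_PAGE_SIZE);
        let mut current_idx = 0;
        // each iteration is one page: render it, then carry the running row
        // index forward so the next page continues the numbering
        while let Some(page) = pages.next().await {
            render_page(current_idx, &page);
            current_idx += page.len();
        }
    });
}
```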
{ + yield Ok(x); + yield Ok(y); + + loop { + match output_stream.try_next().await { + Ok(Some(z)) => { + if ctrl_c.load(Ordering::SeqCst) { + break; + } + yield Ok(z); + } + _ => break, + } + } + }; + if let Some(table) = table { + let mut new_output_stream: OutputStream = stream.to_output_stream(); + let mut finished = false; + let mut current_idx = 0; + loop { + let mut new_input = VecDeque::new(); + + for _ in 0..STREAM_PAGE_SIZE { + match new_output_stream.try_next().await { + + Ok(Some(a)) => { + if let ReturnSuccess::Value(v) = a { + new_input.push_back(v); + } + } + _ => { + finished = true; + break; + } + } + } + + let raw = raw.clone(); + + let mut command_args = raw.with_input(new_input.into()); + let mut named_args = NamedArguments::new(); + named_args.insert_optional("start_number", Some(Expression::number(current_idx, Tag::unknown()))); + command_args.call_info.args.named = Some(named_args); + + let result = table.run(command_args, &context.commands, false); + result.collect::>().await; + + if finished { + break; + } else { + current_idx += STREAM_PAGE_SIZE; + } } - _ => {} } } - }; - } else if is_single_anchored_text_value(&input) { - let text = context.get_command("textview"); - if let Some(text) = text { - let result = text.run(raw.with_input(input), &context.commands, false); - result.collect::>().await; - } else { - for i in input { - match i.item { - Value::Primitive(Primitive::String(s)) => { - println!("{}", s); + _ => { + if let ReturnSuccess::Value(x) = x { + match x { + Tagged { + item: Value::Primitive(Primitive::String(ref s)), + tag: Tag { anchor, span }, + } if anchor.is_some() => { + if let Some(text) = text { + let mut stream = VecDeque::new(); + stream.push_back(Value::string(s).tagged(Tag { anchor, span })); + let result = text.run(raw.with_input(stream.into()), &context.commands, false); + result.collect::>().await; + } else { + println!("{}", s); + } + } + Tagged { + item: Value::Primitive(Primitive::String(s)), + .. + } => { + println!("{}", s); + } + + Tagged { item: Value::Primitive(Primitive::Binary(ref b)), .. } => { + if let Some(binary) = binary { + let mut stream = VecDeque::new(); + stream.push_back(x.clone()); + let result = binary.run(raw.with_input(stream.into()), &context.commands, false); + result.collect::>().await; + } else { + use pretty_hex::*; + println!("{:?}", b.hex_dump()); + } + } + + Tagged { item: Value::Error(e), .. } => { + yield Err(e); + } + Tagged { item: ref item, .. } => { + if let Some(table) = table { + let mut stream = VecDeque::new(); + stream.push_back(x.clone()); + let result = table.run(raw.with_input(stream.into()), &context.commands, false); + result.collect::>().await; + } else { + println!("{:?}", item); + } + } } - _ => {} } } } - } else if is_single_text_value(&input) { - for i in input { - match i.item { - Value::Primitive(Primitive::String(s)) => { - println!("{}", s); - } - _ => {} - } - } - } else { - let table = context.expect_command("table"); - let result = table.run(raw.with_input(input), &context.commands, false); - result.collect::>().await; + } + _ => { + //println!(""); } } @@ -95,34 +171,3 @@ pub fn autoview( } })) } - -fn is_single_text_value(input: &Vec>) -> bool { - if input.len() != 1 { - return false; - } - if let Tagged { - item: Value::Primitive(Primitive::String(_)), - .. 
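The choice between "single value" and "table" rendering is made by peeking: autoview pulls the first item, then tries for a second; two or more items means stream into the table view, exactly one is dispatched on its type. A sketch of that peek-twice dispatch with a plain stream of strings; the real command glues the two peeked values back onto a fresh stream before handing it to `table`, whereas this sketch just prints them first:

```rust
use futures::stream::{self, Stream, StreamExt};

async fn render(mut input: impl Stream<Item = String> + Unpin) {
    match input.next().await {
        None => {} // empty input: show nothing
        Some(first) => match input.next().await {
            // exactly one value: render it directly
            None => println!("single value: {}", first),
            // at least two values: fall back to the table-style view
            Some(second) => {
                println!("table view:");
                println!("  {}", first);
                println!("  {}", second);
                while let Some(row) = input.next().await {
                    println!("  {}", row);
                }
            }
        },
    }
}

fn main() {
    futures::executor::block_on(async {
        render(stream::iter(vec!["only one value".to_string()])).await;
        render(stream::iter(vec![
            "a".to_string(),
            "b".to_string(),
            "c".to_string(),
        ]))
        .await;
    });
}
```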
- } = input[0] - { - true - } else { - false - } -} - -fn is_single_anchored_text_value(input: &Vec>) -> bool { - if input.len() != 1 { - return false; - } - - if let Tagged { - item: Value::Primitive(Primitive::String(_)), - tag: Tag { anchor, .. }, - } = input[0] - { - anchor != uuid::Uuid::nil() - } else { - false - } -} diff --git a/src/commands/classified.rs b/src/commands/classified.rs index 0e5cd95d8..c2380d4ff 100644 --- a/src/commands/classified.rs +++ b/src/commands/classified.rs @@ -1,12 +1,11 @@ -use crate::commands::Command; use crate::parser::{hir, TokenNode}; use crate::prelude::*; use bytes::{BufMut, BytesMut}; +use derive_new::new; use futures::stream::StreamExt; use futures_codec::{Decoder, Encoder, Framed}; use log::{log_enabled, trace}; use std::io::{Error, ErrorKind}; -use std::sync::Arc; use subprocess::Exec; /// A simple `Codec` implementation that splits up data into lines. @@ -73,25 +72,35 @@ impl ClassifiedInputStream { } } +#[derive(Debug)] pub(crate) struct ClassifiedPipeline { pub(crate) commands: Vec, } +#[derive(Debug, Eq, PartialEq)] pub(crate) enum ClassifiedCommand { #[allow(unused)] Expr(TokenNode), Internal(InternalCommand), + #[allow(unused)] + Dynamic(hir::Call), External(ExternalCommand), } +#[derive(new, Debug, Eq, PartialEq)] pub(crate) struct InternalCommand { - pub(crate) command: Arc, + pub(crate) name: String, pub(crate) name_tag: Tag, pub(crate) args: hir::Call, } +#[derive(new, Debug, Eq, PartialEq)] +pub(crate) struct DynamicCommand { + pub(crate) args: hir::Call, +} + impl InternalCommand { - pub(crate) async fn run( + pub(crate) fn run( self, context: &mut Context, input: ClassifiedInputStream, @@ -100,91 +109,99 @@ impl InternalCommand { ) -> Result { if log_enabled!(log::Level::Trace) { trace!(target: "nu::run::internal", "->"); - trace!(target: "nu::run::internal", "{}", self.command.name()); + trace!(target: "nu::run::internal", "{}", self.name); trace!(target: "nu::run::internal", "{}", self.args.debug(&source)); } let objects: InputStream = trace_stream!(target: "nu::trace_stream::internal", "input" = input.objects); - let result = context.run_command( - self.command, - self.name_tag.clone(), - context.source_map.clone(), - self.args, - &source, - objects, - is_first_command, - ); + let command = context.expect_command(&self.name); + + let result = { + context.run_command( + command, + self.name_tag.clone(), + self.args, + &source, + objects, + is_first_command, + ) + }; let result = trace_out_stream!(target: "nu::trace_stream::internal", source: &source, "output" = result); let mut result = result.values; + let mut context = context.clone(); - let mut stream = VecDeque::new(); - while let Some(item) = result.next().await { - match item? { - ReturnSuccess::Action(action) => match action { - CommandAction::ChangePath(path) => { - context.shell_manager.set_path(path); - } - CommandAction::AddAnchorLocation(uuid, anchor_location) => { - context.add_anchor_location(uuid, anchor_location); - } - CommandAction::Exit => std::process::exit(0), // TODO: save history.txt - CommandAction::EnterHelpShell(value) => { - match value { - Tagged { - item: Value::Primitive(Primitive::String(cmd)), - tag, - } => { - context.shell_manager.insert_at_current(Box::new( - HelpShell::for_command( - Value::string(cmd).tagged(tag), - &context.registry(), - )?, - )); - } - _ => { - context.shell_manager.insert_at_current(Box::new( - HelpShell::index(&context.registry())?, - )); + let stream = async_stream! 
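`InternalCommand` and the new `DynamicCommand` now get constructors from the `derive_new` crate instead of hand-written `new` functions. A minimal illustration of what `#[derive(new)]` generates; the field names mirror the patch, but the types are simplified stand-ins for nu's `Tag` and `hir::Call`:

```rust
use derive_new::new;

#[derive(new, Debug, Eq, PartialEq)]
struct InternalCommand {
    name: String,
    name_tag: u64,     // stand-in for nu's `Tag`
    args: Vec<String>, // stand-in for `hir::Call`
}

fn main() {
    // derive_new generates `InternalCommand::new(name, name_tag, args)`,
    // taking the fields in declaration order
    let cmd = InternalCommand::new("autoview".to_string(), 0, vec![]);
    println!("{:?}", cmd);
}
```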
{ + while let Some(item) = result.next().await { + match item { + Ok(ReturnSuccess::Action(action)) => match action { + CommandAction::ChangePath(path) => { + context.shell_manager.set_path(path); + } + CommandAction::Exit => std::process::exit(0), // TODO: save history.txt + CommandAction::EnterHelpShell(value) => { + match value { + Tagged { + item: Value::Primitive(Primitive::String(cmd)), + tag, + } => { + context.shell_manager.insert_at_current(Box::new( + HelpShell::for_command( + Value::string(cmd).tagged(tag), + &context.registry(), + ).unwrap(), + )); + } + _ => { + context.shell_manager.insert_at_current(Box::new( + HelpShell::index(&context.registry()).unwrap(), + )); + } } } - } - CommandAction::EnterValueShell(value) => { - context - .shell_manager - .insert_at_current(Box::new(ValueShell::new(value))); - } - CommandAction::EnterShell(location) => { - context.shell_manager.insert_at_current(Box::new( - FilesystemShell::with_location(location, context.registry().clone())?, - )); - } - CommandAction::PreviousShell => { - context.shell_manager.prev(); - } - CommandAction::NextShell => { - context.shell_manager.next(); - } - CommandAction::LeaveShell => { - context.shell_manager.remove_at_current(); - if context.shell_manager.is_empty() { - std::process::exit(0); // TODO: save history.txt + CommandAction::EnterValueShell(value) => { + context + .shell_manager + .insert_at_current(Box::new(ValueShell::new(value))); } - } - }, + CommandAction::EnterShell(location) => { + context.shell_manager.insert_at_current(Box::new( + FilesystemShell::with_location(location, context.registry().clone()).unwrap(), + )); + } + CommandAction::PreviousShell => { + context.shell_manager.prev(); + } + CommandAction::NextShell => { + context.shell_manager.next(); + } + CommandAction::LeaveShell => { + context.shell_manager.remove_at_current(); + if context.shell_manager.is_empty() { + std::process::exit(0); // TODO: save history.txt + } + } + }, - ReturnSuccess::Value(v) => { - stream.push_back(v); + Ok(ReturnSuccess::Value(v)) => { + yield Ok(v); + } + + Err(x) => { + yield Ok(Value::Error(x).tagged_unknown()); + break; + } } } - } + }; - Ok(stream.into()) + Ok(stream.to_input_stream()) } } +#[derive(Debug, Eq, PartialEq)] pub(crate) struct ExternalCommand { pub(crate) name: String, @@ -192,6 +209,7 @@ pub(crate) struct ExternalCommand { pub(crate) args: Vec>, } +#[derive(Debug)] pub(crate) enum StreamNext { Last, External, @@ -221,6 +239,8 @@ impl ExternalCommand { process = Exec::cmd(&self.name); + trace!(target: "nu::run::external", "command = {:?}", process); + if arg_string.contains("$it") { let mut first = true; @@ -239,7 +259,11 @@ impl ExternalCommand { tag, )); } else { - return Err(ShellError::string("Error: $it needs string data")); + return Err(ShellError::labeled_error( + "Error: $it needs string data", + "given something else", + name_tag, + )); } } if !first { @@ -275,6 +299,8 @@ impl ExternalCommand { process = process.cwd(context.shell_manager.path()); + trace!(target: "nu::run::external", "cwd = {:?}", context.shell_manager.path()); + let mut process = match stream_next { StreamNext::Last => process, StreamNext::External | StreamNext::Internal => { @@ -282,43 +308,59 @@ impl ExternalCommand { } }; + trace!(target: "nu::run::external", "set up stdout pipe"); + if let Some(stdin) = stdin { process = process.stdin(stdin); } - let mut popen = process.popen()?; + trace!(target: "nu::run::external", "set up stdin pipe"); + trace!(target: "nu::run::external", "built process {:?}", process); 
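Instead of buffering every internal-command result into a `VecDeque`, the runner now wraps the loop in an `async_stream!` block: values are yielded as they arrive and an error is converted into an ordinary item before the stream ends. The shape of that change, sketched with the public `async-stream` crate (nu's macro of the same name plays the equivalent role here; the item types are plain stand-ins):

```rust
use async_stream::stream;
use futures::pin_mut;
use futures::stream::StreamExt;

fn main() {
    futures::executor::block_on(async {
        let results: Vec<Result<i32, String>> = vec![Ok(1), Ok(2), Err("boom".into()), Ok(3)];

        // yield successes as they come; convert the first error into an
        // ordinary item and stop, mirroring the
        // `Value::Error(x).tagged_unknown()` conversion in the patch
        let out = stream! {
            for item in results {
                match item {
                    Ok(v) => yield format!("value {}", v),
                    Err(e) => {
                        yield format!("error: {}", e);
                        break;
                    }
                }
            }
        };
        pin_mut!(out);

        while let Some(line) = out.next().await {
            println!("{}", line);
        }
    });
}
```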
- match stream_next { - StreamNext::Last => { - let _ = popen.detach(); - loop { - match popen.poll() { - None => { - let _ = std::thread::sleep(std::time::Duration::new(0, 100000000)); - } - _ => { - let _ = popen.terminate(); - break; + let popen = process.popen(); + + trace!(target: "nu::run::external", "next = {:?}", stream_next); + + if let Ok(mut popen) = popen { + match stream_next { + StreamNext::Last => { + let _ = popen.detach(); + loop { + match popen.poll() { + None => { + let _ = std::thread::sleep(std::time::Duration::new(0, 100000000)); + } + _ => { + let _ = popen.terminate(); + break; + } } } + Ok(ClassifiedInputStream::new()) + } + StreamNext::External => { + let _ = popen.detach(); + let stdout = popen.stdout.take().unwrap(); + Ok(ClassifiedInputStream::from_stdout(stdout)) + } + StreamNext::Internal => { + let _ = popen.detach(); + let stdout = popen.stdout.take().unwrap(); + let file = futures::io::AllowStdIo::new(stdout); + let stream = Framed::new(file, LinesCodec {}); + let stream = + stream.map(move |line| Value::string(line.unwrap()).tagged(&name_tag)); + Ok(ClassifiedInputStream::from_input_stream( + stream.boxed() as BoxStream<'static, Tagged> + )) } - Ok(ClassifiedInputStream::new()) - } - StreamNext::External => { - let _ = popen.detach(); - let stdout = popen.stdout.take().unwrap(); - Ok(ClassifiedInputStream::from_stdout(stdout)) - } - StreamNext::Internal => { - let _ = popen.detach(); - let stdout = popen.stdout.take().unwrap(); - let file = futures::io::AllowStdIo::new(stdout); - let stream = Framed::new(file, LinesCodec {}); - let stream = stream.map(move |line| Value::string(line.unwrap()).tagged(name_tag)); - Ok(ClassifiedInputStream::from_input_stream( - stream.boxed() as BoxStream<'static, Tagged> - )) } + } else { + return Err(ShellError::labeled_error( + "Command not found", + "command not found", + name_tag, + )); } } } diff --git a/src/commands/command.rs b/src/commands/command.rs index 95732abac..5f3f4809b 100644 --- a/src/commands/command.rs +++ b/src/commands/command.rs @@ -1,4 +1,3 @@ -use crate::context::{AnchorLocation, SourceMap}; use crate::data::Value; use crate::errors::ShellError; use crate::evaluate::Scope; @@ -11,13 +10,12 @@ use serde::{Deserialize, Serialize}; use std::fmt; use std::ops::Deref; use std::path::PathBuf; -use uuid::Uuid; +use std::sync::atomic::AtomicBool; #[derive(Deserialize, Serialize, Debug, Clone)] pub struct UnevaluatedCallInfo { pub args: hir::Call, pub source: Text, - pub source_map: SourceMap, pub name_tag: Tag, } @@ -37,7 +35,6 @@ impl UnevaluatedCallInfo { Ok(CallInfo { args, - source_map: self.source_map, name_tag: self.name_tag, }) } @@ -46,7 +43,6 @@ impl UnevaluatedCallInfo { #[derive(Deserialize, Serialize, Debug, Clone)] pub struct CallInfo { pub args: registry::EvaluatedArgs, - pub source_map: SourceMap, pub name_tag: Tag, } @@ -62,7 +58,7 @@ impl CallInfo { args: T::deserialize(&mut deserializer)?, context: RunnablePerItemContext { shell_manager: shell_manager.clone(), - name: self.name_tag, + name: self.name_tag.clone(), }, callback, }) @@ -73,6 +69,7 @@ impl CallInfo { #[get = "pub(crate)"] pub struct CommandArgs { pub host: Arc>, + pub ctrl_c: Arc, pub shell_manager: ShellManager, pub call_info: UnevaluatedCallInfo, pub input: InputStream, @@ -82,6 +79,7 @@ pub struct CommandArgs { #[get = "pub(crate)"] pub struct RawCommandArgs { pub host: Arc>, + pub ctrl_c: Arc, pub shell_manager: ShellManager, pub call_info: UnevaluatedCallInfo, } @@ -90,6 +88,7 @@ impl RawCommandArgs { pub fn 
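Spawning the external process now goes through a fallible `popen()` whose failure is reported as a labeled "Command not found" error rather than bubbled up with `?`. A much-reduced sketch using the same `subprocess` crate, but capturing output instead of wiring up nu's stdin/stdout streaming, and with a plain `String` error in place of `ShellError`:

```rust
use subprocess::{Exec, Redirection};

fn run_external(name: &str, args: &[&str]) -> Result<String, String> {
    let exec = Exec::cmd(name)
        .args(args)
        .stdout(Redirection::Pipe);

    // a spawn failure (e.g. the binary does not exist) becomes a
    // user-facing "command not found" message instead of an unwound error
    match exec.capture() {
        Ok(data) => Ok(data.stdout_str()),
        Err(_) => Err(format!("Command not found: {}", name)),
    }
}

fn main() {
    match run_external("echo", &["hello"]) {
        Ok(out) => print!("{}", out),
        Err(e) => eprintln!("{}", e),
    }
}
```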
with_input(self, input: Vec>) -> CommandArgs { CommandArgs { host: self.host, + ctrl_c: self.ctrl_c, shell_manager: self.shell_manager, call_info: self.call_info, input: input.into(), @@ -109,12 +108,14 @@ impl CommandArgs { registry: ®istry::CommandRegistry, ) -> Result { let host = self.host.clone(); + let ctrl_c = self.ctrl_c.clone(); let shell_manager = self.shell_manager.clone(); let input = self.input; let call_info = self.call_info.evaluate(registry, &Scope::empty())?; Ok(EvaluatedWholeStreamCommandArgs::new( host, + ctrl_c, shell_manager, call_info, input, @@ -127,12 +128,13 @@ impl CommandArgs { callback: fn(T, RunnableContext) -> Result, ) -> Result, ShellError> { let shell_manager = self.shell_manager.clone(); - let source_map = self.call_info.source_map.clone(); let host = self.host.clone(); + let ctrl_c = self.ctrl_c.clone(); let args = self.evaluate_once(registry)?; + let call_info = args.call_info.clone(); let (input, args) = args.split(); let name_tag = args.call_info.name_tag; - let mut deserializer = ConfigDeserializer::from_call_info(args.call_info); + let mut deserializer = ConfigDeserializer::from_call_info(call_info); Ok(RunnableArgs { args: T::deserialize(&mut deserializer)?, @@ -141,8 +143,8 @@ impl CommandArgs { commands: registry.clone(), shell_manager, name: name_tag, - source_map, host, + ctrl_c, }, callback, }) @@ -155,17 +157,20 @@ impl CommandArgs { ) -> Result, ShellError> { let raw_args = RawCommandArgs { host: self.host.clone(), + ctrl_c: self.ctrl_c.clone(), shell_manager: self.shell_manager.clone(), call_info: self.call_info.clone(), }; let shell_manager = self.shell_manager.clone(); - let source_map = self.call_info.source_map.clone(); let host = self.host.clone(); + let ctrl_c = self.ctrl_c.clone(); let args = self.evaluate_once(registry)?; + let call_info = args.call_info.clone(); + let (input, args) = args.split(); let name_tag = args.call_info.name_tag; - let mut deserializer = ConfigDeserializer::from_call_info(args.call_info); + let mut deserializer = ConfigDeserializer::from_call_info(call_info.clone()); Ok(RunnableRawArgs { args: T::deserialize(&mut deserializer)?, @@ -174,8 +179,8 @@ impl CommandArgs { commands: registry.clone(), shell_manager, name: name_tag, - source_map, host, + ctrl_c, }, raw_args, callback, @@ -198,18 +203,12 @@ pub struct RunnableContext { pub input: InputStream, pub shell_manager: ShellManager, pub host: Arc>, + pub ctrl_c: Arc, pub commands: CommandRegistry, - pub source_map: SourceMap, pub name: Tag, } impl RunnableContext { - pub fn expect_command(&self, name: &str) -> Arc { - self.commands - .get_command(name) - .expect(&format!("Expected command {}", name)) - } - pub fn get_command(&self, name: &str) -> Option> { self.commands.get_command(name) } @@ -270,6 +269,7 @@ impl Deref for EvaluatedWholeStreamCommandArgs { impl EvaluatedWholeStreamCommandArgs { pub fn new( host: Arc>, + ctrl_c: Arc, shell_manager: ShellManager, call_info: CallInfo, input: impl Into, @@ -277,6 +277,7 @@ impl EvaluatedWholeStreamCommandArgs { EvaluatedWholeStreamCommandArgs { args: EvaluatedCommandArgs { host, + ctrl_c, shell_manager, call_info, }, @@ -285,7 +286,7 @@ impl EvaluatedWholeStreamCommandArgs { } pub fn name_tag(&self) -> Tag { - self.args.call_info.name_tag + self.args.call_info.name_tag.clone() } pub fn parts(self) -> (InputStream, registry::EvaluatedArgs) { @@ -317,12 +318,14 @@ impl Deref for EvaluatedFilterCommandArgs { impl EvaluatedFilterCommandArgs { pub fn new( host: Arc>, + ctrl_c: Arc, shell_manager: ShellManager, 
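Commands now receive a shared `Arc<AtomicBool>` (`ctrl_c`) they can poll to bail out when the user interrupts, as seen in the `ctrl_c.load(Ordering::SeqCst)` checks earlier in this patch. A minimal sketch of producing and consuming such a flag; the `ctrlc` crate used to set it is an assumption for illustration, since the wiring happens outside the hunks shown here:

```rust
use std::sync::atomic::{AtomicBool, Ordering};
use std::sync::Arc;
use std::time::Duration;

fn main() {
    let ctrl_c = Arc::new(AtomicBool::new(false));

    // assumption: a signal handler flips the shared flag; nu passes the same
    // Arc down through CommandArgs/RawCommandArgs instead
    let handler_flag = ctrl_c.clone();
    ctrlc::set_handler(move || {
        handler_flag.store(true, Ordering::SeqCst);
    })
    .expect("failed to install ctrl-c handler");

    // a long-running "command" polls the flag between units of work
    for i in 0..60 {
        if ctrl_c.load(Ordering::SeqCst) {
            eprintln!("interrupted");
            break;
        }
        println!("working... {}", i);
        std::thread::sleep(Duration::from_millis(100));
    }
}
```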
call_info: CallInfo, ) -> EvaluatedFilterCommandArgs { EvaluatedFilterCommandArgs { args: EvaluatedCommandArgs { host, + ctrl_c, shell_manager, call_info, }, @@ -334,6 +337,7 @@ impl EvaluatedFilterCommandArgs { #[get = "pub(crate)"] pub struct EvaluatedCommandArgs { pub host: Arc>, + pub ctrl_c: Arc, pub shell_manager: ShellManager, pub call_info: CallInfo, } @@ -376,7 +380,6 @@ impl EvaluatedCommandArgs { #[derive(Debug, Serialize, Deserialize)] pub enum CommandAction { ChangePath(String), - AddAnchorLocation(Uuid, AnchorLocation), Exit, EnterShell(String), EnterValueShell(Tagged), @@ -390,9 +393,6 @@ impl ToDebug for CommandAction { fn fmt_debug(&self, f: &mut fmt::Formatter, _source: &str) -> fmt::Result { match self { CommandAction::ChangePath(s) => write!(f, "action:change-path={}", s), - CommandAction::AddAnchorLocation(u, source) => { - write!(f, "action:add-span-source={}@{:?}", u, source) - } CommandAction::Exit => write!(f, "action:exit"), CommandAction::EnterShell(s) => write!(f, "action:enter-shell={}", s), CommandAction::EnterValueShell(t) => { @@ -507,6 +507,15 @@ pub enum Command { PerItem(Arc), } +impl std::fmt::Debug for Command { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + Command::WholeStream(command) => write!(f, "WholeStream({})", command.name()), + Command::PerItem(command) => write!(f, "PerItem({})", command.name()), + } + } +} + impl Command { pub fn name(&self) -> &str { match self { @@ -555,6 +564,7 @@ impl Command { ) -> OutputStream { let raw_args = RawCommandArgs { host: args.host, + ctrl_c: args.ctrl_c, shell_manager: args.shell_manager, call_info: args.call_info, }; @@ -624,6 +634,7 @@ impl WholeStreamCommand for FnFilterCommand { ) -> Result { let CommandArgs { host, + ctrl_c, shell_manager, call_info, input, @@ -641,8 +652,12 @@ impl WholeStreamCommand for FnFilterCommand { Ok(args) => args, }; - let args = - EvaluatedFilterCommandArgs::new(host.clone(), shell_manager.clone(), call_info); + let args = EvaluatedFilterCommandArgs::new( + host.clone(), + ctrl_c.clone(), + shell_manager.clone(), + call_info, + ); match func(args) { Err(err) => return OutputStream::from(vec![Err(err)]).values, diff --git a/src/commands/config.rs b/src/commands/config.rs index 3b36c88fa..82fbbf1db 100644 --- a/src/commands/config.rs +++ b/src/commands/config.rs @@ -58,7 +58,7 @@ pub fn config( }: ConfigArgs, RunnableContext { name, .. 
}: RunnableContext, ) -> Result { - let name_span = name; + let name_span = name.clone(); let configuration = if let Some(supplied) = load { Some(supplied.item().clone()) @@ -70,9 +70,9 @@ pub fn config( if let Some(v) = get { let key = v.to_string(); - let value = result - .get(&key) - .ok_or_else(|| ShellError::string(&format!("Missing key {} in config", key)))?; + let value = result.get(&key).ok_or_else(|| { + ShellError::labeled_error(&format!("Missing key in config"), "key", v.tag()) + })?; let mut results = VecDeque::new(); @@ -120,10 +120,11 @@ pub fn config( result.swap_remove(&key); config::write(&result, &configuration)?; } else { - return Err(ShellError::string(&format!( + return Err(ShellError::labeled_error( "{} does not exist in config", - key - ))); + "key", + v.tag(), + )); } let obj = VecDeque::from_iter(vec![Value::Row(result.into()).tagged(v.tag())]); diff --git a/src/commands/date.rs b/src/commands/date.rs index 6df9e2720..bff6b550f 100644 --- a/src/commands/date.rs +++ b/src/commands/date.rs @@ -39,27 +39,27 @@ where { let mut indexmap = IndexMap::new(); - indexmap.insert("year".to_string(), Value::int(dt.year()).tagged(tag)); - indexmap.insert("month".to_string(), Value::int(dt.month()).tagged(tag)); - indexmap.insert("day".to_string(), Value::int(dt.day()).tagged(tag)); - indexmap.insert("hour".to_string(), Value::int(dt.hour()).tagged(tag)); - indexmap.insert("minute".to_string(), Value::int(dt.minute()).tagged(tag)); - indexmap.insert("second".to_string(), Value::int(dt.second()).tagged(tag)); + indexmap.insert("year".to_string(), Value::int(dt.year()).tagged(&tag)); + indexmap.insert("month".to_string(), Value::int(dt.month()).tagged(&tag)); + indexmap.insert("day".to_string(), Value::int(dt.day()).tagged(&tag)); + indexmap.insert("hour".to_string(), Value::int(dt.hour()).tagged(&tag)); + indexmap.insert("minute".to_string(), Value::int(dt.minute()).tagged(&tag)); + indexmap.insert("second".to_string(), Value::int(dt.second()).tagged(&tag)); let tz = dt.offset(); indexmap.insert( "timezone".to_string(), - Value::string(format!("{}", tz)).tagged(tag), + Value::string(format!("{}", tz)).tagged(&tag), ); - Value::Row(Dictionary::from(indexmap)).tagged(tag) + Value::Row(Dictionary::from(indexmap)).tagged(&tag) } pub fn date(args: CommandArgs, registry: &CommandRegistry) -> Result { let args = args.evaluate_once(registry)?; let mut date_out = VecDeque::new(); - let tag = args.call_info.name_tag; + let tag = args.call_info.name_tag.clone(); let value = if args.has("utc") { let utc: DateTime = Utc::now(); diff --git a/src/commands/echo.rs b/src/commands/echo.rs index 21188f54f..4483f9137 100644 --- a/src/commands/echo.rs +++ b/src/commands/echo.rs @@ -35,7 +35,7 @@ fn run( _registry: &CommandRegistry, _raw_args: &RawCommandArgs, ) -> Result { - let name = call_info.name_tag; + let name = call_info.name_tag.clone(); let mut output = String::new(); @@ -54,11 +54,10 @@ fn run( output.push_str(&s); } _ => { - return Err(ShellError::labeled_error( - "Expect a string from pipeline", - "not a string-compatible value", - i.tag(), - )); + return Err(ShellError::type_error( + "a string-compatible value", + i.tagged_type_name(), + )) } } } diff --git a/src/commands/enter.rs b/src/commands/enter.rs index 2d96fe865..4a400241e 100644 --- a/src/commands/enter.rs +++ b/src/commands/enter.rs @@ -15,7 +15,7 @@ impl PerItemCommand for Enter { } fn signature(&self) -> registry::Signature { - Signature::build("enter").required("location", SyntaxShape::Block) + 
Signature::build("enter").required("location", SyntaxShape::Path) } fn usage(&self) -> &str { @@ -33,14 +33,14 @@ impl PerItemCommand for Enter { let raw_args = raw_args.clone(); match call_info.args.expect_nth(0)? { Tagged { - item: Value::Primitive(Primitive::String(location)), + item: Value::Primitive(Primitive::Path(location)), .. } => { - let location = location.to_string(); - let location_clone = location.to_string(); + let location_string = location.display().to_string(); + let location_clone = location_string.clone(); if location.starts_with("help") { - let spec = location.split(":").collect::>(); + let spec = location_string.split(":").collect::>(); let (_, command) = (spec[0], spec[1]); @@ -67,7 +67,7 @@ impl PerItemCommand for Enter { let full_path = std::path::PathBuf::from(cwd); - let (file_extension, contents, contents_tag, anchor_location) = + let (file_extension, contents, contents_tag) = crate::commands::open::fetch( &full_path, &location_clone, @@ -75,18 +75,9 @@ impl PerItemCommand for Enter { ) .await.unwrap(); - if contents_tag.anchor != uuid::Uuid::nil() { - // If we have loaded something, track its source - yield ReturnSuccess::action(CommandAction::AddAnchorLocation( - contents_tag.anchor, - anchor_location, - )); - } - - match contents { Value::Primitive(Primitive::String(_)) => { - let tagged_contents = contents.tagged(contents_tag); + let tagged_contents = contents.tagged(&contents_tag); if let Some(extension) = file_extension { let command_name = format!("from-{}", extension); @@ -95,6 +86,7 @@ impl PerItemCommand for Enter { { let new_args = RawCommandArgs { host: raw_args.host, + ctrl_c: raw_args.ctrl_c, shell_manager: raw_args.shell_manager, call_info: UnevaluatedCallInfo { args: crate::parser::hir::Call { @@ -103,7 +95,6 @@ impl PerItemCommand for Enter { named: None, }, source: raw_args.call_info.source, - source_map: raw_args.call_info.source_map, name_tag: raw_args.call_info.name_tag, }, }; @@ -123,7 +114,7 @@ impl PerItemCommand for Enter { yield Ok(ReturnSuccess::Action(CommandAction::EnterValueShell( Tagged { item, - tag: contents_tag, + tag: contents_tag.clone(), }))); } x => yield x, diff --git a/src/commands/env.rs b/src/commands/env.rs index c0af78555..0572b499c 100644 --- a/src/commands/env.rs +++ b/src/commands/env.rs @@ -37,22 +37,22 @@ pub fn get_environment(tag: Tag) -> Result, Box Result, Box Result { let args = args.evaluate_once(registry)?; let mut env_out = VecDeque::new(); - let tag = args.call_info.name_tag; + let tag = args.call_info.name_tag.clone(); let value = get_environment(tag)?; env_out.push_back(value); diff --git a/src/commands/fetch.rs b/src/commands/fetch.rs index 652ec77eb..e66536729 100644 --- a/src/commands/fetch.rs +++ b/src/commands/fetch.rs @@ -10,7 +10,6 @@ use mime::Mime; use std::path::PathBuf; use std::str::FromStr; use surf::mime; -use uuid::Uuid; pub struct Fetch; impl PerItemCommand for Fetch { @@ -44,16 +43,18 @@ fn run( registry: &CommandRegistry, raw_args: &RawCommandArgs, ) -> Result { - let path = match call_info - .args - .nth(0) - .ok_or_else(|| ShellError::string(&format!("No file or directory specified")))? - { + let path = match call_info.args.nth(0).ok_or_else(|| { + ShellError::labeled_error( + "No file or directory specified", + "for command", + &call_info.name_tag, + ) + })? 
{ file => file, }; let path_buf = path.as_path()?; let path_str = path_buf.display().to_string(); - let path_span = path.span(); + let path_span = path.tag.span; let has_raw = call_info.args.has("raw"); let registry = registry.clone(); let raw_args = raw_args.clone(); @@ -66,7 +67,7 @@ fn run( yield Err(e); return; } - let (file_extension, contents, contents_tag, anchor_location) = result.unwrap(); + let (file_extension, contents, contents_tag) = result.unwrap(); let file_extension = if has_raw { None @@ -76,21 +77,14 @@ fn run( file_extension.or(path_str.split('.').last().map(String::from)) }; - if contents_tag.anchor != uuid::Uuid::nil() { - // If we have loaded something, track its source - yield ReturnSuccess::action(CommandAction::AddAnchorLocation( - contents_tag.anchor, - anchor_location, - )); - } - - let tagged_contents = contents.tagged(contents_tag); + let tagged_contents = contents.tagged(&contents_tag); if let Some(extension) = file_extension { let command_name = format!("from-{}", extension); if let Some(converter) = registry.get_command(&command_name) { let new_args = RawCommandArgs { host: raw_args.host, + ctrl_c: raw_args.ctrl_c, shell_manager: raw_args.shell_manager, call_info: UnevaluatedCallInfo { args: crate::parser::hir::Call { @@ -99,7 +93,6 @@ fn run( named: None }, source: raw_args.call_info.source, - source_map: raw_args.call_info.source_map, name_tag: raw_args.call_info.name_tag, } }; @@ -113,7 +106,7 @@ fn run( } } Ok(ReturnSuccess::Value(Tagged { item, .. })) => { - yield Ok(ReturnSuccess::Value(Tagged { item, tag: contents_tag })); + yield Ok(ReturnSuccess::Value(Tagged { item, tag: contents_tag.clone() })); } x => yield x, } @@ -129,10 +122,7 @@ fn run( Ok(stream.to_output_stream()) } -pub async fn fetch( - location: &str, - span: Span, -) -> Result<(Option, Value, Tag, AnchorLocation), ShellError> { +pub async fn fetch(location: &str, span: Span) -> Result<(Option, Value, Tag), ShellError> { if let Err(_) = url::Url::parse(location) { return Err(ShellError::labeled_error( "Incomplete or incorrect url", @@ -158,9 +148,8 @@ pub async fn fetch( })?), Tag { span, - anchor: Uuid::new_v4(), + anchor: Some(AnchorLocation::Url(location.to_string())), }, - AnchorLocation::Url(location.to_string()), )), (mime::APPLICATION, mime::JSON) => Ok(( Some("json".to_string()), @@ -173,9 +162,8 @@ pub async fn fetch( })?), Tag { span, - anchor: Uuid::new_v4(), + anchor: Some(AnchorLocation::Url(location.to_string())), }, - AnchorLocation::Url(location.to_string()), )), (mime::APPLICATION, mime::OCTET_STREAM) => { let buf: Vec = r.body_bytes().await.map_err(|_| { @@ -190,9 +178,8 @@ pub async fn fetch( Value::binary(buf), Tag { span, - anchor: Uuid::new_v4(), + anchor: Some(AnchorLocation::Url(location.to_string())), }, - AnchorLocation::Url(location.to_string()), )) } (mime::IMAGE, mime::SVG) => Ok(( @@ -206,9 +193,8 @@ pub async fn fetch( })?), Tag { span, - anchor: Uuid::new_v4(), + anchor: Some(AnchorLocation::Url(location.to_string())), }, - AnchorLocation::Url(location.to_string()), )), (mime::IMAGE, image_ty) => { let buf: Vec = r.body_bytes().await.map_err(|_| { @@ -223,9 +209,8 @@ pub async fn fetch( Value::binary(buf), Tag { span, - anchor: Uuid::new_v4(), + anchor: Some(AnchorLocation::Url(location.to_string())), }, - AnchorLocation::Url(location.to_string()), )) } (mime::TEXT, mime::HTML) => Ok(( @@ -239,9 +224,8 @@ pub async fn fetch( })?), Tag { span, - anchor: Uuid::new_v4(), + anchor: Some(AnchorLocation::Url(location.to_string())), }, - 
AnchorLocation::Url(location.to_string()), )), (mime::TEXT, mime::PLAIN) => { let path_extension = url::Url::parse(location) @@ -266,9 +250,8 @@ pub async fn fetch( })?), Tag { span, - anchor: Uuid::new_v4(), + anchor: Some(AnchorLocation::Url(location.to_string())), }, - AnchorLocation::Url(location.to_string()), )) } (ty, sub_ty) => Ok(( @@ -276,9 +259,8 @@ pub async fn fetch( Value::string(format!("Not yet supported MIME type: {} {}", ty, sub_ty)), Tag { span, - anchor: Uuid::new_v4(), + anchor: Some(AnchorLocation::Url(location.to_string())), }, - AnchorLocation::Url(location.to_string()), )), } } @@ -287,9 +269,8 @@ pub async fn fetch( Value::string(format!("No content type found")), Tag { span, - anchor: Uuid::new_v4(), + anchor: Some(AnchorLocation::Url(location.to_string())), }, - AnchorLocation::Url(location.to_string()), )), }, Err(_) => { diff --git a/src/commands/first.rs b/src/commands/first.rs index e39b5155d..71d05be7e 100644 --- a/src/commands/first.rs +++ b/src/commands/first.rs @@ -16,7 +16,7 @@ impl WholeStreamCommand for First { } fn signature(&self) -> Signature { - Signature::build("first").required("amount", SyntaxShape::Literal) + Signature::build("first").required("amount", SyntaxShape::Int) } fn usage(&self) -> &str { diff --git a/src/commands/from_bson.rs b/src/commands/from_bson.rs index 7dd00983f..469e15f35 100644 --- a/src/commands/from_bson.rs +++ b/src/commands/from_bson.rs @@ -33,7 +33,7 @@ fn bson_array(input: &Vec, tag: Tag) -> Result>, ShellEr let mut out = vec![]; for value in input { - out.push(convert_bson_value_to_nu_value(value, tag)?); + out.push(convert_bson_value_to_nu_value(value, &tag)?); } Ok(out) @@ -46,100 +46,100 @@ fn convert_bson_value_to_nu_value( let tag = tag.into(); Ok(match v { - Bson::FloatingPoint(n) => Value::Primitive(Primitive::from(*n)).tagged(tag), - Bson::String(s) => Value::Primitive(Primitive::String(String::from(s))).tagged(tag), - Bson::Array(a) => Value::Table(bson_array(a, tag)?).tagged(tag), + Bson::FloatingPoint(n) => Value::Primitive(Primitive::from(*n)).tagged(&tag), + Bson::String(s) => Value::Primitive(Primitive::String(String::from(s))).tagged(&tag), + Bson::Array(a) => Value::Table(bson_array(a, tag.clone())?).tagged(&tag), Bson::Document(doc) => { - let mut collected = TaggedDictBuilder::new(tag); + let mut collected = TaggedDictBuilder::new(tag.clone()); for (k, v) in doc.iter() { - collected.insert_tagged(k.clone(), convert_bson_value_to_nu_value(v, tag)?); + collected.insert_tagged(k.clone(), convert_bson_value_to_nu_value(v, &tag)?); } collected.into_tagged_value() } - Bson::Boolean(b) => Value::Primitive(Primitive::Boolean(*b)).tagged(tag), - Bson::Null => Value::Primitive(Primitive::Nothing).tagged(tag), + Bson::Boolean(b) => Value::Primitive(Primitive::Boolean(*b)).tagged(&tag), + Bson::Null => Value::Primitive(Primitive::Nothing).tagged(&tag), Bson::RegExp(r, opts) => { - let mut collected = TaggedDictBuilder::new(tag); + let mut collected = TaggedDictBuilder::new(tag.clone()); collected.insert_tagged( "$regex".to_string(), - Value::Primitive(Primitive::String(String::from(r))).tagged(tag), + Value::Primitive(Primitive::String(String::from(r))).tagged(&tag), ); collected.insert_tagged( "$options".to_string(), - Value::Primitive(Primitive::String(String::from(opts))).tagged(tag), + Value::Primitive(Primitive::String(String::from(opts))).tagged(&tag), ); collected.into_tagged_value() } - Bson::I32(n) => Value::number(n).tagged(tag), - Bson::I64(n) => Value::number(n).tagged(tag), + Bson::I32(n) => 
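Throughout this patch the anchor (where a value came from) moves off the old `Uuid`/`SourceMap` pair and into the `Tag` itself as an `Option<AnchorLocation>`. Because the tag now owns heap data, it can no longer be `Copy`, which is why so many `.tagged(tag)` call sites become `.tagged(&tag)` or `tag.clone()`. Simplified stand-in types to show the consequence:

```rust
#[derive(Debug, Clone, Copy, PartialEq)]
struct Span {
    start: usize,
    end: usize,
}

#[derive(Debug, Clone, PartialEq)]
enum AnchorLocation {
    Url(String),
    File(String),
}

// owning a String through AnchorLocation means Tag is Clone but not Copy
#[derive(Debug, Clone, PartialEq)]
struct Tag {
    span: Span,
    anchor: Option<AnchorLocation>,
}

fn main() {
    let tag = Tag {
        span: Span { start: 0, end: 5 },
        anchor: Some(AnchorLocation::Url("https://example.com/data.json".into())),
    };

    // each tagged value now needs its own copy of the tag
    let first = ("a", tag.clone());
    let second = ("b", tag);
    println!("{:?} {:?}", first, second);
}
```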
Value::number(n).tagged(&tag), + Bson::I64(n) => Value::number(n).tagged(&tag), Bson::Decimal128(n) => { // TODO: this really isn't great, and we should update this to do a higher // fidelity translation let decimal = BigDecimal::from_str(&format!("{}", n)).map_err(|_| { ShellError::range_error( ExpectedRange::BigDecimal, - &n.tagged(tag), + &n.tagged(&tag), format!("converting BSON Decimal128 to BigDecimal"), ) })?; - Value::Primitive(Primitive::Decimal(decimal)).tagged(tag) + Value::Primitive(Primitive::Decimal(decimal)).tagged(&tag) } Bson::JavaScriptCode(js) => { - let mut collected = TaggedDictBuilder::new(tag); + let mut collected = TaggedDictBuilder::new(tag.clone()); collected.insert_tagged( "$javascript".to_string(), - Value::Primitive(Primitive::String(String::from(js))).tagged(tag), + Value::Primitive(Primitive::String(String::from(js))).tagged(&tag), ); collected.into_tagged_value() } Bson::JavaScriptCodeWithScope(js, doc) => { - let mut collected = TaggedDictBuilder::new(tag); + let mut collected = TaggedDictBuilder::new(tag.clone()); collected.insert_tagged( "$javascript".to_string(), - Value::Primitive(Primitive::String(String::from(js))).tagged(tag), + Value::Primitive(Primitive::String(String::from(js))).tagged(&tag), ); collected.insert_tagged( "$scope".to_string(), - convert_bson_value_to_nu_value(&Bson::Document(doc.to_owned()), tag)?, + convert_bson_value_to_nu_value(&Bson::Document(doc.to_owned()), tag.clone())?, ); collected.into_tagged_value() } Bson::TimeStamp(ts) => { - let mut collected = TaggedDictBuilder::new(tag); - collected.insert_tagged("$timestamp".to_string(), Value::number(ts).tagged(tag)); + let mut collected = TaggedDictBuilder::new(tag.clone()); + collected.insert_tagged("$timestamp".to_string(), Value::number(ts).tagged(&tag)); collected.into_tagged_value() } Bson::Binary(bst, bytes) => { - let mut collected = TaggedDictBuilder::new(tag); + let mut collected = TaggedDictBuilder::new(tag.clone()); collected.insert_tagged( "$binary_subtype".to_string(), match bst { BinarySubtype::UserDefined(u) => Value::number(u), _ => Value::Primitive(Primitive::String(binary_subtype_to_string(*bst))), } - .tagged(tag), + .tagged(&tag), ); collected.insert_tagged( "$binary".to_string(), - Value::Primitive(Primitive::Binary(bytes.to_owned())).tagged(tag), + Value::Primitive(Primitive::Binary(bytes.to_owned())).tagged(&tag), ); collected.into_tagged_value() } Bson::ObjectId(obj_id) => { - let mut collected = TaggedDictBuilder::new(tag); + let mut collected = TaggedDictBuilder::new(tag.clone()); collected.insert_tagged( "$object_id".to_string(), - Value::Primitive(Primitive::String(obj_id.to_hex())).tagged(tag), + Value::Primitive(Primitive::String(obj_id.to_hex())).tagged(&tag), ); collected.into_tagged_value() } - Bson::UtcDatetime(dt) => Value::Primitive(Primitive::Date(*dt)).tagged(tag), + Bson::UtcDatetime(dt) => Value::Primitive(Primitive::Date(*dt)).tagged(&tag), Bson::Symbol(s) => { - let mut collected = TaggedDictBuilder::new(tag); + let mut collected = TaggedDictBuilder::new(tag.clone()); collected.insert_tagged( "$symbol".to_string(), - Value::Primitive(Primitive::String(String::from(s))).tagged(tag), + Value::Primitive(Primitive::String(String::from(s))).tagged(&tag), ); collected.into_tagged_value() } @@ -208,13 +208,13 @@ fn from_bson(args: CommandArgs, registry: &CommandRegistry) -> Result - match from_bson_bytes_to_value(vb, tag) { + match from_bson_bytes_to_value(vb, tag.clone()) { Ok(x) => yield ReturnSuccess::value(x), Err(_) => { yield 
Err(ShellError::labeled_error_with_secondary( "Could not parse as BSON", "input cannot be parsed as BSON", - tag, + tag.clone(), "value originates from here", value_tag, )) @@ -223,7 +223,7 @@ fn from_bson(args: CommandArgs, registry: &CommandRegistry) -> Result yield Err(ShellError::labeled_error_with_secondary( "Expected a string from pipeline", "requires string input", - tag, + tag.clone(), "value originates from here", value_tag, )), diff --git a/src/commands/from_csv.rs b/src/commands/from_csv.rs index ea90ab3de..877c8dc16 100644 --- a/src/commands/from_csv.rs +++ b/src/commands/from_csv.rs @@ -62,12 +62,12 @@ pub fn from_csv_string_to_value( if let Some(row_values) = iter.next() { let row_values = row_values?; - let mut row = TaggedDictBuilder::new(tag); + let mut row = TaggedDictBuilder::new(tag.clone()); for (idx, entry) in row_values.iter().enumerate() { row.insert_tagged( fields.get(idx).unwrap(), - Value::Primitive(Primitive::String(String::from(entry))).tagged(tag), + Value::Primitive(Primitive::String(String::from(entry))).tagged(&tag), ); } @@ -77,7 +77,7 @@ pub fn from_csv_string_to_value( } } - Ok(Tagged::from_item(Value::Table(rows), tag)) + Ok(Value::Table(rows).tagged(&tag)) } fn from_csv( @@ -96,7 +96,7 @@ fn from_csv( for value in values { let value_tag = value.tag(); - latest_tag = Some(value_tag); + latest_tag = Some(value_tag.clone()); match value.item { Value::Primitive(Primitive::String(s)) => { concat_string.push_str(&s); @@ -105,15 +105,15 @@ fn from_csv( _ => yield Err(ShellError::labeled_error_with_secondary( "Expected a string from pipeline", "requires string input", - name_tag, + name_tag.clone(), "value originates from here", - value_tag, + value_tag.clone(), )), } } - match from_csv_string_to_value(concat_string, skip_headers, name_tag) { + match from_csv_string_to_value(concat_string, skip_headers, name_tag.clone()) { Ok(x) => match x { Tagged { item: Value::Table(list), .. } => { for l in list { @@ -126,9 +126,9 @@ fn from_csv( yield Err(ShellError::labeled_error_with_secondary( "Could not parse as CSV", "input cannot be parsed as CSV", - name_tag, + name_tag.clone(), "value originates from here", - last_tag, + last_tag.clone(), )) } , } diff --git a/src/commands/from_ini.rs b/src/commands/from_ini.rs index d53ad6777..e55bbd45c 100644 --- a/src/commands/from_ini.rs +++ b/src/commands/from_ini.rs @@ -45,10 +45,13 @@ fn convert_ini_top_to_nu_value( tag: impl Into, ) -> Tagged { let tag = tag.into(); - let mut top_level = TaggedDictBuilder::new(tag); + let mut top_level = TaggedDictBuilder::new(tag.clone()); for (key, value) in v.iter() { - top_level.insert_tagged(key.clone(), convert_ini_second_to_nu_value(value, tag)); + top_level.insert_tagged( + key.clone(), + convert_ini_second_to_nu_value(value, tag.clone()), + ); } top_level.into_tagged_value() @@ -75,7 +78,7 @@ fn from_ini(args: CommandArgs, registry: &CommandRegistry) -> Result { concat_string.push_str(&s); @@ -84,15 +87,15 @@ fn from_ini(args: CommandArgs, registry: &CommandRegistry) -> Result yield Err(ShellError::labeled_error_with_secondary( "Expected a string from pipeline", "requires string input", - tag, + &tag, "value originates from here", - value_tag, + &value_tag, )), } } - match from_ini_string_to_value(concat_string, tag) { + match from_ini_string_to_value(concat_string, tag.clone()) { Ok(x) => match x { Tagged { item: Value::Table(list), .. 
} => { for l in list { @@ -105,7 +108,7 @@ fn from_ini(args: CommandArgs, registry: &CommandRegistry) -> Result) - let tag = tag.into(); match v { - serde_hjson::Value::Null => Value::Primitive(Primitive::Nothing).tagged(tag), - serde_hjson::Value::Bool(b) => Value::boolean(*b).tagged(tag), - serde_hjson::Value::F64(n) => Value::number(n).tagged(tag), - serde_hjson::Value::U64(n) => Value::number(n).tagged(tag), - serde_hjson::Value::I64(n) => Value::number(n).tagged(tag), + serde_hjson::Value::Null => Value::Primitive(Primitive::Nothing).tagged(&tag), + serde_hjson::Value::Bool(b) => Value::boolean(*b).tagged(&tag), + serde_hjson::Value::F64(n) => Value::number(n).tagged(&tag), + serde_hjson::Value::U64(n) => Value::number(n).tagged(&tag), + serde_hjson::Value::I64(n) => Value::number(n).tagged(&tag), serde_hjson::Value::String(s) => { - Value::Primitive(Primitive::String(String::from(s))).tagged(tag) + Value::Primitive(Primitive::String(String::from(s))).tagged(&tag) } serde_hjson::Value::Array(a) => Value::Table( a.iter() - .map(|x| convert_json_value_to_nu_value(x, tag)) + .map(|x| convert_json_value_to_nu_value(x, &tag)) .collect(), ) .tagged(tag), serde_hjson::Value::Object(o) => { - let mut collected = TaggedDictBuilder::new(tag); + let mut collected = TaggedDictBuilder::new(&tag); for (k, v) in o.iter() { - collected.insert_tagged(k.clone(), convert_json_value_to_nu_value(v, tag)); + collected.insert_tagged(k.clone(), convert_json_value_to_nu_value(v, &tag)); } collected.into_tagged_value() @@ -82,7 +82,7 @@ fn from_json( for value in values { let value_tag = value.tag(); - latest_tag = Some(value_tag); + latest_tag = Some(value_tag.clone()); match value.item { Value::Primitive(Primitive::String(s)) => { concat_string.push_str(&s); @@ -91,9 +91,9 @@ fn from_json( _ => yield Err(ShellError::labeled_error_with_secondary( "Expected a string from pipeline", "requires string input", - name_tag, + &name_tag, "value originates from here", - value_tag, + &value_tag, )), } @@ -106,15 +106,15 @@ fn from_json( continue; } - match from_json_string_to_value(json_str.to_string(), name_tag) { + match from_json_string_to_value(json_str.to_string(), &name_tag) { Ok(x) => yield ReturnSuccess::value(x), Err(_) => { - if let Some(last_tag) = latest_tag { + if let Some(ref last_tag) = latest_tag { yield Err(ShellError::labeled_error_with_secondary( "Could nnot parse as JSON", "input cannot be parsed as JSON", - name_tag, + &name_tag, "value originates from here", last_tag)) } @@ -122,7 +122,7 @@ fn from_json( } } } else { - match from_json_string_to_value(concat_string, name_tag) { + match from_json_string_to_value(concat_string, name_tag.clone()) { Ok(x) => match x { Tagged { item: Value::Table(list), .. } => { diff --git a/src/commands/from_sqlite.rs b/src/commands/from_sqlite.rs index 20d087bd5..7b93dc163 100644 --- a/src/commands/from_sqlite.rs +++ b/src/commands/from_sqlite.rs @@ -138,7 +138,7 @@ fn from_sqlite(args: CommandArgs, registry: &CommandRegistry) -> Result - match from_sqlite_bytes_to_value(vb, tag) { + match from_sqlite_bytes_to_value(vb, tag.clone()) { Ok(x) => match x { Tagged { item: Value::Table(list), .. 
} => { for l in list { @@ -151,7 +151,7 @@ fn from_sqlite(args: CommandArgs, registry: &CommandRegistry) -> Result Result yield Err(ShellError::labeled_error_with_secondary( "Expected a string from pipeline", "requires string input", - tag, + &tag, "value originates from here", value_tag, )), diff --git a/src/commands/from_toml.rs b/src/commands/from_toml.rs index c0098d926..2cfd05916 100644 --- a/src/commands/from_toml.rs +++ b/src/commands/from_toml.rs @@ -36,7 +36,7 @@ pub fn convert_toml_value_to_nu_value(v: &toml::Value, tag: impl Into) -> T toml::Value::String(s) => Value::Primitive(Primitive::String(String::from(s))).tagged(tag), toml::Value::Array(a) => Value::Table( a.iter() - .map(|x| convert_toml_value_to_nu_value(x, tag)) + .map(|x| convert_toml_value_to_nu_value(x, &tag)) .collect(), ) .tagged(tag), @@ -44,10 +44,10 @@ pub fn convert_toml_value_to_nu_value(v: &toml::Value, tag: impl Into) -> T Value::Primitive(Primitive::String(dt.to_string())).tagged(tag) } toml::Value::Table(t) => { - let mut collected = TaggedDictBuilder::new(tag); + let mut collected = TaggedDictBuilder::new(&tag); for (k, v) in t.iter() { - collected.insert_tagged(k.clone(), convert_toml_value_to_nu_value(v, tag)); + collected.insert_tagged(k.clone(), convert_toml_value_to_nu_value(v, &tag)); } collected.into_tagged_value() @@ -79,7 +79,7 @@ pub fn from_toml( for value in values { let value_tag = value.tag(); - latest_tag = Some(value_tag); + latest_tag = Some(value_tag.clone()); match value.item { Value::Primitive(Primitive::String(s)) => { concat_string.push_str(&s); @@ -88,15 +88,15 @@ pub fn from_toml( _ => yield Err(ShellError::labeled_error_with_secondary( "Expected a string from pipeline", "requires string input", - tag, + &tag, "value originates from here", - value_tag, + &value_tag, )), } } - match from_toml_string_to_value(concat_string, tag) { + match from_toml_string_to_value(concat_string, tag.clone()) { Ok(x) => match x { Tagged { item: Value::Table(list), .. 
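All of the from-* converters touched here (csv, ini, json, sqlite, toml, tsv, url, xml, yaml) share one shape: concatenate the incoming string values while remembering the tag of the last one, parse the combined text, and on failure emit an error that points at both the command and the originating value. A crate-agnostic sketch of that flow; the two-span error struct stands in for `labeled_error_with_secondary`, and "parsing" is just reading one integer per line:

```rust
#[derive(Debug)]
struct TwoSpanError {
    message: &'static str,
    primary: String,   // e.g. "input cannot be parsed as JSON"
    secondary: String, // e.g. "value originates from here" plus the value's tag
}

fn from_format(values: Vec<(String, String)>) -> Result<Vec<i64>, TwoSpanError> {
    let mut concat_string = String::new();
    let mut latest_tag: Option<String> = None;

    for (text, tag) in values {
        latest_tag = Some(tag);
        concat_string.push_str(&text);
        concat_string.push('\n');
    }

    concat_string
        .lines()
        .filter(|l| !l.trim().is_empty())
        .map(|l| l.trim().parse::<i64>())
        .collect::<Result<Vec<_>, _>>()
        .map_err(|_| TwoSpanError {
            message: "Could not parse input",
            primary: "input cannot be parsed as the expected format".into(),
            secondary: format!(
                "value originates from here: {}",
                latest_tag.unwrap_or_else(|| "<unknown>".into())
            ),
        })
}

fn main() {
    println!("{:?}", from_format(vec![("1".into(), "cell A1".into()), ("2".into(), "cell A2".into())]));
    println!("{:?}", from_format(vec![("oops".into(), "cell B1".into())]));
}
```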
} => { for l in list { @@ -109,7 +109,7 @@ pub fn from_toml( yield Err(ShellError::labeled_error_with_secondary( "Could not parse as TOML", "input cannot be parsed as TOML", - tag, + &tag, "value originates from here", last_tag, )) diff --git a/src/commands/from_tsv.rs b/src/commands/from_tsv.rs index bba532d17..80951b71a 100644 --- a/src/commands/from_tsv.rs +++ b/src/commands/from_tsv.rs @@ -63,12 +63,12 @@ pub fn from_tsv_string_to_value( if let Some(row_values) = iter.next() { let row_values = row_values?; - let mut row = TaggedDictBuilder::new(tag); + let mut row = TaggedDictBuilder::new(&tag); for (idx, entry) in row_values.iter().enumerate() { row.insert_tagged( fields.get(idx).unwrap(), - Value::Primitive(Primitive::String(String::from(entry))).tagged(tag), + Value::Primitive(Primitive::String(String::from(entry))).tagged(&tag), ); } @@ -78,7 +78,7 @@ pub fn from_tsv_string_to_value( } } - Ok(Tagged::from_item(Value::Table(rows), tag)) + Ok(Value::Table(rows).tagged(&tag)) } fn from_tsv( @@ -97,7 +97,7 @@ fn from_tsv( for value in values { let value_tag = value.tag(); - latest_tag = Some(value_tag); + latest_tag = Some(value_tag.clone()); match value.item { Value::Primitive(Primitive::String(s)) => { concat_string.push_str(&s); @@ -106,15 +106,15 @@ fn from_tsv( _ => yield Err(ShellError::labeled_error_with_secondary( "Expected a string from pipeline", "requires string input", - name_tag, + &name_tag, "value originates from here", - value_tag, + &value_tag, )), } } - match from_tsv_string_to_value(concat_string, skip_headers, name_tag) { + match from_tsv_string_to_value(concat_string, skip_headers, name_tag.clone()) { Ok(x) => match x { Tagged { item: Value::Table(list), .. } => { for l in list { @@ -127,9 +127,9 @@ fn from_tsv( yield Err(ShellError::labeled_error_with_secondary( "Could not parse as TSV", "input cannot be parsed as TSV", - name_tag, + &name_tag, "value originates from here", - last_tag, + &last_tag, )) } , } diff --git a/src/commands/from_url.rs b/src/commands/from_url.rs index 662508deb..ad23ea5b5 100644 --- a/src/commands/from_url.rs +++ b/src/commands/from_url.rs @@ -39,7 +39,7 @@ fn from_url(args: CommandArgs, registry: &CommandRegistry) -> Result { concat_string.push_str(&s); @@ -47,9 +47,9 @@ fn from_url(args: CommandArgs, registry: &CommandRegistry) -> Result yield Err(ShellError::labeled_error_with_secondary( "Expected a string from pipeline", "requires string input", - tag, + &tag, "value originates from here", - value_tag, + &value_tag, )), } diff --git a/src/commands/from_xml.rs b/src/commands/from_xml.rs index 5bba67b42..0425eb408 100644 --- a/src/commands/from_xml.rs +++ b/src/commands/from_xml.rs @@ -34,7 +34,7 @@ fn from_node_to_value<'a, 'd>(n: &roxmltree::Node<'a, 'd>, tag: impl Into) let mut children_values = vec![]; for c in n.children() { - children_values.push(from_node_to_value(&c, tag)); + children_values.push(from_node_to_value(&c, &tag)); } let children_values: Vec> = children_values @@ -94,7 +94,7 @@ fn from_xml(args: CommandArgs, registry: &CommandRegistry) -> Result { concat_string.push_str(&s); @@ -103,15 +103,15 @@ fn from_xml(args: CommandArgs, registry: &CommandRegistry) -> Result yield Err(ShellError::labeled_error_with_secondary( "Expected a string from pipeline", "requires string input", - tag, + &tag, "value originates from here", - value_tag, + &value_tag, )), } } - match from_xml_string_to_value(concat_string, tag) { + match from_xml_string_to_value(concat_string, tag.clone()) { Ok(x) => match x { Tagged { item: 
Value::Table(list), .. } => { for l in list { @@ -124,9 +124,9 @@ fn from_xml(args: CommandArgs, registry: &CommandRegistry) -> Result) -> serde_yaml::Value::String(s) => Value::string(s).tagged(tag), serde_yaml::Value::Sequence(a) => Value::Table( a.iter() - .map(|x| convert_yaml_value_to_nu_value(x, tag)) + .map(|x| convert_yaml_value_to_nu_value(x, &tag)) .collect(), ) .tagged(tag), serde_yaml::Value::Mapping(t) => { - let mut collected = TaggedDictBuilder::new(tag); + let mut collected = TaggedDictBuilder::new(&tag); for (k, v) in t.iter() { match k { serde_yaml::Value::String(k) => { - collected.insert_tagged(k.clone(), convert_yaml_value_to_nu_value(v, tag)); + collected.insert_tagged(k.clone(), convert_yaml_value_to_nu_value(v, &tag)); } _ => unimplemented!("Unknown key type"), } @@ -108,7 +108,7 @@ fn from_yaml(args: CommandArgs, registry: &CommandRegistry) -> Result { concat_string.push_str(&s); @@ -117,15 +117,15 @@ fn from_yaml(args: CommandArgs, registry: &CommandRegistry) -> Result yield Err(ShellError::labeled_error_with_secondary( "Expected a string from pipeline", "requires string input", - tag, + &tag, "value originates from here", - value_tag, + &value_tag, )), } } - match from_yaml_string_to_value(concat_string, tag) { + match from_yaml_string_to_value(concat_string, tag.clone()) { Ok(x) => match x { Tagged { item: Value::Table(list), .. } => { for l in list { @@ -138,9 +138,9 @@ fn from_yaml(args: CommandArgs, registry: &CommandRegistry) -> Result, - rest: Vec>, + member: ColumnPath, + rest: Vec, } impl WholeStreamCommand for Get { @@ -18,8 +20,8 @@ impl WholeStreamCommand for Get { fn signature(&self) -> Signature { Signature::build("get") - .required("member", SyntaxShape::Member) - .rest(SyntaxShape::Member) + .required("member", SyntaxShape::ColumnPath) + .rest(SyntaxShape::ColumnPath) } fn usage(&self) -> &str { @@ -35,38 +37,41 @@ impl WholeStreamCommand for Get { } } -fn get_member(path: &Tagged, obj: &Tagged) -> Result, ShellError> { +pub type ColumnPath = Vec>; + +pub fn get_column_path( + path: &ColumnPath, + obj: &Tagged, +) -> Result, ShellError> { let mut current = Some(obj); - for p in path.split(".") { + for p in path.iter() { if let Some(obj) = current { - current = match obj.get_data_by_key(p) { + current = match obj.get_data_by_key(&p) { Some(v) => Some(v), None => // Before we give up, see if they gave us a path that matches a field name by itself { - match obj.get_data_by_key(&path.item) { - Some(v) => return Ok(v.clone()), - None => { - let possibilities = obj.data_descriptors(); + let possibilities = obj.data_descriptors(); - let mut possible_matches: Vec<_> = possibilities - .iter() - .map(|x| { - (natural::distance::levenshtein_distance(x, &path.item), x) - }) - .collect(); + let mut possible_matches: Vec<_> = possibilities + .iter() + .map(|x| (natural::distance::levenshtein_distance(x, &p), x)) + .collect(); - possible_matches.sort(); + possible_matches.sort(); - if possible_matches.len() > 0 { - return Err(ShellError::labeled_error( - "Unknown column", - format!("did you mean '{}'?", possible_matches[0].1), - path.tag(), - )); - } - None - } + if possible_matches.len() > 0 { + return Err(ShellError::labeled_error( + "Unknown column", + format!("did you mean '{}'?", possible_matches[0].1), + tag_for_tagged_list(path.iter().map(|p| p.tag())), + )); + } else { + return Err(ShellError::labeled_error( + "Unknown column", + "row does not contain this column", + tag_for_tagged_list(path.iter().map(|p| p.tag())), + )); } } } @@ -85,7 +90,7 @@ fn 
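`get` now walks a column path and, when a column is missing, suggests the closest existing column by edit distance before giving up. The patch uses `natural::distance::levenshtein_distance`; the sketch below hand-rolls a small distance function instead so it stays dependency-free:

```rust
fn levenshtein(a: &str, b: &str) -> usize {
    let (a, b): (Vec<char>, Vec<char>) = (a.chars().collect(), b.chars().collect());
    let mut prev: Vec<usize> = (0..=b.len()).collect();
    for (i, ca) in a.iter().enumerate() {
        let mut cur = vec![i + 1];
        for (j, cb) in b.iter().enumerate() {
            let cost = if ca == cb { 0 } else { 1 };
            cur.push((prev[j] + cost).min(prev[j + 1] + 1).min(cur[j] + 1));
        }
        prev = cur;
    }
    prev[b.len()]
}

// rank every known column by distance to the requested name and return the
// best match, the same "did you mean" step the new get_column_path performs
fn suggest_column<'a>(wanted: &str, columns: &[&'a str]) -> Option<&'a str> {
    let mut scored: Vec<(usize, &'a str)> = columns
        .iter()
        .map(|c| (levenshtein(wanted, c), *c))
        .collect();
    scored.sort();
    scored.first().map(|(_, c)| *c)
}

fn main() {
    let columns = ["name", "size", "modified"];
    // user typed `get nmae` -- suggest the nearest real column
    match suggest_column("nmae", &columns) {
        Some(best) => println!("Unknown column, did you mean '{}'?", best),
        None => println!("Unknown column"),
    }
}
```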
get_member(path: &Tagged, obj: &Tagged) -> Result Ok(obj.clone()), - _ => Ok(Value::nothing().tagged(obj.tag)), + _ => Ok(Value::nothing().tagged(&obj.tag)), }, } } @@ -97,6 +102,8 @@ pub fn get( }: GetArgs, RunnableContext { input, .. }: RunnableContext, ) -> Result { + trace!("get {:?} {:?}", member, fields); + let stream = input .values .map(move |item| { @@ -107,10 +114,10 @@ pub fn get( let fields = vec![&member, &fields] .into_iter() .flatten() - .collect::>>(); + .collect::>(); - for field in &fields { - match get_member(field, &item) { + for column_path in &fields { + match get_column_path(column_path, &item) { Ok(Tagged { item: Value::Table(l), .. diff --git a/src/commands/help.rs b/src/commands/help.rs index d780f1345..04e03fb10 100644 --- a/src/commands/help.rs +++ b/src/commands/help.rs @@ -26,7 +26,7 @@ impl PerItemCommand for Help { _raw_args: &RawCommandArgs, _input: Tagged, ) -> Result { - let tag = call_info.name_tag; + let tag = &call_info.name_tag; match call_info.args.nth(0) { Some(Tagged { diff --git a/src/commands/lines.rs b/src/commands/lines.rs index d2a9cdffd..8375098b7 100644 --- a/src/commands/lines.rs +++ b/src/commands/lines.rs @@ -58,7 +58,7 @@ fn lines(args: CommandArgs, registry: &CommandRegistry) -> Result Result { - context.shell_manager.ls(path, context.name) + context.shell_manager.ls(path, &context) } diff --git a/src/commands/open.rs b/src/commands/open.rs index 254b0bd7b..2972144bc 100644 --- a/src/commands/open.rs +++ b/src/commands/open.rs @@ -7,7 +7,6 @@ use crate::parser::hir::SyntaxShape; use crate::parser::registry::Signature; use crate::prelude::*; use std::path::{Path, PathBuf}; -use uuid::Uuid; pub struct Open; impl PerItemCommand for Open { @@ -45,16 +44,18 @@ fn run( let cwd = PathBuf::from(shell_manager.path()); let full_path = PathBuf::from(cwd); - let path = match call_info - .args - .nth(0) - .ok_or_else(|| ShellError::string(&format!("No file or directory specified")))? - { + let path = match call_info.args.nth(0).ok_or_else(|| { + ShellError::labeled_error( + "No file or directory specified", + "for command", + &call_info.name_tag, + ) + })? 
{ file => file, }; let path_buf = path.as_path()?; let path_str = path_buf.display().to_string(); - let path_span = path.span(); + let path_span = path.tag.span; let has_raw = call_info.args.has("raw"); let registry = registry.clone(); let raw_args = raw_args.clone(); @@ -67,7 +68,7 @@ fn run( yield Err(e); return; } - let (file_extension, contents, contents_tag, anchor_location) = result.unwrap(); + let (file_extension, contents, contents_tag) = result.unwrap(); let file_extension = if has_raw { None @@ -77,21 +78,14 @@ fn run( file_extension.or(path_str.split('.').last().map(String::from)) }; - if contents_tag.anchor != uuid::Uuid::nil() { - // If we have loaded something, track its source - yield ReturnSuccess::action(CommandAction::AddAnchorLocation( - contents_tag.anchor, - anchor_location, - )); - } - - let tagged_contents = contents.tagged(contents_tag); + let tagged_contents = contents.tagged(&contents_tag); if let Some(extension) = file_extension { let command_name = format!("from-{}", extension); if let Some(converter) = registry.get_command(&command_name) { let new_args = RawCommandArgs { host: raw_args.host, + ctrl_c: raw_args.ctrl_c, shell_manager: raw_args.shell_manager, call_info: UnevaluatedCallInfo { args: crate::parser::hir::Call { @@ -100,7 +94,6 @@ fn run( named: None }, source: raw_args.call_info.source, - source_map: raw_args.call_info.source_map, name_tag: raw_args.call_info.name_tag, } }; @@ -114,7 +107,7 @@ fn run( } } Ok(ReturnSuccess::Value(Tagged { item, .. })) => { - yield Ok(ReturnSuccess::Value(Tagged { item, tag: contents_tag })); + yield Ok(ReturnSuccess::Value(Tagged { item, tag: contents_tag.clone() })); } x => yield x, } @@ -134,7 +127,7 @@ pub async fn fetch( cwd: &PathBuf, location: &str, span: Span, -) -> Result<(Option, Value, Tag, AnchorLocation), ShellError> { +) -> Result<(Option, Value, Tag), ShellError> { let mut cwd = cwd.clone(); cwd.push(Path::new(location)); @@ -147,9 +140,8 @@ pub async fn fetch( Value::string(s), Tag { span, - anchor: Uuid::new_v4(), + anchor: Some(AnchorLocation::File(cwd.to_string_lossy().to_string())), }, - AnchorLocation::File(cwd.to_string_lossy().to_string()), )), Err(_) => { //Non utf8 data. 
@@ -166,18 +158,20 @@ pub async fn fetch( Value::string(s), Tag { span, - anchor: Uuid::new_v4(), + anchor: Some(AnchorLocation::File( + cwd.to_string_lossy().to_string(), + )), }, - AnchorLocation::File(cwd.to_string_lossy().to_string()), )), Err(_) => Ok(( None, Value::binary(bytes), Tag { span, - anchor: Uuid::new_v4(), + anchor: Some(AnchorLocation::File( + cwd.to_string_lossy().to_string(), + )), }, - AnchorLocation::File(cwd.to_string_lossy().to_string()), )), } } else { @@ -186,9 +180,10 @@ pub async fn fetch( Value::binary(bytes), Tag { span, - anchor: Uuid::new_v4(), + anchor: Some(AnchorLocation::File( + cwd.to_string_lossy().to_string(), + )), }, - AnchorLocation::File(cwd.to_string_lossy().to_string()), )) } } @@ -204,18 +199,20 @@ pub async fn fetch( Value::string(s), Tag { span, - anchor: Uuid::new_v4(), + anchor: Some(AnchorLocation::File( + cwd.to_string_lossy().to_string(), + )), }, - AnchorLocation::File(cwd.to_string_lossy().to_string()), )), Err(_) => Ok(( None, Value::binary(bytes), Tag { span, - anchor: Uuid::new_v4(), + anchor: Some(AnchorLocation::File( + cwd.to_string_lossy().to_string(), + )), }, - AnchorLocation::File(cwd.to_string_lossy().to_string()), )), } } else { @@ -224,9 +221,10 @@ pub async fn fetch( Value::binary(bytes), Tag { span, - anchor: Uuid::new_v4(), + anchor: Some(AnchorLocation::File( + cwd.to_string_lossy().to_string(), + )), }, - AnchorLocation::File(cwd.to_string_lossy().to_string()), )) } } @@ -235,9 +233,10 @@ pub async fn fetch( Value::binary(bytes), Tag { span, - anchor: Uuid::new_v4(), + anchor: Some(AnchorLocation::File( + cwd.to_string_lossy().to_string(), + )), }, - AnchorLocation::File(cwd.to_string_lossy().to_string()), )), } } diff --git a/src/commands/pivot.rs b/src/commands/pivot.rs index 1a6bb901f..e52ab9092 100644 --- a/src/commands/pivot.rs +++ b/src/commands/pivot.rs @@ -104,7 +104,7 @@ pub fn pivot(args: PivotArgs, context: RunnableContext) -> Result { let mut result = VecDeque::new(); - result.push_back(Err(ShellError::string(format!( + result.push_back(Err(ShellError::untagged_runtime_error(format!( "Error while processing begin_filter response: {:?} {}", e, input )))); @@ -138,7 +138,7 @@ pub fn filter_plugin( } Err(e) => { let mut result = VecDeque::new(); - result.push_back(Err(ShellError::string(format!( + result.push_back(Err(ShellError::untagged_runtime_error(format!( "Error while reading begin_filter response: {:?}", e )))); @@ -189,7 +189,7 @@ pub fn filter_plugin( }, Err(e) => { let mut result = VecDeque::new(); - result.push_back(Err(ShellError::string(format!( + result.push_back(Err(ShellError::untagged_runtime_error(format!( "Error while processing end_filter response: {:?} {}", e, input )))); @@ -199,7 +199,7 @@ pub fn filter_plugin( } Err(e) => { let mut result = VecDeque::new(); - result.push_back(Err(ShellError::string(format!( + result.push_back(Err(ShellError::untagged_runtime_error(format!( "Error while reading end_filter: {:?}", e )))); @@ -236,7 +236,7 @@ pub fn filter_plugin( }, Err(e) => { let mut result = VecDeque::new(); - result.push_back(Err(ShellError::string(format!( + result.push_back(Err(ShellError::untagged_runtime_error(format!( "Error while processing filter response: {:?} {}", e, input )))); @@ -246,7 +246,7 @@ pub fn filter_plugin( } Err(e) => { let mut result = VecDeque::new(); - result.push_back(Err(ShellError::string(format!( + result.push_back(Err(ShellError::untagged_runtime_error(format!( "Error while reading filter response: {:?}", e )))); diff --git a/src/commands/post.rs 
b/src/commands/post.rs index 5a77afd14..374616d2e 100644 --- a/src/commands/post.rs +++ b/src/commands/post.rs @@ -54,21 +54,20 @@ fn run( registry: &CommandRegistry, raw_args: &RawCommandArgs, ) -> Result { + let name_tag = call_info.name_tag.clone(); let call_info = call_info.clone(); - let path = match call_info - .args - .nth(0) - .ok_or_else(|| ShellError::string(&format!("No url specified")))? - { - file => file.clone(), - }; - let body = match call_info - .args - .nth(1) - .ok_or_else(|| ShellError::string(&format!("No body specified")))? - { - file => file.clone(), - }; + let path = + match call_info.args.nth(0).ok_or_else(|| { + ShellError::labeled_error("No url specified", "for command", &name_tag) + })? { + file => file.clone(), + }; + let body = + match call_info.args.nth(1).ok_or_else(|| { + ShellError::labeled_error("No body specified", "for command", &name_tag) + })? { + file => file.clone(), + }; let path_str = path.as_string()?; let path_span = path.tag(); let has_raw = call_info.args.has("raw"); @@ -83,7 +82,7 @@ fn run( let headers = get_headers(&call_info)?; let stream = async_stream! { - let (file_extension, contents, contents_tag, anchor_location) = + let (file_extension, contents, contents_tag) = post(&path_str, &body, user, password, &headers, path_span, ®istry, &raw_args).await.unwrap(); let file_extension = if has_raw { @@ -94,21 +93,14 @@ fn run( file_extension.or(path_str.split('.').last().map(String::from)) }; - if contents_tag.anchor != uuid::Uuid::nil() { - // If we have loaded something, track its source - yield ReturnSuccess::action(CommandAction::AddAnchorLocation( - contents_tag.anchor, - anchor_location, - )); - } - - let tagged_contents = contents.tagged(contents_tag); + let tagged_contents = contents.tagged(&contents_tag); if let Some(extension) = file_extension { let command_name = format!("from-{}", extension); if let Some(converter) = registry.get_command(&command_name) { let new_args = RawCommandArgs { host: raw_args.host, + ctrl_c: raw_args.ctrl_c, shell_manager: raw_args.shell_manager, call_info: UnevaluatedCallInfo { args: crate::parser::hir::Call { @@ -117,7 +109,6 @@ fn run( named: None }, source: raw_args.call_info.source, - source_map: raw_args.call_info.source_map, name_tag: raw_args.call_info.name_tag, } }; @@ -131,7 +122,7 @@ fn run( } } Ok(ReturnSuccess::Value(Tagged { item, .. 
})) => { - yield Ok(ReturnSuccess::Value(Tagged { item, tag: contents_tag })); + yield Ok(ReturnSuccess::Value(Tagged { item, tag: contents_tag.clone() })); } x => yield x, } @@ -211,7 +202,7 @@ pub async fn post( tag: Tag, registry: &CommandRegistry, raw_args: &RawCommandArgs, -) -> Result<(Option, Value, Tag, AnchorLocation), ShellError> { +) -> Result<(Option, Value, Tag), ShellError> { let registry = registry.clone(); let raw_args = raw_args.clone(); if location.starts_with("http:") || location.starts_with("https:") { @@ -252,6 +243,7 @@ pub async fn post( if let Some(converter) = registry.get_command("to-json") { let new_args = RawCommandArgs { host: raw_args.host, + ctrl_c: raw_args.ctrl_c, shell_manager: raw_args.shell_manager, call_info: UnevaluatedCallInfo { args: crate::parser::hir::Call { @@ -260,7 +252,6 @@ pub async fn post( named: None, }, source: raw_args.call_info.source, - source_map: raw_args.call_info.source_map, name_tag: raw_args.call_info.name_tag, }, }; @@ -284,7 +275,7 @@ pub async fn post( return Err(ShellError::labeled_error( "Save could not successfully save", "unexpected data during save", - *tag, + tag, )); } } @@ -300,7 +291,7 @@ pub async fn post( return Err(ShellError::labeled_error( "Could not automatically convert table", "needs manual conversion", - *tag, + tag, )); } } @@ -316,11 +307,13 @@ pub async fn post( ShellError::labeled_error( "Could not load text from remote url", "could not load", - tag, + &tag, ) })?), - tag, - AnchorLocation::Url(location.to_string()), + Tag { + anchor: Some(AnchorLocation::Url(location.to_string())), + span: tag.span, + }, )), (mime::APPLICATION, mime::JSON) => Ok(( Some("json".to_string()), @@ -328,25 +321,29 @@ pub async fn post( ShellError::labeled_error( "Could not load text from remote url", "could not load", - tag, + &tag, ) })?), - tag, - AnchorLocation::Url(location.to_string()), + Tag { + anchor: Some(AnchorLocation::Url(location.to_string())), + span: tag.span, + }, )), (mime::APPLICATION, mime::OCTET_STREAM) => { let buf: Vec = r.body_bytes().await.map_err(|_| { ShellError::labeled_error( "Could not load binary file", "could not load", - tag, + &tag, ) })?; Ok(( None, Value::binary(buf), - tag, - AnchorLocation::Url(location.to_string()), + Tag { + anchor: Some(AnchorLocation::Url(location.to_string())), + span: tag.span, + }, )) } (mime::IMAGE, image_ty) => { @@ -354,14 +351,16 @@ pub async fn post( ShellError::labeled_error( "Could not load image file", "could not load", - tag, + &tag, ) })?; Ok(( Some(image_ty.to_string()), Value::binary(buf), - tag, - AnchorLocation::Url(location.to_string()), + Tag { + anchor: Some(AnchorLocation::Url(location.to_string())), + span: tag.span, + }, )) } (mime::TEXT, mime::HTML) => Ok(( @@ -370,11 +369,13 @@ pub async fn post( ShellError::labeled_error( "Could not load text from remote url", "could not load", - tag, + &tag, ) })?), - tag, - AnchorLocation::Url(location.to_string()), + Tag { + anchor: Some(AnchorLocation::Url(location.to_string())), + span: tag.span, + }, )), (mime::TEXT, mime::PLAIN) => { let path_extension = url::Url::parse(location) @@ -394,11 +395,13 @@ pub async fn post( ShellError::labeled_error( "Could not load text from remote url", "could not load", - tag, + &tag, ) })?), - tag, - AnchorLocation::Url(location.to_string()), + Tag { + anchor: Some(AnchorLocation::Url(location.to_string())), + span: tag.span, + }, )) } (ty, sub_ty) => Ok(( @@ -407,16 +410,20 @@ pub async fn post( "Not yet supported MIME type: {} {}", ty, sub_ty )), - tag, - 
AnchorLocation::Url(location.to_string()), + Tag { + anchor: Some(AnchorLocation::Url(location.to_string())), + span: tag.span, + }, )), } } None => Ok(( None, Value::string(format!("No content type found")), - tag, - AnchorLocation::Url(location.to_string()), + Tag { + anchor: Some(AnchorLocation::Url(location.to_string())), + span: tag.span, + }, )), }, Err(_) => { diff --git a/src/commands/save.rs b/src/commands/save.rs index 47f1a17e9..ac48fe280 100644 --- a/src/commands/save.rs +++ b/src/commands/save.rs @@ -119,49 +119,48 @@ fn save( input, name, shell_manager, - source_map, host, + ctrl_c, commands: registry, .. }: RunnableContext, raw_args: RawCommandArgs, ) -> Result { let mut full_path = PathBuf::from(shell_manager.path()); - let name_tag = name; + let name_tag = name.clone(); - let source_map = source_map.clone(); let stream = async_stream! { let input: Vec> = input.values.collect().await; if path.is_none() { // If there is no filename, check the metadata for the anchor filename if input.len() > 0 { let anchor = input[0].anchor(); - match source_map.get(&anchor) { + match anchor { Some(path) => match path { AnchorLocation::File(file) => { - full_path.push(Path::new(file)); + full_path.push(Path::new(&file)); } _ => { yield Err(ShellError::labeled_error( - "Save requires a filepath", + "Save requires a filepath (1)", "needs path", - name_tag, + name_tag.clone(), )); } }, None => { yield Err(ShellError::labeled_error( - "Save requires a filepath", + "Save requires a filepath (2)", "needs path", - name_tag, + name_tag.clone(), )); } } } else { yield Err(ShellError::labeled_error( - "Save requires a filepath", + "Save requires a filepath (3)", "needs path", - name_tag, + name_tag.clone(), )); } } else { @@ -179,6 +178,7 @@ fn save( if let Some(converter) = registry.get_command(&command_name) { let new_args = RawCommandArgs { host, + ctrl_c, shell_manager, call_info: UnevaluatedCallInfo { args: crate::parser::hir::Call { @@ -187,7 +187,6 @@ fn save( named: None }, source: raw_args.call_info.source, - source_map: raw_args.call_info.source_map, name_tag: raw_args.call_info.name_tag, } }; @@ -212,9 +211,9 @@ fn save( match content { Ok(save_data) => match std::fs::write(full_path, save_data) { Ok(o) => o, - Err(e) => yield Err(ShellError::string(e.to_string())), + Err(e) => yield Err(ShellError::labeled_error(e.to_string(), "for command", name)), }, - Err(e) => yield Err(ShellError::string(e.to_string())), + Err(e) => yield Err(ShellError::labeled_error(e.to_string(), "for command", name)), } }; diff --git a/src/commands/shells.rs b/src/commands/shells.rs index 2aee2c856..6058a4203 100644 --- a/src/commands/shells.rs +++ b/src/commands/shells.rs @@ -2,6 +2,7 @@ use crate::commands::WholeStreamCommand; use crate::data::TaggedDictBuilder; use crate::errors::ShellError; use crate::prelude::*; +use std::sync::atomic::Ordering; pub struct Shells; @@ -32,14 +33,14 @@ fn shells(args: CommandArgs, _registry: &CommandRegistry) -> Result Result Err(ShellError::labeled_error_with_secondary( "Expected a string from pipeline", "requires string input", - tag, + &tag, "value originates from here", v.tag(), )), diff --git a/src/commands/skip_while.rs b/src/commands/skip_while.rs index 041caf300..a768ae613 100644 --- a/src/commands/skip_while.rs +++ b/src/commands/skip_while.rs @@ -1,6 +1,7 @@ use crate::commands::WholeStreamCommand; use crate::errors::ShellError; use crate::prelude::*; +use log::trace; pub struct SkipWhile; @@ -38,7 +39,9 @@ pub fn skip_while( RunnableContext { input, .. 
}: RunnableContext, ) -> Result { let objects = input.values.skip_while(move |item| { + trace!("ITEM = {:?}", item); let result = condition.invoke(&item); + trace!("RESULT = {:?}", result); let return_value = match result { Ok(ref v) if v.is_true() => true, diff --git a/src/commands/split_column.rs b/src/commands/split_column.rs index 00e2609f2..d17428302 100644 --- a/src/commands/split_column.rs +++ b/src/commands/split_column.rs @@ -94,7 +94,7 @@ fn split_column( _ => Err(ShellError::labeled_error_with_secondary( "Expected a string from pipeline", "requires string input", - name, + &name, "value originates from here", v.tag(), )), diff --git a/src/commands/split_row.rs b/src/commands/split_row.rs index e70e5cfa8..94f7564b4 100644 --- a/src/commands/split_row.rs +++ b/src/commands/split_row.rs @@ -60,7 +60,7 @@ fn split_row( result.push_back(Err(ShellError::labeled_error_with_secondary( "Expected a string from pipeline", "requires string input", - name, + &name, "value originates from here", v.tag(), ))); diff --git a/src/commands/table.rs b/src/commands/table.rs index e9fbe35f2..8ad2c246d 100644 --- a/src/commands/table.rs +++ b/src/commands/table.rs @@ -5,16 +5,13 @@ use crate::prelude::*; pub struct Table; -#[derive(Deserialize)] -pub struct TableArgs {} - impl WholeStreamCommand for Table { fn name(&self) -> &str { "table" } fn signature(&self) -> Signature { - Signature::build("table") + Signature::build("table").named("start_number", SyntaxShape::Number) } fn usage(&self) -> &str { @@ -26,16 +23,29 @@ impl WholeStreamCommand for Table { args: CommandArgs, registry: &CommandRegistry, ) -> Result { - args.process(registry, table)?.run() + table(args, registry) } } -pub fn table(_args: TableArgs, context: RunnableContext) -> Result { +fn table(args: CommandArgs, registry: &CommandRegistry) -> Result { + let args = args.evaluate_once(registry)?; + let stream = async_stream! { - let input: Vec> = context.input.into_vec().await; + let host = args.host.clone(); + let start_number = match args.get("start_number") { + Some(Tagged { item: Value::Primitive(Primitive::Int(i)), .. }) => { + i.to_usize().unwrap() + } + _ => { + 0 + } + }; + + let input: Vec> = args.input.into_vec().await; if input.len() > 0 { - let mut host = context.host.lock().unwrap(); - let view = TableView::from_list(&input); + let mut host = host.lock().unwrap(); + let view = TableView::from_list(&input, start_number); + if let Some(view) = view { handle_unexpected(&mut *host, |host| crate::format::print_view(&view, host)); } diff --git a/src/commands/tags.rs b/src/commands/tags.rs index 0cef300b0..221e8cc30 100644 --- a/src/commands/tags.rs +++ b/src/commands/tags.rs @@ -28,7 +28,6 @@ impl WholeStreamCommand for Tags { } fn tags(args: CommandArgs, _registry: &CommandRegistry) -> Result { - let source_map = args.call_info.source_map.clone(); Ok(args .input .values @@ -38,11 +37,11 @@ fn tags(args: CommandArgs, _registry: &CommandRegistry) -> Result { tags.insert("anchor", Value::string(source)); } diff --git a/src/commands/to_bson.rs b/src/commands/to_bson.rs index a36d99c07..eabf8381e 100644 --- a/src/commands/to_bson.rs +++ b/src/commands/to_bson.rs @@ -46,7 +46,7 @@ pub fn value_to_bson_value(v: &Tagged) -> Result { Value::Primitive(Primitive::BeginningOfStream) => Bson::Null, Value::Primitive(Primitive::Decimal(d)) => Bson::FloatingPoint(d.to_f64().unwrap()), Value::Primitive(Primitive::Int(i)) => { - Bson::I64(i.tagged(v.tag).coerce_into("converting to BSON")?) 
+ Bson::I64(i.tagged(&v.tag).coerce_into("converting to BSON")?) } Value::Primitive(Primitive::Nothing) => Bson::Null, Value::Primitive(Primitive::String(s)) => Bson::String(s.clone()), @@ -58,6 +58,7 @@ pub fn value_to_bson_value(v: &Tagged) -> Result { .collect::>()?, ), Value::Block(_) => Bson::Null, + Value::Error(e) => return Err(e.clone()), Value::Primitive(Primitive::Binary(b)) => Bson::Binary(BinarySubtype::Generic, b.clone()), Value::Row(o) => object_value_to_bson(o)?, }) @@ -170,7 +171,7 @@ fn get_binary_subtype<'a>(tagged_value: &'a Tagged) -> Result unreachable!(), }), Value::Primitive(Primitive::Int(i)) => Ok(BinarySubtype::UserDefined( - i.tagged(tagged_value.tag) + i.tagged(&tagged_value.tag) .coerce_into("converting to BSON binary subtype")?, )), _ => Err(ShellError::type_error( @@ -207,12 +208,12 @@ fn bson_value_to_bytes(bson: Bson, tag: Tag) -> Result, ShellError> { Bson::Array(a) => { for v in a.into_iter() { match v { - Bson::Document(d) => shell_encode_document(&mut out, d, tag)?, + Bson::Document(d) => shell_encode_document(&mut out, d, tag.clone())?, _ => { return Err(ShellError::labeled_error( format!("All top level values must be Documents, got {:?}", v), "requires BSON-compatible document", - tag, + &tag, )) } } @@ -237,7 +238,7 @@ fn to_bson(args: CommandArgs, registry: &CommandRegistry) -> Result> = args.input.values.collect().await; let to_process_input = if input.len() > 1 { - let tag = input[0].tag; + let tag = input[0].tag.clone(); vec![Tagged { item: Value::Table(input), tag } ] } else if input.len() == 1 { input @@ -248,14 +249,14 @@ fn to_bson(args: CommandArgs, registry: &CommandRegistry) -> Result { - match bson_value_to_bytes(bson_value, name_tag) { + match bson_value_to_bytes(bson_value, name_tag.clone()) { Ok(x) => yield ReturnSuccess::value( - Value::binary(x).tagged(name_tag), + Value::binary(x).tagged(&name_tag), ), _ => yield Err(ShellError::labeled_error_with_secondary( "Expected a table with BSON-compatible structure.tag() from pipeline", "requires BSON-compatible input", - name_tag, + &name_tag, "originates from here".to_string(), value.tag(), )), @@ -264,7 +265,7 @@ fn to_bson(args: CommandArgs, registry: &CommandRegistry) -> Result yield Err(ShellError::labeled_error( "Expected a table with BSON-compatible structure from pipeline", "requires BSON-compatible input", - name_tag)) + &name_tag)) } } }; diff --git a/src/commands/to_csv.rs b/src/commands/to_csv.rs index 1897fb86b..90f483745 100644 --- a/src/commands/to_csv.rs +++ b/src/commands/to_csv.rs @@ -32,8 +32,8 @@ impl WholeStreamCommand for ToCSV { } } -pub fn value_to_csv_value(v: &Value) -> Value { - match v { +pub fn value_to_csv_value(v: &Tagged) -> Tagged { + match &v.item { Value::Primitive(Primitive::String(s)) => Value::Primitive(Primitive::String(s.clone())), Value::Primitive(Primitive::Nothing) => Value::Primitive(Primitive::Nothing), Value::Primitive(Primitive::Boolean(b)) => Value::Primitive(Primitive::Boolean(b.clone())), @@ -47,10 +47,11 @@ pub fn value_to_csv_value(v: &Value) -> Value { Value::Block(_) => Value::Primitive(Primitive::Nothing), _ => Value::Primitive(Primitive::Nothing), } + .tagged(v.tag.clone()) } -fn to_string_helper(v: &Value) -> Result { - match v { +fn to_string_helper(v: &Tagged) -> Result { + match &v.item { Value::Primitive(Primitive::Date(d)) => Ok(d.to_string()), Value::Primitive(Primitive::Bytes(b)) => Ok(format!("{}", b)), Value::Primitive(Primitive::Boolean(_)) => Ok(v.as_string()?), @@ -60,7 +61,13 @@ fn to_string_helper(v: &Value) -> Result { 
Value::Table(_) => return Ok(String::from("[Table]")), Value::Row(_) => return Ok(String::from("[Row]")), Value::Primitive(Primitive::String(s)) => return Ok(s.to_string()), - _ => return Err(ShellError::string("Unexpected value")), + _ => { + return Err(ShellError::labeled_error( + "Unexpected value", + "", + v.tag.clone(), + )) + } } } @@ -76,7 +83,9 @@ fn merge_descriptors(values: &[Tagged]) -> Vec { ret } -pub fn to_string(v: &Value) -> Result { +pub fn to_string(tagged_value: &Tagged) -> Result { + let v = &tagged_value.item; + match v { Value::Row(o) => { let mut wtr = WriterBuilder::new().from_writer(vec![]); @@ -92,11 +101,20 @@ pub fn to_string(v: &Value) -> Result { wtr.write_record(fields).expect("can not write."); wtr.write_record(values).expect("can not write."); - return Ok(String::from_utf8( - wtr.into_inner() - .map_err(|_| ShellError::string("Could not convert record"))?, - ) - .map_err(|_| ShellError::string("Could not convert record"))?); + return Ok(String::from_utf8(wtr.into_inner().map_err(|_| { + ShellError::labeled_error( + "Could not convert record", + "original value", + &tagged_value.tag, + ) + })?) + .map_err(|_| { + ShellError::labeled_error( + "Could not convert record", + "original value", + &tagged_value.tag, + ) + })?); } Value::Table(list) => { let mut wtr = WriterBuilder::new().from_writer(vec![]); @@ -120,13 +138,22 @@ pub fn to_string(v: &Value) -> Result { wtr.write_record(&row).expect("can not write"); } - return Ok(String::from_utf8( - wtr.into_inner() - .map_err(|_| ShellError::string("Could not convert record"))?, - ) - .map_err(|_| ShellError::string("Could not convert record"))?); + return Ok(String::from_utf8(wtr.into_inner().map_err(|_| { + ShellError::labeled_error( + "Could not convert record", + "original value", + &tagged_value.tag, + ) + })?) 
+ .map_err(|_| { + ShellError::labeled_error( + "Could not convert record", + "original value", + &tagged_value.tag, + ) + })?); } - _ => return to_string_helper(&v), + _ => return to_string_helper(tagged_value), } } @@ -139,7 +166,7 @@ fn to_csv( let input: Vec> = input.values.collect().await; let to_process_input = if input.len() > 1 { - let tag = input[0].tag; + let tag = input[0].tag.clone(); vec![Tagged { item: Value::Table(input), tag } ] } else if input.len() == 1 { input @@ -148,20 +175,20 @@ fn to_csv( }; for value in to_process_input { - match to_string(&value_to_csv_value(&value.item)) { + match to_string(&value_to_csv_value(&value)) { Ok(x) => { let converted = if headerless { x.lines().skip(1).collect() } else { x }; - yield ReturnSuccess::value(Value::Primitive(Primitive::String(converted)).tagged(name_tag)) + yield ReturnSuccess::value(Value::Primitive(Primitive::String(converted)).tagged(&name_tag)) } _ => { yield Err(ShellError::labeled_error_with_secondary( "Expected a table with CSV-compatible structure.tag() from pipeline", "requires CSV-compatible input", - name_tag, + &name_tag, "originates from here".to_string(), value.tag(), )) diff --git a/src/commands/to_json.rs b/src/commands/to_json.rs index 9c06299aa..40edc5aeb 100644 --- a/src/commands/to_json.rs +++ b/src/commands/to_json.rs @@ -42,7 +42,7 @@ pub fn value_to_json_value(v: &Tagged) -> Result serde_json::Value::Number(serde_json::Number::from( - CoerceInto::::coerce_into(i.tagged(v.tag), "converting to JSON number")?, + CoerceInto::::coerce_into(i.tagged(&v.tag), "converting to JSON number")?, )), Value::Primitive(Primitive::Nothing) => serde_json::Value::Null, Value::Primitive(Primitive::Pattern(s)) => serde_json::Value::String(s.clone()), @@ -50,6 +50,7 @@ pub fn value_to_json_value(v: &Tagged) -> Result serde_json::Value::String(s.display().to_string()), Value::Table(l) => serde_json::Value::Array(json_list(l)?), + Value::Error(e) => return Err(e.clone()), Value::Block(_) => serde_json::Value::Null, Value::Primitive(Primitive::Binary(b)) => serde_json::Value::Array( b.iter() @@ -85,7 +86,7 @@ fn to_json(args: CommandArgs, registry: &CommandRegistry) -> Result> = args.input.values.collect().await; let to_process_input = if input.len() > 1 { - let tag = input[0].tag; + let tag = input[0].tag.clone(); vec![Tagged { item: Value::Table(input), tag } ] } else if input.len() == 1 { input @@ -98,12 +99,12 @@ fn to_json(args: CommandArgs, registry: &CommandRegistry) -> Result { match serde_json::to_string(&json_value) { Ok(x) => yield ReturnSuccess::value( - Value::Primitive(Primitive::String(x)).tagged(name_tag), + Value::Primitive(Primitive::String(x)).tagged(&name_tag), ), _ => yield Err(ShellError::labeled_error_with_secondary( "Expected a table with JSON-compatible structure.tag() from pipeline", "requires JSON-compatible input", - name_tag, + &name_tag, "originates from here".to_string(), value.tag(), )), @@ -112,7 +113,7 @@ fn to_json(args: CommandArgs, registry: &CommandRegistry) -> Result yield Err(ShellError::labeled_error( "Expected a table with JSON-compatible structure from pipeline", "requires JSON-compatible input", - name_tag)) + &name_tag)) } } }; diff --git a/src/commands/to_toml.rs b/src/commands/to_toml.rs index 6c8904e0c..778fdd256 100644 --- a/src/commands/to_toml.rs +++ b/src/commands/to_toml.rs @@ -38,10 +38,10 @@ pub fn value_to_toml_value(v: &Tagged) -> Result toml::Value::String("".to_string()) } Value::Primitive(Primitive::Decimal(f)) => { - 
toml::Value::Float(f.tagged(v.tag).coerce_into("converting to TOML float")?) + toml::Value::Float(f.tagged(&v.tag).coerce_into("converting to TOML float")?) } Value::Primitive(Primitive::Int(i)) => { - toml::Value::Integer(i.tagged(v.tag).coerce_into("converting to TOML integer")?) + toml::Value::Integer(i.tagged(&v.tag).coerce_into("converting to TOML integer")?) } Value::Primitive(Primitive::Nothing) => toml::Value::String("".to_string()), Value::Primitive(Primitive::Pattern(s)) => toml::Value::String(s.clone()), @@ -49,6 +49,7 @@ pub fn value_to_toml_value(v: &Tagged) -> Result Value::Primitive(Primitive::Path(s)) => toml::Value::String(s.display().to_string()), Value::Table(l) => toml::Value::Array(collect_values(l)?), + Value::Error(e) => return Err(e.clone()), Value::Block(_) => toml::Value::String("".to_string()), Value::Primitive(Primitive::Binary(b)) => { toml::Value::Array(b.iter().map(|x| toml::Value::Integer(*x as i64)).collect()) @@ -80,7 +81,7 @@ fn to_toml(args: CommandArgs, registry: &CommandRegistry) -> Result> = args.input.values.collect().await; let to_process_input = if input.len() > 1 { - let tag = input[0].tag; + let tag = input[0].tag.clone(); vec![Tagged { item: Value::Table(input), tag } ] } else if input.len() == 1 { input @@ -93,12 +94,12 @@ fn to_toml(args: CommandArgs, registry: &CommandRegistry) -> Result { match toml::to_string(&toml_value) { Ok(x) => yield ReturnSuccess::value( - Value::Primitive(Primitive::String(x)).tagged(name_tag), + Value::Primitive(Primitive::String(x)).tagged(&name_tag), ), _ => yield Err(ShellError::labeled_error_with_secondary( "Expected a table with TOML-compatible structure.tag() from pipeline", "requires TOML-compatible input", - name_tag, + &name_tag, "originates from here".to_string(), value.tag(), )), @@ -107,7 +108,7 @@ fn to_toml(args: CommandArgs, registry: &CommandRegistry) -> Result yield Err(ShellError::labeled_error( "Expected a table with TOML-compatible structure from pipeline", "requires TOML-compatible input", - name_tag)) + &name_tag)) } } }; diff --git a/src/commands/to_tsv.rs b/src/commands/to_tsv.rs index 4edc26fac..83cb4a07f 100644 --- a/src/commands/to_tsv.rs +++ b/src/commands/to_tsv.rs @@ -32,7 +32,9 @@ impl WholeStreamCommand for ToTSV { } } -pub fn value_to_tsv_value(v: &Value) -> Value { +pub fn value_to_tsv_value(tagged_value: &Tagged) -> Tagged { + let v = &tagged_value.item; + match v { Value::Primitive(Primitive::String(s)) => Value::Primitive(Primitive::String(s.clone())), Value::Primitive(Primitive::Nothing) => Value::Primitive(Primitive::Nothing), @@ -47,20 +49,28 @@ pub fn value_to_tsv_value(v: &Value) -> Value { Value::Block(_) => Value::Primitive(Primitive::Nothing), _ => Value::Primitive(Primitive::Nothing), } + .tagged(&tagged_value.tag) } -fn to_string_helper(v: &Value) -> Result { +fn to_string_helper(tagged_value: &Tagged) -> Result { + let v = &tagged_value.item; match v { Value::Primitive(Primitive::Date(d)) => Ok(d.to_string()), Value::Primitive(Primitive::Bytes(b)) => Ok(format!("{}", b)), - Value::Primitive(Primitive::Boolean(_)) => Ok(v.as_string()?), - Value::Primitive(Primitive::Decimal(_)) => Ok(v.as_string()?), - Value::Primitive(Primitive::Int(_)) => Ok(v.as_string()?), - Value::Primitive(Primitive::Path(_)) => Ok(v.as_string()?), + Value::Primitive(Primitive::Boolean(_)) => Ok(tagged_value.as_string()?), + Value::Primitive(Primitive::Decimal(_)) => Ok(tagged_value.as_string()?), + Value::Primitive(Primitive::Int(_)) => Ok(tagged_value.as_string()?), + 
Value::Primitive(Primitive::Path(_)) => Ok(tagged_value.as_string()?), Value::Table(_) => return Ok(String::from("[table]")), Value::Row(_) => return Ok(String::from("[row]")), Value::Primitive(Primitive::String(s)) => return Ok(s.to_string()), - _ => return Err(ShellError::string("Unexpected value")), + _ => { + return Err(ShellError::labeled_error( + "Unexpected value", + "original value", + &tagged_value.tag, + )) + } } } @@ -76,7 +86,9 @@ fn merge_descriptors(values: &[Tagged]) -> Vec { ret } -pub fn to_string(v: &Value) -> Result { +pub fn to_string(tagged_value: &Tagged) -> Result { + let v = &tagged_value.item; + match v { Value::Row(o) => { let mut wtr = WriterBuilder::new().delimiter(b'\t').from_writer(vec![]); @@ -91,11 +103,20 @@ pub fn to_string(v: &Value) -> Result { wtr.write_record(fields).expect("can not write."); wtr.write_record(values).expect("can not write."); - return Ok(String::from_utf8( - wtr.into_inner() - .map_err(|_| ShellError::string("Could not convert record"))?, - ) - .map_err(|_| ShellError::string("Could not convert record"))?); + return Ok(String::from_utf8(wtr.into_inner().map_err(|_| { + ShellError::labeled_error( + "Could not convert record", + "original value", + &tagged_value.tag, + ) + })?) + .map_err(|_| { + ShellError::labeled_error( + "Could not convert record", + "original value", + &tagged_value.tag, + ) + })?); } Value::Table(list) => { let mut wtr = WriterBuilder::new().delimiter(b'\t').from_writer(vec![]); @@ -119,13 +140,22 @@ pub fn to_string(v: &Value) -> Result { wtr.write_record(&row).expect("can not write"); } - return Ok(String::from_utf8( - wtr.into_inner() - .map_err(|_| ShellError::string("Could not convert record"))?, - ) - .map_err(|_| ShellError::string("Could not convert record"))?); + return Ok(String::from_utf8(wtr.into_inner().map_err(|_| { + ShellError::labeled_error( + "Could not convert record", + "original value", + &tagged_value.tag, + ) + })?) 
+ .map_err(|_| { + ShellError::labeled_error( + "Could not convert record", + "original value", + &tagged_value.tag, + ) + })?); } - _ => return to_string_helper(&v), + _ => return to_string_helper(tagged_value), } } @@ -138,7 +168,7 @@ fn to_tsv( let input: Vec> = input.values.collect().await; let to_process_input = if input.len() > 1 { - let tag = input[0].tag; + let tag = input[0].tag.clone(); vec![Tagged { item: Value::Table(input), tag } ] } else if input.len() == 1 { input @@ -147,20 +177,20 @@ fn to_tsv( }; for value in to_process_input { - match to_string(&value_to_tsv_value(&value.item)) { + match to_string(&value_to_tsv_value(&value)) { Ok(x) => { let converted = if headerless { x.lines().skip(1).collect() } else { x }; - yield ReturnSuccess::value(Value::Primitive(Primitive::String(converted)).tagged(name_tag)) + yield ReturnSuccess::value(Value::Primitive(Primitive::String(converted)).tagged(&name_tag)) } _ => { yield Err(ShellError::labeled_error_with_secondary( "Expected a table with TSV-compatible structure.tag() from pipeline", "requires TSV-compatible input", - name_tag, + &name_tag, "originates from here".to_string(), value.tag(), )) diff --git a/src/commands/to_url.rs b/src/commands/to_url.rs index dfba5faf4..8dee0a87d 100644 --- a/src/commands/to_url.rs +++ b/src/commands/to_url.rs @@ -47,7 +47,7 @@ fn to_url(args: CommandArgs, registry: &CommandRegistry) -> Result Result { - yield ReturnSuccess::value(Value::string(s).tagged(tag)); + yield ReturnSuccess::value(Value::string(s).tagged(&tag)); } _ => { yield Err(ShellError::labeled_error( "Failed to convert to url-encoded", "cannot url-encode", - tag, + &tag, )) } } @@ -72,7 +72,7 @@ fn to_url(args: CommandArgs, registry: &CommandRegistry) -> Result) -> Result serde_yaml::Value::Number(serde_yaml::Number::from( - CoerceInto::::coerce_into(i.tagged(v.tag), "converting to YAML number")?, + CoerceInto::::coerce_into(i.tagged(&v.tag), "converting to YAML number")?, )), Value::Primitive(Primitive::Nothing) => serde_yaml::Value::Null, Value::Primitive(Primitive::Pattern(s)) => serde_yaml::Value::String(s.clone()), @@ -55,6 +55,7 @@ pub fn value_to_yaml_value(v: &Tagged) -> Result return Err(e.clone()), Value::Block(_) => serde_yaml::Value::Null, Value::Primitive(Primitive::Binary(b)) => serde_yaml::Value::Sequence( b.iter() @@ -81,7 +82,7 @@ fn to_yaml(args: CommandArgs, registry: &CommandRegistry) -> Result> = args.input.values.collect().await; let to_process_input = if input.len() > 1 { - let tag = input[0].tag; + let tag = input[0].tag.clone(); vec![Tagged { item: Value::Table(input), tag } ] } else if input.len() == 1 { input @@ -94,12 +95,12 @@ fn to_yaml(args: CommandArgs, registry: &CommandRegistry) -> Result { match serde_yaml::to_string(&yaml_value) { Ok(x) => yield ReturnSuccess::value( - Value::Primitive(Primitive::String(x)).tagged(name_tag), + Value::Primitive(Primitive::String(x)).tagged(&name_tag), ), _ => yield Err(ShellError::labeled_error_with_secondary( "Expected a table with YAML-compatible structure.tag() from pipeline", "requires YAML-compatible input", - name_tag, + &name_tag, "originates from here".to_string(), value.tag(), )), @@ -108,7 +109,7 @@ fn to_yaml(args: CommandArgs, registry: &CommandRegistry) -> Result yield Err(ShellError::labeled_error( "Expected a table with YAML-compatible structure from pipeline", "requires YAML-compatible input", - name_tag)) + &name_tag)) } } }; diff --git a/src/commands/version.rs b/src/commands/version.rs index 01a134929..11b243f08 100644 --- 
a/src/commands/version.rs +++ b/src/commands/version.rs @@ -31,14 +31,14 @@ impl WholeStreamCommand for Version { pub fn date(args: CommandArgs, registry: &CommandRegistry) -> Result { let args = args.evaluate_once(registry)?; - let tag = args.call_info.name_tag; + let tag = args.call_info.name_tag.clone(); let mut indexmap = IndexMap::new(); indexmap.insert( "version".to_string(), - Value::string(clap::crate_version!()).tagged(tag), + Value::string(clap::crate_version!()).tagged(&tag), ); - let value = Value::Row(Dictionary::from(indexmap)).tagged(tag); + let value = Value::Row(Dictionary::from(indexmap)).tagged(&tag); Ok(OutputStream::one(value)) } diff --git a/src/commands/where_.rs b/src/commands/where_.rs index 673c6dda8..9e3c4d2c0 100644 --- a/src/commands/where_.rs +++ b/src/commands/where_.rs @@ -49,7 +49,7 @@ impl PerItemCommand for Where { return Err(ShellError::labeled_error( "Expected a condition", "where needs a condition", - *tag, + tag, )) } }; diff --git a/src/commands/which_.rs b/src/commands/which_.rs index 905515848..e3b6d1c96 100644 --- a/src/commands/which_.rs +++ b/src/commands/which_.rs @@ -33,7 +33,7 @@ pub fn which(args: CommandArgs, registry: &CommandRegistry) -> Result 0 { @@ -52,7 +52,7 @@ pub fn which(args: CommandArgs, registry: &CommandRegistry) -> Result); - -impl SourceMap { - pub fn insert(&mut self, uuid: Uuid, anchor_location: AnchorLocation) { - self.0.insert(uuid, anchor_location); - } - - pub fn get(&self, uuid: &Uuid) -> Option<&AnchorLocation> { - self.0.get(uuid) - } - - pub fn new() -> SourceMap { - SourceMap(HashMap::new()) - } -} - #[derive(Clone, new)] pub struct CommandRegistry { #[new(value = "Arc::new(Mutex::new(IndexMap::default()))")] @@ -53,13 +34,17 @@ impl CommandRegistry { registry.get(name).map(|c| c.clone()) } + pub(crate) fn expect_command(&self, name: &str) -> Arc { + self.get_command(name).unwrap() + } + pub(crate) fn has(&self, name: &str) -> bool { let registry = self.registry.lock().unwrap(); registry.contains_key(name) } - fn insert(&mut self, name: impl Into, command: Arc) { + pub(crate) fn insert(&mut self, name: impl Into, command: Arc) { let mut registry = self.registry.lock().unwrap(); registry.insert(name.into(), command); } @@ -73,8 +58,8 @@ impl CommandRegistry { #[derive(Clone)] pub struct Context { registry: CommandRegistry, - pub(crate) source_map: SourceMap, host: Arc>, + pub ctrl_c: Arc, pub(crate) shell_manager: ShellManager, } @@ -83,12 +68,20 @@ impl Context { &self.registry } + pub(crate) fn expand_context<'context>( + &'context self, + source: &'context Text, + span: Span, + ) -> ExpandContext<'context> { + ExpandContext::new(&self.registry, span, source, self.shell_manager.homedir()) + } + pub(crate) fn basic() -> Result> { let registry = CommandRegistry::new(); Ok(Context { registry: registry.clone(), - source_map: SourceMap::new(), host: Arc::new(Mutex::new(crate::env::host::BasicHost)), + ctrl_c: Arc::new(AtomicBool::new(false)), shell_manager: ShellManager::basic(registry)?, }) } @@ -105,43 +98,31 @@ impl Context { } } - pub fn add_anchor_location(&mut self, uuid: Uuid, anchor_location: AnchorLocation) { - self.source_map.insert(uuid, anchor_location); + pub(crate) fn get_command(&self, name: &str) -> Option> { + self.registry.get_command(name) } - pub(crate) fn has_command(&self, name: &str) -> bool { - self.registry.has(name) - } - - pub(crate) fn get_command(&self, name: &str) -> Arc { - self.registry.get_command(name).unwrap() + pub(crate) fn expect_command(&self, name: &str) -> Arc { + 
self.registry.expect_command(name) } pub(crate) fn run_command<'a>( &mut self, command: Arc, name_tag: Tag, - source_map: SourceMap, args: hir::Call, source: &Text, input: InputStream, is_first_command: bool, ) -> OutputStream { - let command_args = self.command_args(args, input, source, source_map, name_tag); + let command_args = self.command_args(args, input, source, name_tag); command.run(command_args, self.registry(), is_first_command) } - fn call_info( - &self, - args: hir::Call, - source: &Text, - source_map: SourceMap, - name_tag: Tag, - ) -> UnevaluatedCallInfo { + fn call_info(&self, args: hir::Call, source: &Text, name_tag: Tag) -> UnevaluatedCallInfo { UnevaluatedCallInfo { args, source: source.clone(), - source_map, name_tag, } } @@ -151,13 +132,13 @@ impl Context { args: hir::Call, input: InputStream, source: &Text, - source_map: SourceMap, name_tag: Tag, ) -> CommandArgs { CommandArgs { host: self.host.clone(), + ctrl_c: self.ctrl_c.clone(), shell_manager: self.shell_manager.clone(), - call_info: self.call_info(args, source, source_map, name_tag), + call_info: self.call_info(args, source, name_tag), input, } } diff --git a/src/data/base.rs b/src/data/base.rs index 04465181a..f7b875ef5 100644 --- a/src/data/base.rs +++ b/src/data/base.rs @@ -8,6 +8,7 @@ use crate::Text; use chrono::{DateTime, Utc}; use chrono_humanize::Humanize; use derive_new::new; +use log::trace; use serde::{Deserialize, Serialize}; use std::fmt; use std::path::PathBuf; @@ -212,11 +213,19 @@ impl Block { let scope = Scope::new(value.clone()); if self.expressions.len() == 0 { - return Ok(Value::nothing().tagged(self.tag)); + return Ok(Value::nothing().tagged(&self.tag)); } let mut last = None; + trace!( + "EXPRS = {:?}", + self.expressions + .iter() + .map(|e| format!("{}", e)) + .collect::>() + ); + for expr in self.expressions.iter() { last = Some(evaluate_baseline_expr( &expr, @@ -236,6 +245,9 @@ pub enum Value { Row(crate::data::Dictionary), Table(Vec>), + // Errors are a type of value too + Error(ShellError), + Block(Block), } @@ -284,14 +296,15 @@ impl fmt::Debug for ValueDebug<'_> { Value::Row(o) => o.debug(f), Value::Table(l) => debug_list(l).fmt(f), Value::Block(_) => write!(f, "[[block]]"), + Value::Error(_) => write!(f, "[[error]]"), } } } impl Tagged { - pub(crate) fn tagged_type_name(&self) -> Tagged { + pub fn tagged_type_name(&self) -> Tagged { let name = self.type_name(); - Tagged::from_item(name, self.tag()) + name.tagged(self.tag()) } } @@ -303,7 +316,7 @@ impl std::convert::TryFrom<&Tagged> for Block { Value::Block(block) => Ok(block.clone()), v => Err(ShellError::type_error( "Block", - value.copy_tag(v.type_name()), + v.type_name().tagged(value.tag()), )), } } @@ -315,11 +328,11 @@ impl std::convert::TryFrom<&Tagged> for i64 { fn try_from(value: &Tagged) -> Result { match value.item() { Value::Primitive(Primitive::Int(int)) => { - int.tagged(value.tag).coerce_into("converting to i64") + int.tagged(&value.tag).coerce_into("converting to i64") } v => Err(ShellError::type_error( "Integer", - value.copy_tag(v.type_name()), + v.type_name().tagged(value.tag()), )), } } @@ -333,7 +346,7 @@ impl std::convert::TryFrom<&Tagged> for String { Value::Primitive(Primitive::String(s)) => Ok(s.clone()), v => Err(ShellError::type_error( "String", - value.copy_tag(v.type_name()), + v.type_name().tagged(value.tag()), )), } } @@ -347,7 +360,7 @@ impl std::convert::TryFrom<&Tagged> for Vec { Value::Primitive(Primitive::Binary(b)) => Ok(b.clone()), v => Err(ShellError::type_error( "Binary", - 
value.copy_tag(v.type_name()), + v.type_name().tagged(value.tag()), )), } } @@ -361,7 +374,7 @@ impl<'a> std::convert::TryFrom<&'a Tagged> for &'a crate::data::Dictionar Value::Row(d) => Ok(d), v => Err(ShellError::type_error( "Dictionary", - value.copy_tag(v.type_name()), + v.type_name().tagged(value.tag()), )), } } @@ -383,7 +396,7 @@ impl std::convert::TryFrom>> for Switch { Value::Primitive(Primitive::Boolean(true)) => Ok(Switch::Present), v => Err(ShellError::type_error( "Boolean", - value.copy_tag(v.type_name()), + v.type_name().tagged(value.tag()), )), }, } @@ -394,15 +407,54 @@ impl Tagged { pub(crate) fn debug(&self) -> ValueDebug<'_> { ValueDebug { value: self } } + + pub fn as_column_path(&self) -> Result>>, ShellError> { + let mut out: Vec> = vec![]; + + match &self.item { + Value::Table(table) => { + for item in table { + out.push(item.as_string()?.tagged(&item.tag)); + } + } + + other => { + return Err(ShellError::type_error( + "column name", + other.type_name().tagged(&self.tag), + )) + } + } + + Ok(out.tagged(&self.tag)) + } + + pub(crate) fn as_string(&self) -> Result { + match &self.item { + Value::Primitive(Primitive::String(s)) => Ok(s.clone()), + Value::Primitive(Primitive::Boolean(x)) => Ok(format!("{}", x)), + Value::Primitive(Primitive::Decimal(x)) => Ok(format!("{}", x)), + Value::Primitive(Primitive::Int(x)) => Ok(format!("{}", x)), + Value::Primitive(Primitive::Bytes(x)) => Ok(format!("{}", x)), + Value::Primitive(Primitive::Path(x)) => Ok(format!("{}", x.display())), + // TODO: this should definitely be more general with better errors + other => Err(ShellError::labeled_error( + "Expected string", + other.type_name(), + &self.tag, + )), + } + } } impl Value { - pub(crate) fn type_name(&self) -> String { + pub fn type_name(&self) -> String { match self { Value::Primitive(p) => p.type_name(), - Value::Row(_) => format!("object"), + Value::Row(_) => format!("row"), Value::Table(_) => format!("list"), Value::Block(_) => format!("block"), + Value::Error(_) => format!("error"), } } @@ -418,6 +470,7 @@ impl Value { .collect(), Value::Block(_) => vec![], Value::Table(_) => vec![], + Value::Error(_) => vec![], } } @@ -443,6 +496,22 @@ impl Value { } } + pub fn get_data_by_column_path( + &self, + tag: Tag, + path: &Vec>, + ) -> Option> { + let mut current = self; + for p in path { + match current.get_data_by_key(p) { + Some(v) => current = v, + None => return None, + } + } + + Some(current.tagged(tag)) + } + pub fn get_data_by_path(&self, tag: Tag, path: &str) -> Option> { let mut current = self; for p in path.split(".") { @@ -452,7 +521,7 @@ impl Value { } } - Some(Tagged::from_item(current, tag)) + Some(current.tagged(tag)) } pub fn insert_data_at_path( @@ -472,8 +541,8 @@ impl Value { // Special case for inserting at the top level current .entries - .insert(path.to_string(), Tagged::from_item(new_value, tag)); - return Some(Tagged::from_item(new_obj, tag)); + .insert(path.to_string(), new_value.tagged(&tag)); + return Some(new_obj.tagged(&tag)); } for idx in 0..split_path.len() { @@ -484,13 +553,64 @@ impl Value { Value::Row(o) => { o.entries.insert( split_path[idx + 1].to_string(), - Tagged::from_item(new_value, tag), + new_value.tagged(&tag), ); } _ => {} } - return Some(Tagged::from_item(new_obj, tag)); + return Some(new_obj.tagged(&tag)); + } else { + match next.item { + Value::Row(ref mut o) => { + current = o; + } + _ => return None, + } + } + } + _ => return None, + } + } + } + + None + } + + pub fn insert_data_at_column_path( + &self, + tag: Tag, + split_path: 
&Vec>, + new_value: Value, + ) -> Option> { + let mut new_obj = self.clone(); + + if let Value::Row(ref mut o) = new_obj { + let mut current = o; + + if split_path.len() == 1 { + // Special case for inserting at the top level + current + .entries + .insert(split_path[0].item.clone(), new_value.tagged(&tag)); + return Some(new_obj.tagged(&tag)); + } + + for idx in 0..split_path.len() { + match current.entries.get_mut(&split_path[idx].item) { + Some(next) => { + if idx == (split_path.len() - 2) { + match &mut next.item { + Value::Row(o) => { + o.entries.insert( + split_path[idx + 1].to_string(), + new_value.tagged(&tag), + ); + } + _ => {} + } + + return Some(new_obj.tagged(&tag)); } else { match next.item { Value::Row(ref mut o) => { @@ -524,8 +644,41 @@ impl Value { match current.entries.get_mut(split_path[idx]) { Some(next) => { if idx == (split_path.len() - 1) { - *next = Tagged::from_item(replaced_value, tag); - return Some(Tagged::from_item(new_obj, tag)); + *next = replaced_value.tagged(&tag); + return Some(new_obj.tagged(&tag)); + } else { + match next.item { + Value::Row(ref mut o) => { + current = o; + } + _ => return None, + } + } + } + _ => return None, + } + } + } + + None + } + + pub fn replace_data_at_column_path( + &self, + tag: Tag, + split_path: &Vec>, + replaced_value: Value, + ) -> Option> { + let mut new_obj = self.clone(); + + if let Value::Row(ref mut o) = new_obj { + let mut current = o; + for idx in 0..split_path.len() { + match current.entries.get_mut(&split_path[idx].item) { + Some(next) => { + if idx == (split_path.len() - 1) { + *next = replaced_value.tagged(&tag); + return Some(new_obj.tagged(&tag)); } else { match next.item { Value::Row(ref mut o) => { @@ -549,6 +702,7 @@ impl Value { Value::Row(o) => o.get_data(desc), Value::Block(_) => MaybeOwned::Owned(Value::nothing()), Value::Table(_) => MaybeOwned::Owned(Value::nothing()), + Value::Error(_) => MaybeOwned::Owned(Value::nothing()), } } @@ -558,7 +712,7 @@ impl Value { Value::Block(b) => itertools::join( b.expressions .iter() - .map(|e| e.source(&b.source).to_string()), + .map(|e| e.span.slice(&b.source).to_string()), "; ", ), Value::Row(_) => format!("[table: 1 row]"), @@ -567,6 +721,7 @@ impl Value { l.len(), if l.len() == 1 { "row" } else { "rows" } ), + Value::Error(_) => format!("[error]"), } } @@ -607,22 +762,6 @@ impl Value { } } - pub(crate) fn as_string(&self) -> Result { - match self { - Value::Primitive(Primitive::String(s)) => Ok(s.clone()), - Value::Primitive(Primitive::Boolean(x)) => Ok(format!("{}", x)), - Value::Primitive(Primitive::Decimal(x)) => Ok(format!("{}", x)), - Value::Primitive(Primitive::Int(x)) => Ok(format!("{}", x)), - Value::Primitive(Primitive::Bytes(x)) => Ok(format!("{}", x)), - Value::Primitive(Primitive::Path(x)) => Ok(format!("{}", x.display())), - // TODO: this should definitely be more general with better errors - other => Err(ShellError::string(format!( - "Expected string, got {:?}", - other - ))), - } - } - pub(crate) fn is_true(&self) -> bool { match self { Value::Primitive(Primitive::Boolean(true)) => true, @@ -675,9 +814,14 @@ impl Value { Value::Primitive(Primitive::Date(s.into())) } - pub fn date_from_str(s: &str) -> Result { - let date = DateTime::parse_from_rfc3339(s) - .map_err(|err| ShellError::string(&format!("Date parse error: {}", err)))?; + pub fn date_from_str(s: Tagged<&str>) -> Result { + let date = DateTime::parse_from_rfc3339(s.item).map_err(|err| { + ShellError::labeled_error( + &format!("Date parse error: {}", err), + "original value", + s.tag, + ) 
+ })?; let date = date.with_timezone(&chrono::offset::Utc); diff --git a/src/data/command.rs b/src/data/command.rs index a2046aa7a..25301e6fa 100644 --- a/src/data/command.rs +++ b/src/data/command.rs @@ -7,7 +7,7 @@ use std::ops::Deref; pub(crate) fn command_dict(command: Arc, tag: impl Into) -> Tagged { let tag = tag.into(); - let mut cmd_dict = TaggedDictBuilder::new(tag); + let mut cmd_dict = TaggedDictBuilder::new(&tag); cmd_dict.insert("name", Value::string(command.name())); @@ -42,7 +42,7 @@ fn for_spec(name: &str, ty: &str, required: bool, tag: impl Into) -> Tagged fn signature_dict(signature: Signature, tag: impl Into) -> Tagged { let tag = tag.into(); - let mut sig = TaggedListBuilder::new(tag); + let mut sig = TaggedListBuilder::new(&tag); for arg in signature.positional.iter() { let is_required = match arg { @@ -50,19 +50,19 @@ fn signature_dict(signature: Signature, tag: impl Into) -> Tagged { PositionalType::Optional(_, _) => false, }; - sig.insert_tagged(for_spec(arg.name(), "argument", is_required, tag)); + sig.insert_tagged(for_spec(arg.name(), "argument", is_required, &tag)); } if let Some(_) = signature.rest_positional { let is_required = false; - sig.insert_tagged(for_spec("rest", "argument", is_required, tag)); + sig.insert_tagged(for_spec("rest", "argument", is_required, &tag)); } for (name, ty) in signature.named.iter() { match ty { - NamedType::Mandatory(_) => sig.insert_tagged(for_spec(name, "flag", true, tag)), - NamedType::Optional(_) => sig.insert_tagged(for_spec(name, "flag", false, tag)), - NamedType::Switch => sig.insert_tagged(for_spec(name, "switch", false, tag)), + NamedType::Mandatory(_) => sig.insert_tagged(for_spec(name, "flag", true, &tag)), + NamedType::Optional(_) => sig.insert_tagged(for_spec(name, "flag", false, &tag)), + NamedType::Switch => sig.insert_tagged(for_spec(name, "switch", false, &tag)), } } diff --git a/src/data/config.rs b/src/data/config.rs index 1cb4533d8..26e3e3c7d 100644 --- a/src/data/config.rs +++ b/src/data/config.rs @@ -51,8 +51,9 @@ pub fn user_data() -> Result { } pub fn app_path(app_data_type: AppDataType, display: &str) -> Result { - let path = app_root(app_data_type, &APP_INFO) - .map_err(|err| ShellError::string(&format!("Couldn't open {} path:\n{}", display, err)))?; + let path = app_root(app_data_type, &APP_INFO).map_err(|err| { + ShellError::untagged_runtime_error(&format!("Couldn't open {} path:\n{}", display, err)) + })?; Ok(path) } @@ -74,11 +75,22 @@ pub fn read( let tag = tag.into(); let contents = fs::read_to_string(filename) - .map(|v| v.tagged(tag)) - .map_err(|err| ShellError::string(&format!("Couldn't read config file:\n{}", err)))?; + .map(|v| v.tagged(&tag)) + .map_err(|err| { + ShellError::labeled_error( + &format!("Couldn't read config file:\n{}", err), + "file name", + &tag, + ) + })?; - let parsed: toml::Value = toml::from_str(&contents) - .map_err(|err| ShellError::string(&format!("Couldn't parse config file:\n{}", err)))?; + let parsed: toml::Value = toml::from_str(&contents).map_err(|err| { + ShellError::labeled_error( + &format!("Couldn't parse config file:\n{}", err), + "file name", + &tag, + ) + })?; let value = convert_toml_value_to_nu_value(&parsed, tag); let tag = value.tag(); @@ -86,7 +98,7 @@ pub fn read( Value::Row(Dictionary { entries }) => Ok(entries), other => Err(ShellError::type_error( "Dictionary", - other.type_name().tagged(tag), + other.type_name().tagged(&tag), )), } } diff --git a/src/data/dict.rs b/src/data/dict.rs index c14c86dd9..8f9bb556b 100644 --- a/src/data/dict.rs +++ 
b/src/data/dict.rs
@@ -115,7 +115,7 @@ impl TaggedListBuilder {
     }

     pub fn push(&mut self, value: impl Into<Value>) {
-        self.list.push(value.into().tagged(self.tag));
+        self.list.push(value.into().tagged(&self.tag));
     }

     pub fn insert_tagged(&mut self, value: impl Into<Tagged<Value>>) {
@@ -155,7 +155,7 @@ impl TaggedDictBuilder {
     }

     pub fn insert(&mut self, key: impl Into<String>, value: impl Into<Value>) {
-        self.dict.insert(key.into(), value.into().tagged(self.tag));
+        self.dict.insert(key.into(), value.into().tagged(&self.tag));
     }

     pub fn insert_tagged(&mut self, key: impl Into<String>, value: impl Into<Tagged<Value>>) {
diff --git a/src/data/meta.rs b/src/data/meta.rs
index 0a56198e6..2f3f0cc4c 100644
--- a/src/data/meta.rs
+++ b/src/data/meta.rs
@@ -1,14 +1,52 @@
-use crate::context::{AnchorLocation, SourceMap};
+use crate::context::AnchorLocation;
+use crate::parser::parse::parser::TracableContext;
 use crate::prelude::*;
-use crate::Text;
 use derive_new::new;
 use getset::Getters;
 use serde::Deserialize;
 use serde::Serialize;
 use std::path::{Path, PathBuf};
-use uuid::Uuid;

 #[derive(new, Debug, Clone, Copy, Eq, PartialEq, Ord, PartialOrd, Serialize, Deserialize, Hash)]
+pub struct Spanned<T> {
+    pub span: Span,
+    pub item: T,
+}
+
+impl<T> Spanned<T> {
+    pub fn map<U>(self, input: impl FnOnce(T) -> U) -> Spanned<U> {
+        let span = self.span;
+
+        let mapped = input(self.item);
+        mapped.spanned(span)
+    }
+}
+
+pub trait SpannedItem: Sized {
+    fn spanned(self, span: impl Into<Span>) -> Spanned<Self> {
+        Spanned {
+            item: self,
+            span: span.into(),
+        }
+    }
+
+    fn spanned_unknown(self) -> Spanned<Self> {
+        Spanned {
+            item: self,
+            span: Span::unknown(),
+        }
+    }
+}
+impl<T> SpannedItem for T {}
+
+impl<T> std::ops::Deref for Spanned<T> {
+    type Target = T;
+
+    fn deref(&self) -> &T {
+        &self.item
+    }
+}
+#[derive(new, Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Serialize, Deserialize, Hash)]
 pub struct Tagged<T> {
     pub tag: Tag,
     pub item: T,
@@ -16,7 +54,7 @@ pub struct Tagged<T> {

 impl<T> HasTag for Tagged<T> {
     fn tag(&self) -> Tag {
-        self.tag
+        self.tag.clone()
     }
 }

@@ -28,20 +66,23 @@ impl<T> AsRef<T> for Tagged<T> {

 pub trait TaggedItem: Sized {
     fn tagged(self, tag: impl Into<Tag>) -> Tagged<Self> {
-        Tagged::from_item(self, tag.into())
+        Tagged {
+            item: self,
+            tag: tag.into(),
+        }
     }

     // For now, this is a temporary facility. In many cases, there are other useful spans that we
     // could be using, such as the original source spans of JSON or Toml files, but we don't yet
     // have the infrastructure to make that work.
     fn tagged_unknown(self) -> Tagged<Self> {
-        Tagged::from_item(
-            self,
-            Tag {
+        Tagged {
+            item: self,
+            tag: Tag {
                 span: Span::unknown(),
-                anchor: uuid::Uuid::nil(),
+                anchor: None,
             },
-        )
+        }
     }
 }

@@ -56,48 +97,29 @@ impl<T> std::ops::Deref for Tagged<T> {
 }

 impl<T> Tagged<T> {
-    pub fn with_tag(self, tag: impl Into<Tag>) -> Tagged<T> {
-        Tagged::from_item(self.item, tag)
-    }
-
-    pub fn from_item(item: T, tag: impl Into<Tag>) -> Tagged<T> {
-        Tagged {
-            item,
-            tag: tag.into(),
-        }
-    }
-
     pub fn map<U>(self, input: impl FnOnce(T) -> U) -> Tagged<U> {
         let tag = self.tag();

         let mapped = input(self.item);
-        Tagged::from_item(mapped, tag)
-    }
-
-    pub(crate) fn copy_tag<U>(&self, output: U) -> Tagged<U> {
-        Tagged::from_item(output, self.tag())
-    }
-
-    pub fn source(&self, source: &Text) -> Text {
-        Text::from(self.tag().slice(source))
+        mapped.tagged(tag)
     }

     pub fn tag(&self) -> Tag {
-        self.tag
+        self.tag.clone()
     }

     pub fn span(&self) -> Span {
         self.tag.span
     }

-    pub fn anchor(&self) -> uuid::Uuid {
-        self.tag.anchor
+    pub fn anchor(&self) -> Option<AnchorLocation> {
+        self.tag.anchor.clone()
     }

-    pub fn anchor_name(&self, source_map: &SourceMap) -> Option<String> {
-        match source_map.get(&self.tag.anchor) {
-            Some(AnchorLocation::File(file)) => Some(file.clone()),
-            Some(AnchorLocation::Url(url)) => Some(url.clone()),
+    pub fn anchor_name(&self) -> Option<String> {
+        match self.tag.anchor {
+            Some(AnchorLocation::File(ref file)) => Some(file.clone()),
+            Some(AnchorLocation::Url(ref url)) => Some(url.clone()),
             _ => None,
         }
     }
@@ -113,29 +135,32 @@ impl<T> Tagged<T> {

 impl From<&Tag> for Tag {
     fn from(input: &Tag) -> Tag {
-        *input
+        input.clone()
     }
 }

-impl From<nom_locate::LocatedSpanEx<&str, Uuid>> for Span {
-    fn from(input: nom_locate::LocatedSpanEx<&str, Uuid>) -> Span {
-        Span {
-            start: input.offset,
-            end: input.offset + input.fragment.len(),
-        }
+impl From<nom_locate::LocatedSpanEx<&str, TracableContext>> for Span {
+    fn from(input: nom_locate::LocatedSpanEx<&str, TracableContext>) -> Span {
+        Span::new(input.offset, input.offset + input.fragment.len())
+    }
+}
+
+impl From<nom_locate::LocatedSpanEx<&str, u64>> for Span {
+    fn from(input: nom_locate::LocatedSpanEx<&str, u64>) -> Span {
+        Span::new(input.offset, input.offset + input.fragment.len())
     }
 }

 impl From<(
-        nom_locate::LocatedSpanEx<&str, Uuid>,
-        nom_locate::LocatedSpanEx<&str, Uuid>,
+        nom_locate::LocatedSpanEx<&str, TracableContext>,
+        nom_locate::LocatedSpanEx<&str, TracableContext>,
     )> for Span {
     fn from(
         input: (
-            nom_locate::LocatedSpanEx<&str, Uuid>,
-            nom_locate::LocatedSpanEx<&str, Uuid>,
+            nom_locate::LocatedSpanEx<&str, TracableContext>,
+            nom_locate::LocatedSpanEx<&str, TracableContext>,
         ),
     ) -> Span {
         Span {
@@ -147,10 +172,7 @@ impl
 impl From<(usize, usize)> for Span {
     fn from(input: (usize, usize)) -> Span {
-        Span {
-            start: input.0,
-            end: input.1,
-        }
+        Span::new(input.0, input.1)
     }
 }

@@ -164,61 +186,60 @@ impl From<&std::ops::Range<usize>> for Span {
 }

 #[derive(
-    Debug, Clone, Copy, PartialEq, Eq, Ord, PartialOrd, Serialize, Deserialize, Hash, Getters,
+    Debug, Clone, PartialEq, Eq, Ord, PartialOrd, Serialize, Deserialize, Hash, Getters, new,
 )]
 pub struct Tag {
-    pub anchor: Uuid,
+    pub anchor: Option<AnchorLocation>,
     pub span: Span,
 }

 impl From<Span> for Tag {
     fn from(span: Span) -> Self {
-        Tag {
-            anchor: uuid::Uuid::nil(),
-            span,
-        }
+        Tag { anchor: None, span }
     }
 }

 impl From<&Span> for Tag {
     fn from(span: &Span) -> Self {
         Tag {
-            anchor: uuid::Uuid::nil(),
+            anchor: None,
             span: *span,
         }
     }
 }

-impl From<(usize, usize, Uuid)> for Tag {
-    fn from((start, end, anchor): (usize, usize, Uuid)) -> Self {
+impl From<(usize, usize, TracableContext)> for Tag {
+    fn from((start, end, _context): (usize, usize, TracableContext)) -> Self {
+        Tag {
+            anchor: None,
+            span: Span::new(start, end),
+        }
+    }
+}
+
+impl From<(usize, usize, AnchorLocation)> for Tag {
+    fn from((start, end,
anchor): (usize, usize, AnchorLocation)) -> Self { + Tag { + anchor: Some(anchor), + span: Span::new(start, end), + } + } +} + +impl From<(usize, usize, Option)> for Tag { + fn from((start, end, anchor): (usize, usize, Option)) -> Self { Tag { anchor, - span: Span { start, end }, + span: Span::new(start, end), } } } -impl From<(usize, usize, Option)> for Tag { - fn from((start, end, anchor): (usize, usize, Option)) -> Self { +impl From> for Tag { + fn from(input: nom_locate::LocatedSpanEx<&str, TracableContext>) -> Tag { Tag { - anchor: if let Some(uuid) = anchor { - uuid - } else { - uuid::Uuid::nil() - }, - span: Span { start, end }, - } - } -} - -impl From> for Tag { - fn from(input: nom_locate::LocatedSpanEx<&str, Uuid>) -> Tag { - Tag { - anchor: input.extra, - span: Span { - start: input.offset, - end: input.offset + input.fragment.len(), - }, + anchor: None, + span: Span::new(input.offset, input.offset + input.fragment.len()), } } } @@ -237,22 +258,29 @@ impl From<&Tag> for Span { impl Tag { pub fn unknown_anchor(span: Span) -> Tag { + Tag { anchor: None, span } + } + + pub fn for_char(pos: usize, anchor: AnchorLocation) -> Tag { Tag { - anchor: uuid::Uuid::nil(), - span, + anchor: Some(anchor), + span: Span { + start: pos, + end: pos + 1, + }, } } - pub fn unknown_span(anchor: Uuid) -> Tag { + pub fn unknown_span(anchor: AnchorLocation) -> Tag { Tag { - anchor, + anchor: Some(anchor), span: Span::unknown(), } } pub fn unknown() -> Tag { Tag { - anchor: uuid::Uuid::nil(), + anchor: None, span: Span::unknown(), } } @@ -265,29 +293,73 @@ impl Tag { ); Tag { - span: Span { - start: self.span.start, - end: other.span.end, - }, - anchor: self.anchor, + span: Span::new(self.span.start, other.span.end), + anchor: self.anchor.clone(), + } + } + + pub fn until_option(&self, other: Option>) -> Tag { + match other { + Some(other) => { + let other = other.into(); + debug_assert!( + self.anchor == other.anchor, + "Can only merge two tags with the same anchor" + ); + + Tag { + span: Span::new(self.span.start, other.span.end), + anchor: self.anchor.clone(), + } + } + None => self.clone(), } } pub fn slice<'a>(&self, source: &'a str) -> &'a str { self.span.slice(source) } + + pub fn string<'a>(&self, source: &'a str) -> String { + self.span.slice(source).to_string() + } + + pub fn tagged_slice<'a>(&self, source: &'a str) -> Tagged<&'a str> { + self.span.slice(source).tagged(self) + } + + pub fn tagged_string<'a>(&self, source: &'a str) -> Tagged { + self.span.slice(source).to_string().tagged(self) + } +} + +#[allow(unused)] +pub fn tag_for_tagged_list(mut iter: impl Iterator) -> Tag { + let first = iter.next(); + + let first = match first { + None => return Tag::unknown(), + Some(first) => first, + }; + + let last = iter.last(); + + match last { + None => first, + Some(last) => first.until(last), + } } #[derive(Debug, Clone, Copy, PartialEq, Eq, Ord, PartialOrd, Serialize, Deserialize, Hash)] pub struct Span { - pub(crate) start: usize, - pub(crate) end: usize, + start: usize, + end: usize, } impl From> for Span { fn from(input: Option) -> Span { match input { - None => Span { start: 0, end: 0 }, + None => Span::new(0, 0), Some(span) => span, } } @@ -295,7 +367,54 @@ impl From> for Span { impl Span { pub fn unknown() -> Span { - Span { start: 0, end: 0 } + Span::new(0, 0) + } + + pub fn new(start: usize, end: usize) -> Span { + assert!( + end >= start, + "Can't create a Span whose end < start, start={}, end={}", + start, + end + ); + + Span { start, end } + } + + pub fn for_char(pos: usize) -> Span 
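The new Span::new constructor turns an inverted range into a hard failure instead of silently accepting it, and until / until_option are how larger spans get built from smaller ones. A compact, self-contained sketch of those two behaviours, with the assertion kept as in the patch:

#[derive(Debug, Clone, Copy, PartialEq)]
struct Span { start: usize, end: usize }

impl Span {
    fn new(start: usize, end: usize) -> Span {
        // Same invariant the patch enforces: a span may be empty, but never inverted.
        assert!(
            end >= start,
            "Can't create a Span whose end < start, start={}, end={}",
            start, end
        );
        Span { start, end }
    }

    fn until(&self, other: impl Into<Span>) -> Span {
        let other = other.into();
        Span::new(self.start, other.end)
    }

    fn until_option(&self, other: Option<impl Into<Span>>) -> Span {
        match other {
            Some(other) => self.until(other),
            None => *self,
        }
    }

    fn slice<'a>(&self, source: &'a str) -> &'a str {
        &source[self.start..self.end]
    }
}

fn main() {
    let source = "ls | where size > 10kb";

    let head = Span::new(0, 2);   // "ls"
    let tail = Span::new(5, 22);  // "where size > 10kb"

    // Merging keeps the start of the first span and the end of the second.
    let whole = head.until(tail);
    assert_eq!(whole.slice(source), "ls | where size > 10kb");

    // With no second span, the original is returned unchanged.
    assert_eq!(head.until_option(None::<Span>), head);
}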
{ + Span { + start: pos, + end: pos + 1, + } + } + + pub fn until(&self, other: impl Into) -> Span { + let other = other.into(); + + Span::new(self.start, other.end) + } + + pub fn until_option(&self, other: Option>) -> Span { + match other { + Some(other) => { + let other = other.into(); + + Span::new(self.start, other.end) + } + None => *self, + } + } + + pub fn string<'a>(&self, source: &'a str) -> String { + self.slice(source).to_string() + } + + pub fn spanned_slice<'a>(&self, source: &'a str) -> Spanned<&'a str> { + self.slice(source).spanned(*self) + } + + pub fn spanned_string<'a>(&self, source: &'a str) -> Spanned { + self.slice(source).to_string().spanned(*self) } /* @@ -308,6 +427,14 @@ impl Span { } */ + pub fn start(&self) -> usize { + self.start + } + + pub fn end(&self) -> usize { + self.end + } + pub fn is_unknown(&self) -> bool { self.start == 0 && self.end == 0 } @@ -319,17 +446,11 @@ impl Span { impl language_reporting::ReportingSpan for Span { fn with_start(&self, start: usize) -> Self { - Span { - start, - end: self.end, - } + Span::new(start, self.end) } fn with_end(&self, end: usize) -> Self { - Span { - start: self.start, - end, - } + Span::new(self.start, end) } fn start(&self) -> usize { @@ -340,33 +461,3 @@ impl language_reporting::ReportingSpan for Span { self.end } } - -impl language_reporting::ReportingSpan for Tag { - fn with_start(&self, start: usize) -> Self { - Tag { - span: Span { - start, - end: self.span.end, - }, - anchor: self.anchor, - } - } - - fn with_end(&self, end: usize) -> Self { - Tag { - span: Span { - start: self.span.start, - end, - }, - anchor: self.anchor, - } - } - - fn start(&self) -> usize { - self.span.start - } - - fn end(&self) -> usize { - self.span.end - } -} diff --git a/src/data/types.rs b/src/data/types.rs index 8dca43d87..b4ff545de 100644 --- a/src/data/types.rs +++ b/src/data/types.rs @@ -54,7 +54,7 @@ impl ExtractType for i64 { &Tagged { item: Value::Primitive(Primitive::Int(int)), .. - } => Ok(int.tagged(value.tag).coerce_into("converting to i64")?), + } => Ok(int.tagged(&value.tag).coerce_into("converting to i64")?), other => Err(ShellError::type_error("Integer", other.tagged_type_name())), } } @@ -68,7 +68,7 @@ impl ExtractType for u64 { &Tagged { item: Value::Primitive(Primitive::Int(int)), .. 
- } => Ok(int.tagged(value.tag).coerce_into("converting to u64")?), + } => Ok(int.tagged(&value.tag).coerce_into("converting to u64")?), other => Err(ShellError::type_error("Integer", other.tagged_type_name())), } } diff --git a/src/errors.rs b/src/errors.rs index 7e9c14b23..11628dde4 100644 --- a/src/errors.rs +++ b/src/errors.rs @@ -1,5 +1,6 @@ use crate::prelude::*; +use crate::parser::parse::parser::TracableContext; use ansi_term::Color; use derive_new::new; use language_reporting::{Diagnostic, Label, Severity}; @@ -13,12 +14,20 @@ pub enum Description { } impl Description { - fn into_label(self) -> Result, String> { + fn into_label(self) -> Result, String> { match self { - Description::Source(s) => Ok(Label::new_primary(s.tag()).with_message(s.item)), + Description::Source(s) => Ok(Label::new_primary(s.span()).with_message(s.item)), Description::Synthetic(s) => Err(s), } } + + #[allow(unused)] + fn tag(&self) -> Tag { + match self { + Description::Source(tagged) => tagged.tag.clone(), + Description::Synthetic(_) => Tag::unknown(), + } + } } #[derive(Debug, Eq, PartialEq, Clone, Ord, PartialOrd, Serialize, Deserialize)] @@ -35,6 +44,13 @@ pub struct ShellError { cause: Option>, } +impl ShellError { + #[allow(unused)] + pub(crate) fn tag(&self) -> Option { + self.error.tag() + } +} + impl ToDebug for ShellError { fn fmt_debug(&self, f: &mut fmt::Formatter, source: &str) -> fmt::Result { self.error.fmt_debug(f, source) @@ -46,12 +62,12 @@ impl serde::de::Error for ShellError { where T: std::fmt::Display, { - ShellError::string(msg.to_string()) + ShellError::untagged_runtime_error(msg.to_string()) } } impl ShellError { - pub(crate) fn type_error( + pub fn type_error( expected: impl Into, actual: Tagged>, ) -> ShellError { @@ -62,6 +78,21 @@ impl ShellError { .start() } + pub fn untagged_runtime_error(error: impl Into) -> ShellError { + ProximateShellError::UntaggedRuntimeError { + reason: error.into(), + } + .start() + } + + pub(crate) fn unexpected_eof(expected: impl Into, tag: impl Into) -> ShellError { + ProximateShellError::UnexpectedEof { + expected: expected.into(), + tag: tag.into(), + } + .start() + } + pub(crate) fn range_error( expected: impl Into, actual: &Tagged, @@ -69,7 +100,7 @@ impl ShellError { ) -> ShellError { ProximateShellError::RangeError { kind: expected.into(), - actual_kind: actual.copy_tag(format!("{:?}", actual.item)), + actual_kind: format!("{:?}", actual.item).tagged(actual.tag()), operation, } .start() @@ -82,6 +113,7 @@ impl ShellError { .start() } + #[allow(unused)] pub(crate) fn invalid_command(problem: impl Into) -> ShellError { ProximateShellError::InvalidCommand { command: problem.into(), @@ -111,29 +143,29 @@ impl ShellError { pub(crate) fn argument_error( command: impl Into, kind: ArgumentError, - tag: Tag, + tag: impl Into, ) -> ShellError { ProximateShellError::ArgumentError { command: command.into(), error: kind, - tag, + tag: tag.into(), } .start() } - pub(crate) fn invalid_external_word(tag: Tag) -> ShellError { + pub(crate) fn invalid_external_word(tag: impl Into) -> ShellError { ProximateShellError::ArgumentError { command: "Invalid argument to Nu command (did you mean to call an external command?)" .into(), error: ArgumentError::InvalidExternalWord, - tag, + tag: tag.into(), } .start() } pub(crate) fn parse_error( error: nom::Err<( - nom_locate::LocatedSpanEx<&str, uuid::Uuid>, + nom_locate::LocatedSpanEx<&str, TracableContext>, nom::error::ErrorKind, )>, ) -> ShellError { @@ -151,25 +183,22 @@ impl ShellError { } nom::Err::Failure(span) | 
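The constructors added here (untagged_runtime_error, unexpected_eof) replace the old catch-all ShellError::string: an error now either carries a Tag that can be pointed at in the user's input, or is explicitly untagged. A reduced sketch of that split, with simplified stand-in types rather than the crate's real error machinery:

#[derive(Debug, Clone, Copy, PartialEq)]
struct Span { start: usize, end: usize }

#[derive(Debug, Clone, PartialEq)]
struct Tag { anchor: Option<String>, span: Span }

#[derive(Debug, PartialEq)]
enum ShellError {
    // A location-less failure, e.g. an I/O error bubbling up from the OS.
    UntaggedRuntimeError { reason: String },
    // A failure that can be underlined in the source the user typed.
    UnexpectedEof { expected: String, tag: Tag },
}

impl ShellError {
    fn untagged_runtime_error(error: impl Into<String>) -> ShellError {
        ShellError::UntaggedRuntimeError { reason: error.into() }
    }

    fn unexpected_eof(expected: impl Into<String>, tag: impl Into<Tag>) -> ShellError {
        ShellError::UnexpectedEof { expected: expected.into(), tag: tag.into() }
    }
}

fn main() {
    let io_failure = ShellError::untagged_runtime_error("Couldn't open config path");

    let tag = Tag { anchor: None, span: Span { start: 10, end: 10 } };
    let eof = ShellError::unexpected_eof("a closing quote", tag);

    println!("{:?}\n{:?}", io_failure, eof);
}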
nom::Err::Error(span) => { let diagnostic = Diagnostic::new(Severity::Error, format!("Parse Error")) - .with_label(Label::new_primary(Tag::from(span.0))); + .with_label(Label::new_primary(Span::from(span.0))); ShellError::diagnostic(diagnostic) } } } - pub(crate) fn diagnostic(diagnostic: Diagnostic) -> ShellError { + pub(crate) fn diagnostic(diagnostic: Diagnostic) -> ShellError { ProximateShellError::Diagnostic(ShellDiagnostic { diagnostic }).start() } - pub(crate) fn to_diagnostic(self) -> Diagnostic { + pub(crate) fn to_diagnostic(self) -> Diagnostic { match self.error { - ProximateShellError::String(StringError { title, .. }) => { - Diagnostic::new(Severity::Error, title) - } ProximateShellError::InvalidCommand { command } => { Diagnostic::new(Severity::Error, "Invalid command") - .with_label(Label::new_primary(command)) + .with_label(Label::new_primary(command.span)) } ProximateShellError::MissingValue { tag, reason } => { let mut d = Diagnostic::new( @@ -178,7 +207,7 @@ impl ShellError { ); if let Some(tag) = tag { - d = d.with_label(Label::new_primary(tag)); + d = d.with_label(Label::new_primary(tag.span)); } d @@ -191,7 +220,7 @@ impl ShellError { ArgumentError::InvalidExternalWord => Diagnostic::new( Severity::Error, format!("Invalid bare word for Nu command (did you intend to invoke an external command?)")) - .with_label(Label::new_primary(tag)), + .with_label(Label::new_primary(tag.span)), ArgumentError::MissingMandatoryFlag(name) => Diagnostic::new( Severity::Error, format!( @@ -201,7 +230,7 @@ impl ShellError { Color::Black.bold().paint(name) ), ) - .with_label(Label::new_primary(tag)), + .with_label(Label::new_primary(tag.span)), ArgumentError::MissingMandatoryPositional(name) => Diagnostic::new( Severity::Error, format!( @@ -211,7 +240,7 @@ impl ShellError { ), ) .with_label( - Label::new_primary(tag).with_message(format!("requires {} parameter", name)), + Label::new_primary(tag.span).with_message(format!("requires {} parameter", name)), ), ArgumentError::MissingValueForName(name) => Diagnostic::new( Severity::Error, @@ -222,7 +251,7 @@ impl ShellError { Color::Black.bold().paint(name) ), ) - .with_label(Label::new_primary(tag)), + .with_label(Label::new_primary(tag.span)), }, ProximateShellError::TypeError { expected, @@ -232,10 +261,9 @@ impl ShellError { tag, }, } => Diagnostic::new(Severity::Error, "Type Error").with_label( - Label::new_primary(tag) + Label::new_primary(tag.span) .with_message(format!("Expected {}, found {}", expected, actual)), ), - ProximateShellError::TypeError { expected, actual: @@ -244,7 +272,12 @@ impl ShellError { tag }, } => Diagnostic::new(Severity::Error, "Type Error") - .with_label(Label::new_primary(tag).with_message(expected)), + .with_label(Label::new_primary(tag.span).with_message(expected)), + + ProximateShellError::UnexpectedEof { + expected, tag + } => Diagnostic::new(Severity::Error, format!("Unexpected end of input")) + .with_label(Label::new_primary(tag.span).with_message(format!("Expected {}", expected))), ProximateShellError::RangeError { kind, @@ -255,7 +288,7 @@ impl ShellError { tag }, } => Diagnostic::new(Severity::Error, "Range Error").with_label( - Label::new_primary(tag).with_message(format!( + Label::new_primary(tag.span).with_message(format!( "Expected to convert {} to {} while {}, but it was out of range", item, kind.desc(), @@ -267,12 +300,12 @@ impl ShellError { problem: Tagged { tag, - .. 
+ item }, } => Diagnostic::new(Severity::Error, "Syntax Error") - .with_label(Label::new_primary(tag).with_message("Unexpected external command")), + .with_label(Label::new_primary(tag.span).with_message(item)), - ProximateShellError::MissingProperty { subpath, expr } => { + ProximateShellError::MissingProperty { subpath, expr, .. } => { let subpath = subpath.into_label(); let expr = expr.into_label(); @@ -293,9 +326,11 @@ impl ShellError { ProximateShellError::Diagnostic(diag) => diag.diagnostic, ProximateShellError::CoerceError { left, right } => { Diagnostic::new(Severity::Error, "Coercion error") - .with_label(Label::new_primary(left.tag()).with_message(left.item)) - .with_label(Label::new_secondary(right.tag()).with_message(right.item)) + .with_label(Label::new_primary(left.tag().span).with_message(left.item)) + .with_label(Label::new_secondary(right.tag().span).with_message(right.item)) } + + ProximateShellError::UntaggedRuntimeError { reason } => Diagnostic::new(Severity::Error, format!("Error: {}", reason)) } } @@ -306,7 +341,7 @@ impl ShellError { ) -> ShellError { ShellError::diagnostic( Diagnostic::new(Severity::Error, msg.into()) - .with_label(Label::new_primary(tag.into()).with_message(label.into())), + .with_label(Label::new_primary(tag.into().span).with_message(label.into())), ) } @@ -320,25 +355,29 @@ impl ShellError { ShellError::diagnostic( Diagnostic::new_error(msg.into()) .with_label( - Label::new_primary(primary_span.into()).with_message(primary_label.into()), + Label::new_primary(primary_span.into().span).with_message(primary_label.into()), ) .with_label( - Label::new_secondary(secondary_span.into()) + Label::new_secondary(secondary_span.into().span) .with_message(secondary_label.into()), ), ) } - pub fn string(title: impl Into) -> ShellError { - ProximateShellError::String(StringError::new(title.into(), Value::nothing())).start() - } + // pub fn string(title: impl Into) -> ShellError { + // ProximateShellError::String(StringError::new(title.into(), String::new())).start() + // } pub(crate) fn unimplemented(title: impl Into) -> ShellError { - ShellError::string(&format!("Unimplemented: {}", title.into())) + ShellError::untagged_runtime_error(&format!("Unimplemented: {}", title.into())) } pub(crate) fn unexpected(title: impl Into) -> ShellError { - ShellError::string(&format!("Unexpected: {}", title.into())) + ShellError::untagged_runtime_error(&format!("Unexpected: {}", title.into())) + } + + pub(crate) fn unreachable(title: impl Into) -> ShellError { + ShellError::untagged_runtime_error(&format!("BUG: Unreachable: {}", title.into())) } } @@ -383,10 +422,13 @@ impl ExpectedRange { #[derive(Debug, Eq, PartialEq, Clone, Ord, PartialOrd, Serialize, Deserialize)] pub enum ProximateShellError { - String(StringError), SyntaxError { problem: Tagged, }, + UnexpectedEof { + expected: String, + tag: Tag, + }, InvalidCommand { command: Tag, }, @@ -397,6 +439,7 @@ pub enum ProximateShellError { MissingProperty { subpath: Description, expr: Description, + tag: Tag, }, MissingValue { tag: Option, @@ -417,6 +460,9 @@ pub enum ProximateShellError { left: Tagged, right: Tagged, }, + UntaggedRuntimeError { + reason: String, + }, } impl ProximateShellError { @@ -426,6 +472,22 @@ impl ProximateShellError { error: self, } } + + pub(crate) fn tag(&self) -> Option { + Some(match self { + ProximateShellError::SyntaxError { problem } => problem.tag(), + ProximateShellError::UnexpectedEof { tag, .. 
} => tag.clone(), + ProximateShellError::InvalidCommand { command } => command.clone(), + ProximateShellError::TypeError { actual, .. } => actual.tag.clone(), + ProximateShellError::MissingProperty { tag, .. } => tag.clone(), + ProximateShellError::MissingValue { tag, .. } => return tag.clone(), + ProximateShellError::ArgumentError { tag, .. } => tag.clone(), + ProximateShellError::RangeError { actual_kind, .. } => actual_kind.tag.clone(), + ProximateShellError::Diagnostic(..) => return None, + ProximateShellError::UntaggedRuntimeError { .. } => return None, + ProximateShellError::CoerceError { left, right } => left.tag.until(&right.tag), + }) + } } impl ToDebug for ProximateShellError { @@ -437,7 +499,7 @@ impl ToDebug for ProximateShellError { #[derive(Debug, Clone, Serialize, Deserialize)] pub struct ShellDiagnostic { - pub(crate) diagnostic: Diagnostic, + pub(crate) diagnostic: Diagnostic, } impl PartialEq for ShellDiagnostic { @@ -463,22 +525,23 @@ impl std::cmp::Ord for ShellDiagnostic { #[derive(Debug, Ord, PartialOrd, Eq, PartialEq, new, Clone, Serialize, Deserialize)] pub struct StringError { title: String, - error: Value, + error: String, } impl std::fmt::Display for ShellError { fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { match &self.error { - ProximateShellError::String(s) => write!(f, "{}", &s.title), ProximateShellError::MissingValue { .. } => write!(f, "MissingValue"), ProximateShellError::InvalidCommand { .. } => write!(f, "InvalidCommand"), ProximateShellError::TypeError { .. } => write!(f, "TypeError"), + ProximateShellError::UnexpectedEof { .. } => write!(f, "UnexpectedEof"), ProximateShellError::RangeError { .. } => write!(f, "RangeError"), ProximateShellError::SyntaxError { .. } => write!(f, "SyntaxError"), ProximateShellError::MissingProperty { .. } => write!(f, "MissingProperty"), ProximateShellError::ArgumentError { .. } => write!(f, "ArgumentError"), ProximateShellError::Diagnostic(_) => write!(f, ""), ProximateShellError::CoerceError { .. } => write!(f, "CoerceError"), + ProximateShellError::UntaggedRuntimeError { .. 
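The tag() helper added to ProximateShellError picks, per variant, whichever tag best locates the failure, and returns None for diagnostics and untagged errors. A boiled-down version of that dispatch with a smaller, illustrative variant set:

#[derive(Debug, Clone, Copy, PartialEq)]
struct Span { start: usize, end: usize }

#[derive(Debug, Clone, PartialEq)]
struct Tag { span: Span }

#[derive(Debug)]
enum ProximateError {
    UnexpectedEof { expected: String, tag: Tag },
    ArgumentError { command: String, tag: Tag },
    MissingValue { tag: Option<Tag>, reason: String },
    UntaggedRuntimeError { reason: String },
}

impl ProximateError {
    // Same idea as the patch: most variants own a Tag, some may not have one at all.
    fn tag(&self) -> Option<Tag> {
        Some(match self {
            ProximateError::UnexpectedEof { tag, .. } => tag.clone(),
            ProximateError::ArgumentError { tag, .. } => tag.clone(),
            ProximateError::MissingValue { tag, .. } => return tag.clone(),
            ProximateError::UntaggedRuntimeError { .. } => return None,
        })
    }
}

fn main() {
    let err = ProximateError::ArgumentError {
        command: "where".into(),
        tag: Tag { span: Span { start: 3, end: 8 } },
    };
    assert!(err.tag().is_some());

    let plain = ProximateError::UntaggedRuntimeError { reason: "boom".into() };
    assert!(plain.tag().is_none());
    println!("{:?} / {:?}", err, plain);
}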
} => write!(f, "UntaggedRuntimeError"), } } } @@ -487,71 +550,43 @@ impl std::error::Error for ShellError {} impl std::convert::From> for ShellError { fn from(input: Box) -> ShellError { - ProximateShellError::String(StringError { - title: format!("{}", input), - error: Value::nothing(), - }) - .start() + ShellError::untagged_runtime_error(format!("{}", input)) } } impl std::convert::From for ShellError { fn from(input: std::io::Error) -> ShellError { - ProximateShellError::String(StringError { - title: format!("{}", input), - error: Value::nothing(), - }) - .start() + ShellError::untagged_runtime_error(format!("{}", input)) } } impl std::convert::From for ShellError { fn from(input: subprocess::PopenError) -> ShellError { - ProximateShellError::String(StringError { - title: format!("{}", input), - error: Value::nothing(), - }) - .start() + ShellError::untagged_runtime_error(format!("{}", input)) } } impl std::convert::From for ShellError { fn from(input: serde_yaml::Error) -> ShellError { - ProximateShellError::String(StringError { - title: format!("{:?}", input), - error: Value::nothing(), - }) - .start() + ShellError::untagged_runtime_error(format!("{:?}", input)) } } impl std::convert::From for ShellError { fn from(input: toml::ser::Error) -> ShellError { - ProximateShellError::String(StringError { - title: format!("{:?}", input), - error: Value::nothing(), - }) - .start() + ShellError::untagged_runtime_error(format!("{:?}", input)) } } impl std::convert::From for ShellError { fn from(input: serde_json::Error) -> ShellError { - ProximateShellError::String(StringError { - title: format!("{:?}", input), - error: Value::nothing(), - }) - .start() + ShellError::untagged_runtime_error(format!("{:?}", input)) } } impl std::convert::From> for ShellError { fn from(input: Box) -> ShellError { - ProximateShellError::String(StringError { - title: format!("{:?}", input), - error: Value::nothing(), - }) - .start() + ShellError::untagged_runtime_error(format!("{:?}", input)) } } @@ -567,7 +602,6 @@ impl ShellErrorUtils> for Option> { } } } - pub trait CoerceInto { fn coerce_into(self, operation: impl Into) -> Result; } diff --git a/src/evaluate/evaluator.rs b/src/evaluate/evaluator.rs index a111d3964..75eb2f466 100644 --- a/src/evaluate/evaluator.rs +++ b/src/evaluate/evaluator.rs @@ -7,6 +7,8 @@ use crate::parser::{ use crate::prelude::*; use derive_new::new; use indexmap::IndexMap; +use log::trace; +use std::fmt; #[derive(new)] pub struct Scope { @@ -15,6 +17,15 @@ pub struct Scope { vars: IndexMap>, } +impl fmt::Display for Scope { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.debug_map() + .entry(&"$it", &format!("{:?}", self.it.item)) + .entries(self.vars.iter().map(|(k, v)| (k, &v.item))) + .finish() + } +} + impl Scope { pub(crate) fn empty() -> Scope { Scope { @@ -37,28 +48,41 @@ pub(crate) fn evaluate_baseline_expr( scope: &Scope, source: &Text, ) -> Result, ShellError> { + let tag = Tag { + span: expr.span, + anchor: None, + }; match &expr.item { - RawExpression::Literal(literal) => Ok(evaluate_literal(expr.copy_tag(literal), source)), + RawExpression::Literal(literal) => Ok(evaluate_literal(literal.tagged(tag), source)), RawExpression::ExternalWord => Err(ShellError::argument_error( "Invalid external word", ArgumentError::InvalidExternalWord, - expr.tag(), + tag, )), - RawExpression::FilePath(path) => Ok(Value::path(path.clone()).tagged(expr.tag())), + RawExpression::FilePath(path) => Ok(Value::path(path.clone()).tagged(tag)), 
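With StringError gone, the blanket From conversions above all funnel foreign errors into a single untagged variant, which is what keeps the ? operator ergonomic in commands that do I/O or serialization. A minimal sketch of the same idea for std::io::Error, using a stripped-down stand-in for ShellError:

use std::fs;

#[derive(Debug)]
enum ShellError {
    UntaggedRuntimeError { reason: String },
}

impl ShellError {
    fn untagged_runtime_error(error: impl Into<String>) -> ShellError {
        ShellError::UntaggedRuntimeError { reason: error.into() }
    }
}

// Mirrors the patch: the conversion just formats the source error into the reason.
impl From<std::io::Error> for ShellError {
    fn from(input: std::io::Error) -> ShellError {
        ShellError::untagged_runtime_error(format!("{}", input))
    }
}

// The ? operator now converts the io::Error automatically.
fn read_config(path: &str) -> Result<String, ShellError> {
    let contents = fs::read_to_string(path)?;
    Ok(contents)
}

fn main() {
    match read_config("definitely-missing.toml") {
        Ok(_) => println!("read config"),
        Err(ShellError::UntaggedRuntimeError { reason }) => println!("error: {}", reason),
    }
}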
RawExpression::Synthetic(hir::Synthetic::String(s)) => { Ok(Value::string(s).tagged_unknown()) } - RawExpression::Variable(var) => evaluate_reference(var, scope, source), + RawExpression::Variable(var) => evaluate_reference(var, scope, source, tag), + RawExpression::Command(_) => evaluate_command(tag, scope, source), RawExpression::ExternalCommand(external) => evaluate_external(external, scope, source), RawExpression::Binary(binary) => { let left = evaluate_baseline_expr(binary.left(), registry, scope, source)?; let right = evaluate_baseline_expr(binary.right(), registry, scope, source)?; + trace!("left={:?} right={:?}", left.item, right.item); + match left.compare(binary.op(), &*right) { - Ok(result) => Ok(Value::boolean(result).tagged(expr.tag())), + Ok(result) => Ok(Value::boolean(result).tagged(tag)), Err((left_type, right_type)) => Err(ShellError::coerce_error( - binary.left().copy_tag(left_type), - binary.right().copy_tag(right_type), + left_type.tagged(Tag { + span: binary.left().span, + anchor: None, + }), + right_type.tagged(Tag { + span: binary.right().span, + anchor: None, + }), )), } } @@ -70,13 +94,10 @@ pub(crate) fn evaluate_baseline_expr( exprs.push(expr); } - Ok(Value::Table(exprs).tagged(expr.tag())) + Ok(Value::Table(exprs).tagged(tag)) } RawExpression::Block(block) => { - Ok( - Value::Block(Block::new(block.clone(), source.clone(), expr.tag())) - .tagged(expr.tag()), - ) + Ok(Value::Block(Block::new(block.clone(), source.clone(), tag.clone())).tagged(&tag)) } RawExpression::Path(path) => { let value = evaluate_baseline_expr(path.head(), registry, scope, source)?; @@ -96,19 +117,27 @@ pub(crate) fn evaluate_baseline_expr( possible_matches.sort(); - return Err(ShellError::labeled_error( - "Unknown column", - format!("did you mean '{}'?", possible_matches[0].1), - expr.tag(), - )); + if possible_matches.len() > 0 { + return Err(ShellError::labeled_error( + "Unknown column", + format!("did you mean '{}'?", possible_matches[0].1), + &tag, + )); + } else { + return Err(ShellError::labeled_error( + "Unknown column", + "row does not have this column", + &tag, + )); + } } Some(next) => { - item = next.clone().item.tagged(expr.tag()); + item = next.clone().item.tagged(&tag); } }; } - Ok(item.item().clone().tagged(expr.tag())) + Ok(item.item().clone().tagged(tag)) } RawExpression::Boolean(_boolean) => unimplemented!(), } @@ -130,14 +159,16 @@ fn evaluate_reference( name: &hir::Variable, scope: &Scope, source: &Text, + tag: Tag, ) -> Result, ShellError> { + trace!("Evaluating {} with Scope {}", name, scope); match name { - hir::Variable::It(tag) => Ok(scope.it.item.clone().tagged(*tag)), - hir::Variable::Other(tag) => Ok(scope + hir::Variable::It(_) => Ok(scope.it.item.clone().tagged(tag)), + hir::Variable::Other(inner) => Ok(scope .vars - .get(tag.slice(source)) + .get(inner.slice(source)) .map(|v| v.clone()) - .unwrap_or_else(|| Value::nothing().tagged(*tag))), + .unwrap_or_else(|| Value::nothing().tagged(tag))), } } @@ -150,3 +181,7 @@ fn evaluate_external( "Unexpected external command".tagged(*external.name()), )) } + +fn evaluate_command(tag: Tag, _scope: &Scope, _source: &Text) -> Result, ShellError> { + Err(ShellError::syntax_error("Unexpected command".tagged(tag))) +} diff --git a/src/format/generic.rs b/src/format/generic.rs index b6f9e29f2..fd058f31f 100644 --- a/src/format/generic.rs +++ b/src/format/generic.rs @@ -14,7 +14,7 @@ impl RenderView for GenericView<'_> { match self.value { Value::Primitive(p) => Ok(host.stdout(&p.format(None))), Value::Table(l) => { - let 
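The new Display impl for Scope leans on Formatter::debug_map to print $it plus every variable without hand-rolling the layout, which is what makes the added trace!("Evaluating {} with Scope {}", name, scope) line cheap to read. A self-contained sketch of the same trick; the Value stand-in, the BTreeMap, and the sample variables are illustrative only:

use std::collections::BTreeMap;
use std::fmt;

struct Scope {
    it: String,                     // stand-in for the Tagged<Value> "it" binding
    vars: BTreeMap<String, String>, // stand-in for the variable map
}

impl fmt::Display for Scope {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // debug_map renders {"$it": ..., "count": ..., ...} and handles
        // separators and escaping, mirroring the impl added in the patch.
        f.debug_map()
            .entry(&"$it", &self.it)
            .entries(self.vars.iter())
            .finish()
    }
}

fn main() {
    let mut vars = BTreeMap::new();
    vars.insert("count".to_string(), "3".to_string());
    vars.insert("name".to_string(), "files.txt".to_string());

    let scope = Scope { it: "row 0".to_string(), vars };

    // Handy when trace-logging evaluation of a variable reference.
    println!("{}", scope);
}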
view = TableView::from_list(l); + let view = TableView::from_list(l, 0); if let Some(view) = view { view.render_view(host)?; @@ -35,6 +35,8 @@ impl RenderView for GenericView<'_> { view.render_view(host)?; Ok(()) } + + Value::Error(e) => Err(e.clone()), } } } diff --git a/src/format/table.rs b/src/format/table.rs index 286be222c..f4b318dae 100644 --- a/src/format/table.rs +++ b/src/format/table.rs @@ -34,7 +34,7 @@ impl TableView { ret } - pub fn from_list(values: &[Tagged]) -> Option { + pub fn from_list(values: &[Tagged], starting_idx: usize) -> Option { if values.len() == 0 { return None; } @@ -42,7 +42,7 @@ impl TableView { let mut headers = TableView::merge_descriptors(values); if headers.len() == 0 { - headers.push("value".to_string()); + headers.push("".to_string()); } let mut entries = vec![]; @@ -68,7 +68,7 @@ impl TableView { if values.len() > 1 { // Indices are black, bold, right-aligned: - row.insert(0, (format!("{}", idx.to_string()), "Fdbr")); + row.insert(0, (format!("{}", (starting_idx + idx).to_string()), "Fdbr")); } entries.push(row); diff --git a/src/lib.rs b/src/lib.rs index e8e09aacd..bfcaa4510 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -1,4 +1,4 @@ -#![recursion_limit = "512"] +#![recursion_limit = "1024"] #[macro_use] mod prelude; @@ -21,7 +21,7 @@ mod traits; mod utils; pub use crate::commands::command::{CallInfo, ReturnSuccess, ReturnValue}; -pub use crate::context::{AnchorLocation, SourceMap}; +pub use crate::context::AnchorLocation; pub use crate::env::host::BasicHost; pub use crate::parser::hir::SyntaxShape; pub use crate::parser::parse::token_tree_builder::TokenTreeBuilder; @@ -31,7 +31,7 @@ pub use cli::cli; pub use data::base::{Primitive, Value}; pub use data::config::{config_path, APP_INFO}; pub use data::dict::{Dictionary, TaggedDictBuilder}; -pub use data::meta::{Tag, Tagged, TaggedItem}; +pub use data::meta::{Span, Spanned, SpannedItem, Tag, Tagged, TaggedItem}; pub use errors::{CoerceInto, ShellError}; pub use num_traits::cast::ToPrimitive; pub use parser::parse::text::Text; diff --git a/src/main.rs b/src/main.rs index 7f82808e7..4b10944a2 100644 --- a/src/main.rs +++ b/src/main.rs @@ -3,6 +3,9 @@ use log::LevelFilter; use std::error::Error; fn main() -> Result<(), Box> { + #[cfg(feature1)] + println!("feature1 is enabled"); + let matches = App::new("nushell") .version(clap::crate_version!()) .arg( diff --git a/src/parser.rs b/src/parser.rs index 138125769..37c8c09c3 100644 --- a/src/parser.rs +++ b/src/parser.rs @@ -7,24 +7,24 @@ pub(crate) mod registry; use crate::errors::ShellError; pub(crate) use deserializer::ConfigDeserializer; -pub(crate) use hir::baseline_parse_tokens::baseline_parse_tokens; +pub(crate) use hir::syntax_shape::flat_shape::FlatShape; +pub(crate) use hir::TokensIterator; pub(crate) use parse::call_node::CallNode; pub(crate) use parse::files::Files; -pub(crate) use parse::flag::Flag; +pub(crate) use parse::flag::{Flag, FlagKind}; pub(crate) use parse::operator::Operator; pub(crate) use parse::parser::{nom_input, pipeline}; pub(crate) use parse::pipeline::{Pipeline, PipelineElement}; pub(crate) use parse::text::Text; -pub(crate) use parse::token_tree::{DelimitedNode, Delimiter, PathNode, TokenNode}; -pub(crate) use parse::tokens::{RawToken, Token}; +pub(crate) use parse::token_tree::{DelimitedNode, Delimiter, TokenNode}; +pub(crate) use parse::tokens::{RawNumber, RawToken}; pub(crate) use parse::unit::Unit; -pub(crate) use parse_command::parse_command; pub(crate) use registry::CommandRegistry; -pub fn parse(input: &str, anchor: 
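TableView::from_list gains a starting_idx so a paginated table keeps counting from where the previous page stopped: the leading index cell becomes starting_idx + idx. A tiny sketch of that offset logic, with plain strings standing in for Tagged<Value> rows and most of the real formatting omitted:

struct TableView {
    entries: Vec<Vec<String>>,
}

impl TableView {
    // Stand-in for the real signature, which takes &[Tagged<Value>] plus the new starting_idx.
    fn from_list(values: &[String], starting_idx: usize) -> Option<TableView> {
        if values.is_empty() {
            return None;
        }

        let mut entries = Vec::new();
        for (idx, value) in values.iter().enumerate() {
            // The displayed row number is offset by where this page of rows starts.
            entries.push(vec![(starting_idx + idx).to_string(), value.clone()]);
        }

        Some(TableView { entries })
    }
}

fn main() {
    // Suppose rows 0..=99 were already rendered; the next page starts at 100.
    let page: Vec<String> = vec!["alpha".into(), "beta".into(), "gamma".into()];
    let view = TableView::from_list(&page, 100).unwrap();

    assert_eq!(view.entries[0][0], "100");
    assert_eq!(view.entries[2][0], "102");
}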
uuid::Uuid) -> Result { +pub fn parse(input: &str) -> Result { let _ = pretty_env_logger::try_init(); - match pipeline(nom_input(input, anchor)) { + match pipeline(nom_input(input)) { Ok((_rest, val)) => Ok(val), Err(err) => Err(ShellError::parse_error(err)), } diff --git a/src/parser/deserializer.rs b/src/parser/deserializer.rs index f9b9146e5..4b8bf913d 100644 --- a/src/parser/deserializer.rs +++ b/src/parser/deserializer.rs @@ -52,7 +52,7 @@ impl<'de> ConfigDeserializer<'de> { self.stack.push(DeserializerItem { key_struct_field: Some((name.to_string(), name)), - val: value.unwrap_or_else(|| Value::nothing().tagged(self.call.name_tag)), + val: value.unwrap_or_else(|| Value::nothing().tagged(&self.call.name_tag)), }); Ok(()) @@ -310,9 +310,10 @@ impl<'de, 'a> de::Deserializer<'de> for &'a mut ConfigDeserializer<'de> { return Ok(r); } trace!( - "deserializing struct {:?} {:?} (stack={:?})", + "deserializing struct {:?} {:?} (saw_root={} stack={:?})", name, fields, + self.saw_root, self.stack ); @@ -326,6 +327,12 @@ impl<'de, 'a> de::Deserializer<'de> for &'a mut ConfigDeserializer<'de> { let type_name = std::any::type_name::(); let tagged_val_name = std::any::type_name::>(); + trace!( + "type_name={} tagged_val_name={}", + type_name, + tagged_val_name + ); + if type_name == tagged_val_name { return visit::, _>(value.val, name, fields, visitor); } diff --git a/src/parser/hir.rs b/src/parser/hir.rs index 96eb7272a..ac6423943 100644 --- a/src/parser/hir.rs +++ b/src/parser/hir.rs @@ -1,11 +1,13 @@ pub(crate) mod baseline_parse; -pub(crate) mod baseline_parse_tokens; pub(crate) mod binary; +pub(crate) mod expand_external_tokens; pub(crate) mod external_command; pub(crate) mod named; pub(crate) mod path; +pub(crate) mod syntax_shape; +pub(crate) mod tokens_iterator; -use crate::parser::{registry, Unit}; +use crate::parser::{registry, Operator, Unit}; use crate::prelude::*; use derive_new::new; use getset::Getters; @@ -14,27 +16,18 @@ use std::fmt; use std::path::PathBuf; use crate::evaluate::Scope; +use crate::parser::parse::tokens::RawNumber; +use crate::traits::ToDebug; -pub(crate) use self::baseline_parse::{ - baseline_parse_single_token, baseline_parse_token_as_number, baseline_parse_token_as_path, - baseline_parse_token_as_pattern, baseline_parse_token_as_string, -}; -pub(crate) use self::baseline_parse_tokens::{baseline_parse_next_expr, TokensIterator}; pub(crate) use self::binary::Binary; pub(crate) use self::external_command::ExternalCommand; pub(crate) use self::named::NamedArguments; pub(crate) use self::path::Path; +pub(crate) use self::syntax_shape::ExpandContext; +pub(crate) use self::tokens_iterator::debug::debug_tokens; +pub(crate) use self::tokens_iterator::TokensIterator; -pub use self::baseline_parse_tokens::SyntaxShape; - -pub fn path(head: impl Into, tail: Vec>>) -> Path { - Path::new( - head.into(), - tail.into_iter() - .map(|item| item.map(|string| string.into())) - .collect(), - ) -} +pub use self::syntax_shape::SyntaxShape; #[derive(Debug, Clone, Eq, PartialEq, Getters, Serialize, Deserialize, new)] pub struct Call { @@ -93,6 +86,7 @@ pub enum RawExpression { FilePath(PathBuf), ExternalCommand(ExternalCommand), + Command(Span), Boolean(bool), } @@ -115,73 +109,148 @@ impl RawExpression { match self { RawExpression::Literal(literal) => literal.type_name(), RawExpression::Synthetic(synthetic) => synthetic.type_name(), - RawExpression::ExternalWord => "externalword", - RawExpression::FilePath(..) => "filepath", + RawExpression::Command(..) 
=> "command", + RawExpression::ExternalWord => "external word", + RawExpression::FilePath(..) => "file path", RawExpression::Variable(..) => "variable", RawExpression::List(..) => "list", RawExpression::Binary(..) => "binary", RawExpression::Block(..) => "block", - RawExpression::Path(..) => "path", + RawExpression::Path(..) => "variable path", RawExpression::Boolean(..) => "boolean", RawExpression::ExternalCommand(..) => "external", } } } -pub type Expression = Tagged; +pub type Expression = Spanned; + +impl std::fmt::Display for Expression { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + let span = self.span; + + match &self.item { + RawExpression::Literal(literal) => write!(f, "{}", literal.tagged(self.span)), + RawExpression::Synthetic(Synthetic::String(s)) => write!(f, "{}", s), + RawExpression::Command(_) => write!(f, "Command{{ {}..{} }}", span.start(), span.end()), + RawExpression::ExternalWord => { + write!(f, "ExternalWord{{ {}..{} }}", span.start(), span.end()) + } + RawExpression::FilePath(file) => write!(f, "Path{{ {} }}", file.display()), + RawExpression::Variable(variable) => write!(f, "{}", variable), + RawExpression::List(list) => f + .debug_list() + .entries(list.iter().map(|e| format!("{}", e))) + .finish(), + RawExpression::Binary(binary) => write!(f, "{}", binary), + RawExpression::Block(items) => { + write!(f, "Block")?; + f.debug_set() + .entries(items.iter().map(|i| format!("{}", i))) + .finish() + } + RawExpression::Path(path) => write!(f, "{}", path), + RawExpression::Boolean(b) => write!(f, "${}", b), + RawExpression::ExternalCommand(..) => { + write!(f, "ExternalComment{{ {}..{} }}", span.start(), span.end()) + } + } + } +} impl Expression { - pub(crate) fn number(i: impl Into, tag: impl Into) -> Expression { - RawExpression::Literal(Literal::Number(i.into())).tagged(tag.into()) + pub(crate) fn number(i: impl Into, span: impl Into) -> Expression { + RawExpression::Literal(Literal::Number(i.into())).spanned(span.into()) } pub(crate) fn size( i: impl Into, unit: impl Into, - tag: impl Into, + span: impl Into, ) -> Expression { - RawExpression::Literal(Literal::Size(i.into(), unit.into())).tagged(tag.into()) + RawExpression::Literal(Literal::Size(i.into(), unit.into())).spanned(span.into()) } pub(crate) fn synthetic_string(s: impl Into) -> Expression { - RawExpression::Synthetic(Synthetic::String(s.into())).tagged_unknown() + RawExpression::Synthetic(Synthetic::String(s.into())).spanned_unknown() } - pub(crate) fn string(inner: impl Into, outer: impl Into) -> Expression { - RawExpression::Literal(Literal::String(inner.into())).tagged(outer.into()) + pub(crate) fn string(inner: impl Into, outer: impl Into) -> Expression { + RawExpression::Literal(Literal::String(inner.into())).spanned(outer.into()) } - pub(crate) fn file_path(path: impl Into, outer: impl Into) -> Expression { - RawExpression::FilePath(path.into()).tagged(outer) + pub(crate) fn path( + head: Expression, + tail: Vec>>, + span: impl Into, + ) -> Expression { + let tail = tail.into_iter().map(|t| t.map(|s| s.into())).collect(); + RawExpression::Path(Box::new(Path::new(head, tail))).spanned(span.into()) } - pub(crate) fn bare(tag: impl Into) -> Expression { - RawExpression::Literal(Literal::Bare).tagged(tag) + pub(crate) fn dot_member(head: Expression, next: Spanned>) -> Expression { + let Spanned { item, span } = head; + let new_span = head.span.until(next.span); + + match item { + RawExpression::Path(path) => { + let (head, mut tail) = path.parts(); + + tail.push(next.map(|i| 
i.into())); + Expression::path(head, tail, new_span) + } + + other => Expression::path(other.spanned(span), vec![next], new_span), + } } - pub(crate) fn pattern(tag: impl Into) -> Expression { - RawExpression::Literal(Literal::GlobPattern).tagged(tag.into()) + pub(crate) fn infix( + left: Expression, + op: Spanned>, + right: Expression, + ) -> Expression { + let new_span = left.span.until(right.span); + + RawExpression::Binary(Box::new(Binary::new(left, op.map(|o| o.into()), right))) + .spanned(new_span) } - pub(crate) fn variable(inner: impl Into, outer: impl Into) -> Expression { - RawExpression::Variable(Variable::Other(inner.into())).tagged(outer) + pub(crate) fn file_path(path: impl Into, outer: impl Into) -> Expression { + RawExpression::FilePath(path.into()).spanned(outer) } - pub(crate) fn external_command(inner: impl Into, outer: impl Into) -> Expression { - RawExpression::ExternalCommand(ExternalCommand::new(inner.into())).tagged(outer) + pub(crate) fn list(list: Vec, span: impl Into) -> Expression { + RawExpression::List(list).spanned(span) } - pub(crate) fn it_variable(inner: impl Into, outer: impl Into) -> Expression { - RawExpression::Variable(Variable::It(inner.into())).tagged(outer) + pub(crate) fn bare(span: impl Into) -> Expression { + RawExpression::Literal(Literal::Bare).spanned(span) + } + + pub(crate) fn pattern(span: impl Into) -> Expression { + RawExpression::Literal(Literal::GlobPattern).spanned(span.into()) + } + + pub(crate) fn variable(inner: impl Into, outer: impl Into) -> Expression { + RawExpression::Variable(Variable::Other(inner.into())).spanned(outer) + } + + pub(crate) fn external_command(inner: impl Into, outer: impl Into) -> Expression { + RawExpression::ExternalCommand(ExternalCommand::new(inner.into())).spanned(outer) + } + + pub(crate) fn it_variable(inner: impl Into, outer: impl Into) -> Expression { + RawExpression::Variable(Variable::It(inner.into())).spanned(outer) } } impl ToDebug for Expression { fn fmt_debug(&self, f: &mut fmt::Formatter, source: &str) -> fmt::Result { - match self.item() { - RawExpression::Literal(l) => l.tagged(self.tag()).fmt_debug(f, source), + match &self.item { + RawExpression::Literal(l) => l.spanned(self.span).fmt_debug(f, source), RawExpression::FilePath(p) => write!(f, "{}", p.display()), - RawExpression::ExternalWord => write!(f, "{}", self.tag().slice(source)), + RawExpression::ExternalWord => write!(f, "{}", self.span.slice(source)), + RawExpression::Command(tag) => write!(f, "{}", tag.slice(source)), RawExpression::Synthetic(Synthetic::String(s)) => write!(f, "{:?}", s), RawExpression::Variable(Variable::It(_)) => write!(f, "$it"), RawExpression::Variable(Variable::Other(s)) => write!(f, "${}", s.slice(source)), @@ -212,8 +281,8 @@ impl ToDebug for Expression { } } -impl From> for Expression { - fn from(path: Tagged) -> Expression { +impl From> for Expression { + fn from(path: Spanned) -> Expression { path.map(|p| RawExpression::Path(Box::new(p))) } } @@ -227,19 +296,39 @@ impl From> for Expression { pub enum Literal { Number(Number), Size(Number, Unit), - String(Tag), + String(Span), GlobPattern, Bare, } -impl ToDebug for Tagged<&Literal> { +impl std::fmt::Display for Tagged { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, "{}", Tagged::new(self.tag.clone(), &self.item)) + } +} + +impl std::fmt::Display for Tagged<&Literal> { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + let span = self.tag.span; + + match &self.item { + Literal::Number(number) => 
write!(f, "{}", number), + Literal::Size(number, unit) => write!(f, "{}{}", number, unit.as_str()), + Literal::String(_) => write!(f, "String{{ {}..{} }}", span.start(), span.end()), + Literal::GlobPattern => write!(f, "Glob{{ {}..{} }}", span.start(), span.end()), + Literal::Bare => write!(f, "Bare{{ {}..{} }}", span.start(), span.end()), + } + } +} + +impl ToDebug for Spanned<&Literal> { fn fmt_debug(&self, f: &mut fmt::Formatter, source: &str) -> fmt::Result { - match self.item() { - Literal::Number(number) => write!(f, "{:?}", *number), + match self.item { + Literal::Number(number) => write!(f, "{:?}", number), Literal::Size(number, unit) => write!(f, "{:?}{:?}", *number, unit), Literal::String(tag) => write!(f, "{}", tag.slice(source)), - Literal::GlobPattern => write!(f, "{}", self.tag().slice(source)), - Literal::Bare => write!(f, "{}", self.tag().slice(source)), + Literal::GlobPattern => write!(f, "{}", self.span.slice(source)), + Literal::Bare => write!(f, "{}", self.span.slice(source)), } } } @@ -258,6 +347,15 @@ impl Literal { #[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)] pub enum Variable { - It(Tag), - Other(Tag), + It(Span), + Other(Span), +} + +impl std::fmt::Display for Variable { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + Variable::It(_) => write!(f, "$it"), + Variable::Other(span) => write!(f, "${{ {}..{} }}", span.start(), span.end()), + } + } } diff --git a/src/parser/hir/baseline_parse.rs b/src/parser/hir/baseline_parse.rs index 267494f27..87c277195 100644 --- a/src/parser/hir/baseline_parse.rs +++ b/src/parser/hir/baseline_parse.rs @@ -1,140 +1,2 @@ -use crate::context::Context; -use crate::errors::ShellError; -use crate::parser::{hir, RawToken, Token}; -use crate::TaggedItem; -use crate::Text; -use std::path::PathBuf; - -pub fn baseline_parse_single_token( - token: &Token, - source: &Text, -) -> Result { - Ok(match *token.item() { - RawToken::Number(number) => hir::Expression::number(number.to_number(source), token.tag()), - RawToken::Size(int, unit) => { - hir::Expression::size(int.to_number(source), unit, token.tag()) - } - RawToken::String(tag) => hir::Expression::string(tag, token.tag()), - RawToken::Variable(tag) if tag.slice(source) == "it" => { - hir::Expression::it_variable(tag, token.tag()) - } - RawToken::Variable(tag) => hir::Expression::variable(tag, token.tag()), - RawToken::ExternalCommand(tag) => hir::Expression::external_command(tag, token.tag()), - RawToken::ExternalWord => return Err(ShellError::invalid_external_word(token.tag())), - RawToken::GlobPattern => hir::Expression::pattern(token.tag()), - RawToken::Bare => hir::Expression::bare(token.tag()), - }) -} - -pub fn baseline_parse_token_as_number( - token: &Token, - source: &Text, -) -> Result { - Ok(match *token.item() { - RawToken::Variable(tag) if tag.slice(source) == "it" => { - hir::Expression::it_variable(tag, token.tag()) - } - RawToken::ExternalCommand(tag) => hir::Expression::external_command(tag, token.tag()), - RawToken::ExternalWord => return Err(ShellError::invalid_external_word(token.tag())), - RawToken::Variable(tag) => hir::Expression::variable(tag, token.tag()), - RawToken::Number(number) => hir::Expression::number(number.to_number(source), token.tag()), - RawToken::Size(number, unit) => { - hir::Expression::size(number.to_number(source), unit, token.tag()) - } - RawToken::Bare => hir::Expression::bare(token.tag()), - RawToken::GlobPattern => { - return Err(ShellError::type_error( - "Number", - 
"glob pattern".to_string().tagged(token.tag()), - )) - } - RawToken::String(tag) => hir::Expression::string(tag, token.tag()), - }) -} - -pub fn baseline_parse_token_as_string( - token: &Token, - source: &Text, -) -> Result { - Ok(match *token.item() { - RawToken::Variable(tag) if tag.slice(source) == "it" => { - hir::Expression::it_variable(tag, token.tag()) - } - RawToken::ExternalCommand(tag) => hir::Expression::external_command(tag, token.tag()), - RawToken::ExternalWord => return Err(ShellError::invalid_external_word(token.tag())), - RawToken::Variable(tag) => hir::Expression::variable(tag, token.tag()), - RawToken::Number(_) => hir::Expression::bare(token.tag()), - RawToken::Size(_, _) => hir::Expression::bare(token.tag()), - RawToken::Bare => hir::Expression::bare(token.tag()), - RawToken::GlobPattern => { - return Err(ShellError::type_error( - "String", - "glob pattern".tagged(token.tag()), - )) - } - RawToken::String(tag) => hir::Expression::string(tag, token.tag()), - }) -} - -pub fn baseline_parse_token_as_path( - token: &Token, - context: &Context, - source: &Text, -) -> Result { - Ok(match *token.item() { - RawToken::Variable(tag) if tag.slice(source) == "it" => { - hir::Expression::it_variable(tag, token.tag()) - } - RawToken::ExternalCommand(tag) => hir::Expression::external_command(tag, token.tag()), - RawToken::ExternalWord => return Err(ShellError::invalid_external_word(token.tag())), - RawToken::Variable(tag) => hir::Expression::variable(tag, token.tag()), - RawToken::Number(_) => hir::Expression::bare(token.tag()), - RawToken::Size(_, _) => hir::Expression::bare(token.tag()), - RawToken::Bare => { - hir::Expression::file_path(expand_path(token.tag().slice(source), context), token.tag()) - } - RawToken::GlobPattern => { - return Err(ShellError::type_error( - "Path", - "glob pattern".tagged(token.tag()), - )) - } - RawToken::String(tag) => { - hir::Expression::file_path(expand_path(tag.slice(source), context), token.tag()) - } - }) -} - -pub fn baseline_parse_token_as_pattern( - token: &Token, - context: &Context, - source: &Text, -) -> Result { - Ok(match *token.item() { - RawToken::Variable(tag) if tag.slice(source) == "it" => { - hir::Expression::it_variable(tag, token.tag()) - } - RawToken::ExternalCommand(_) => { - return Err(ShellError::syntax_error( - "Invalid external command".to_string().tagged(token.tag()), - )) - } - RawToken::ExternalWord => return Err(ShellError::invalid_external_word(token.tag())), - RawToken::Variable(tag) => hir::Expression::variable(tag, token.tag()), - RawToken::Number(_) => hir::Expression::bare(token.tag()), - RawToken::Size(_, _) => hir::Expression::bare(token.tag()), - RawToken::GlobPattern => hir::Expression::pattern(token.tag()), - RawToken::Bare => { - hir::Expression::file_path(expand_path(token.tag().slice(source), context), token.tag()) - } - RawToken::String(tag) => { - hir::Expression::file_path(expand_path(tag.slice(source), context), token.tag()) - } - }) -} - -pub fn expand_path(string: &str, context: &Context) -> PathBuf { - let expanded = shellexpand::tilde_with_context(string, || context.shell_manager.homedir()); - - PathBuf::from(expanded.as_ref()) -} +#[cfg(test)] +mod tests; diff --git a/src/parser/hir/baseline_parse/tests.rs b/src/parser/hir/baseline_parse/tests.rs new file mode 100644 index 000000000..d3b924849 --- /dev/null +++ b/src/parser/hir/baseline_parse/tests.rs @@ -0,0 +1,139 @@ +use crate::commands::classified::InternalCommand; +use crate::commands::ClassifiedCommand; +use crate::env::host::BasicHost; +use 
crate::parser::hir; +use crate::parser::hir::syntax_shape::*; +use crate::parser::hir::TokensIterator; +use crate::parser::parse::token_tree_builder::{CurriedToken, TokenTreeBuilder as b}; +use crate::parser::TokenNode; +use crate::{Span, SpannedItem, Tag, Tagged, Text}; +use pretty_assertions::assert_eq; +use std::fmt::Debug; + +#[test] +fn test_parse_string() { + parse_tokens(StringShape, vec![b::string("hello")], |tokens| { + hir::Expression::string(inner_string_span(tokens[0].span()), tokens[0].span()) + }); +} + +#[test] +fn test_parse_path() { + parse_tokens( + VariablePathShape, + vec![b::var("it"), b::op("."), b::bare("cpu")], + |tokens| { + let (outer_var, inner_var) = tokens[0].expect_var(); + let bare = tokens[2].expect_bare(); + hir::Expression::path( + hir::Expression::it_variable(inner_var, outer_var), + vec!["cpu".spanned(bare)], + outer_var.until(bare), + ) + }, + ); + + parse_tokens( + VariablePathShape, + vec![ + b::var("cpu"), + b::op("."), + b::bare("amount"), + b::op("."), + b::string("max ghz"), + ], + |tokens| { + let (outer_var, inner_var) = tokens[0].expect_var(); + let amount = tokens[2].expect_bare(); + let (outer_max_ghz, _) = tokens[4].expect_string(); + + hir::Expression::path( + hir::Expression::variable(inner_var, outer_var), + vec!["amount".spanned(amount), "max ghz".spanned(outer_max_ghz)], + outer_var.until(outer_max_ghz), + ) + }, + ); +} + +#[test] +fn test_parse_command() { + parse_tokens( + ClassifiedCommandShape, + vec![b::bare("ls"), b::sp(), b::pattern("*.txt")], + |tokens| { + let bare = tokens[0].expect_bare(); + let pat = tokens[2].span(); + + ClassifiedCommand::Internal(InternalCommand::new( + "ls".to_string(), + Tag { + span: bare, + anchor: None, + }, + hir::Call { + head: Box::new(hir::RawExpression::Command(bare).spanned(bare)), + positional: Some(vec![hir::Expression::pattern(pat)]), + named: None, + }, + )) + // hir::Expression::path( + // hir::Expression::variable(inner_var, outer_var), + // vec!["cpu".tagged(bare)], + // outer_var.until(bare), + // ) + }, + ); + + parse_tokens( + VariablePathShape, + vec![ + b::var("cpu"), + b::op("."), + b::bare("amount"), + b::op("."), + b::string("max ghz"), + ], + |tokens| { + let (outer_var, inner_var) = tokens[0].expect_var(); + let amount = tokens[2].expect_bare(); + let (outer_max_ghz, _) = tokens[4].expect_string(); + + hir::Expression::path( + hir::Expression::variable(inner_var, outer_var), + vec!["amount".spanned(amount), "max ghz".spanned(outer_max_ghz)], + outer_var.until(outer_max_ghz), + ) + }, + ); +} + +fn parse_tokens( + shape: impl ExpandSyntax, + tokens: Vec, + expected: impl FnOnce(Tagged<&[TokenNode]>) -> T, +) { + let tokens = b::token_list(tokens); + let (tokens, source) = b::build(tokens); + + ExpandContext::with_empty(&Text::from(source), |context| { + let tokens = tokens.expect_list(); + let mut iterator = TokensIterator::all(tokens.item, *context.span()); + + let expr = expand_syntax(&shape, &mut iterator, &context); + + let expr = match expr { + Ok(expr) => expr, + Err(err) => { + crate::cli::print_err(err, &BasicHost, context.source().clone()); + panic!("Parse failed"); + } + }; + + assert_eq!(expr, expected(tokens)); + }) +} + +fn inner_string_span(span: Span) -> Span { + Span::new(span.start() + 1, span.end() - 1) +} diff --git a/src/parser/hir/baseline_parse_tokens.rs b/src/parser/hir/baseline_parse_tokens.rs deleted file mode 100644 index 8413bd07e..000000000 --- a/src/parser/hir/baseline_parse_tokens.rs +++ /dev/null @@ -1,459 +0,0 @@ -use crate::context::Context; 
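The inner_string_span helper in the new tests recovers the contents of a string literal by shaving one character off each end of its span, i.e. the two quotes. A standalone sketch of that calculation, with a minimal local Span in place of the crate's type:

#[derive(Debug, Clone, Copy, PartialEq)]
struct Span { start: usize, end: usize }

impl Span {
    fn new(start: usize, end: usize) -> Span {
        assert!(end >= start);
        Span { start, end }
    }
    fn slice<'a>(&self, source: &'a str) -> &'a str {
        &source[self.start..self.end]
    }
}

// Same arithmetic as the test helper: drop the opening and closing quote.
fn inner_string_span(span: Span) -> Span {
    Span::new(span.start + 1, span.end - 1)
}

fn main() {
    let source = r#"echo "hello""#;
    let literal = Span::new(5, 12);            // the whole token, quotes included
    assert_eq!(literal.slice(source), "\"hello\"");

    let inner = inner_string_span(literal);
    assert_eq!(inner.slice(source), "hello");  // what the string expression points at
}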
-use crate::errors::ShellError; -use crate::parser::{ - hir, - hir::{ - baseline_parse_single_token, baseline_parse_token_as_number, baseline_parse_token_as_path, - baseline_parse_token_as_pattern, baseline_parse_token_as_string, - }, - DelimitedNode, Delimiter, PathNode, RawToken, TokenNode, -}; -use crate::{Tag, Tagged, TaggedItem, Text}; -use derive_new::new; -use log::trace; -use serde::{Deserialize, Serialize}; - -pub fn baseline_parse_tokens( - token_nodes: &mut TokensIterator<'_>, - context: &Context, - source: &Text, - syntax_type: SyntaxShape, -) -> Result, ShellError> { - let mut exprs: Vec = vec![]; - - loop { - if token_nodes.at_end() { - break; - } - - let expr = baseline_parse_next_expr(token_nodes, context, source, syntax_type)?; - exprs.push(expr); - } - - Ok(exprs) -} - -#[derive(Debug, Copy, Clone, Serialize, Deserialize)] -pub enum SyntaxShape { - Any, - List, - Literal, - String, - Member, - Variable, - Number, - Path, - Pattern, - Binary, - Block, - Boolean, -} - -impl std::fmt::Display for SyntaxShape { - fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { - match self { - SyntaxShape::Any => write!(f, "Any"), - SyntaxShape::List => write!(f, "List"), - SyntaxShape::Literal => write!(f, "Literal"), - SyntaxShape::String => write!(f, "String"), - SyntaxShape::Member => write!(f, "Member"), - SyntaxShape::Variable => write!(f, "Variable"), - SyntaxShape::Number => write!(f, "Number"), - SyntaxShape::Path => write!(f, "Path"), - SyntaxShape::Pattern => write!(f, "Pattern"), - SyntaxShape::Binary => write!(f, "Binary"), - SyntaxShape::Block => write!(f, "Block"), - SyntaxShape::Boolean => write!(f, "Boolean"), - } - } -} - -pub fn baseline_parse_next_expr( - tokens: &mut TokensIterator, - context: &Context, - source: &Text, - syntax_type: SyntaxShape, -) -> Result { - let next = tokens - .next() - .ok_or_else(|| ShellError::string("Expected token, found none"))?; - - trace!(target: "nu::parser::parse_one_expr", "syntax_type={:?}, token={:?}", syntax_type, next); - - match (syntax_type, next) { - (SyntaxShape::Path, TokenNode::Token(token)) => { - return baseline_parse_token_as_path(token, context, source) - } - - (SyntaxShape::Path, token) => { - return Err(ShellError::type_error( - "Path", - token.type_name().tagged(token.tag()), - )) - } - - (SyntaxShape::Pattern, TokenNode::Token(token)) => { - return baseline_parse_token_as_pattern(token, context, source) - } - - (SyntaxShape::Pattern, token) => { - return Err(ShellError::type_error( - "Path", - token.type_name().tagged(token.tag()), - )) - } - - (SyntaxShape::String, TokenNode::Token(token)) => { - return baseline_parse_token_as_string(token, source); - } - - (SyntaxShape::String, token) => { - return Err(ShellError::type_error( - "String", - token.type_name().tagged(token.tag()), - )) - } - - (SyntaxShape::Number, TokenNode::Token(token)) => { - return Ok(baseline_parse_token_as_number(token, source)?); - } - - (SyntaxShape::Number, token) => { - return Err(ShellError::type_error( - "Numeric", - token.type_name().tagged(token.tag()), - )) - } - - // TODO: More legit member processing - (SyntaxShape::Member, TokenNode::Token(token)) => { - return baseline_parse_token_as_string(token, source); - } - - (SyntaxShape::Member, token) => { - return Err(ShellError::type_error( - "member", - token.type_name().tagged(token.tag()), - )) - } - - (SyntaxShape::Any, _) => {} - (SyntaxShape::List, _) => {} - (SyntaxShape::Literal, _) => {} - (SyntaxShape::Variable, _) => {} - (SyntaxShape::Binary, _) => {} - 
(SyntaxShape::Block, _) => {} - (SyntaxShape::Boolean, _) => {} - }; - - let first = baseline_parse_semantic_token(next, context, source)?; - - let possible_op = tokens.peek(); - - let op = match possible_op { - Some(TokenNode::Operator(op)) => op.clone(), - _ => return Ok(first), - }; - - tokens.next(); - - let second = match tokens.next() { - None => { - return Err(ShellError::labeled_error( - "Expected something after an operator", - "operator", - op.tag(), - )) - } - Some(token) => baseline_parse_semantic_token(token, context, source)?, - }; - - // We definitely have a binary expression here -- let's see if we should coerce it into a block - - match syntax_type { - SyntaxShape::Any => { - let tag = first.tag().until(second.tag()); - let binary = hir::Binary::new(first, op, second); - let binary = hir::RawExpression::Binary(Box::new(binary)); - let binary = binary.tagged(tag); - - Ok(binary) - } - - SyntaxShape::Block => { - let tag = first.tag().until(second.tag()); - - let path: Tagged = match first { - Tagged { - item: hir::RawExpression::Literal(hir::Literal::Bare), - tag, - } => { - let string = tag.slice(source).to_string().tagged(tag); - let path = hir::Path::new( - // TODO: Deal with synthetic nodes that have no representation at all in source - hir::RawExpression::Variable(hir::Variable::It(Tag::unknown())) - .tagged(Tag::unknown()), - vec![string], - ); - let path = hir::RawExpression::Path(Box::new(path)); - path.tagged(first.tag()) - } - Tagged { - item: hir::RawExpression::Literal(hir::Literal::String(inner)), - tag, - } => { - let string = inner.slice(source).to_string().tagged(tag); - let path = hir::Path::new( - // TODO: Deal with synthetic nodes that have no representation at all in source - hir::RawExpression::Variable(hir::Variable::It(Tag::unknown())) - .tagged_unknown(), - vec![string], - ); - let path = hir::RawExpression::Path(Box::new(path)); - path.tagged(first.tag()) - } - Tagged { - item: hir::RawExpression::Variable(..), - .. 
- } => first, - Tagged { tag, item } => { - return Err(ShellError::labeled_error( - "The first part of an un-braced block must be a column name", - item.type_name(), - tag, - )) - } - }; - - let binary = hir::Binary::new(path, op, second); - let binary = hir::RawExpression::Binary(Box::new(binary)); - let binary = binary.tagged(tag); - - let block = hir::RawExpression::Block(vec![binary]); - let block = block.tagged(tag); - - Ok(block) - } - - other => Err(ShellError::unimplemented(format!( - "coerce hint {:?}", - other - ))), - } -} - -pub fn baseline_parse_semantic_token( - token: &TokenNode, - context: &Context, - source: &Text, -) -> Result { - match token { - TokenNode::Token(token) => baseline_parse_single_token(token, source), - TokenNode::Call(_call) => unimplemented!(), - TokenNode::Delimited(delimited) => baseline_parse_delimited(delimited, context, source), - TokenNode::Pipeline(_pipeline) => unimplemented!(), - TokenNode::Operator(op) => Err(ShellError::syntax_error( - "Unexpected operator".tagged(op.tag), - )), - TokenNode::Flag(flag) => Err(ShellError::syntax_error("Unexpected flag".tagged(flag.tag))), - TokenNode::Member(tag) => Err(ShellError::syntax_error( - "BUG: Top-level member".tagged(*tag), - )), - TokenNode::Whitespace(tag) => Err(ShellError::syntax_error( - "BUG: Whitespace found during parse".tagged(*tag), - )), - TokenNode::Error(error) => Err(*error.item.clone()), - TokenNode::Path(path) => baseline_parse_path(path, context, source), - } -} - -pub fn baseline_parse_delimited( - token: &Tagged, - context: &Context, - source: &Text, -) -> Result { - match token.delimiter() { - Delimiter::Brace => { - let children = token.children(); - let exprs = baseline_parse_tokens( - &mut TokensIterator::new(children), - context, - source, - SyntaxShape::Any, - )?; - - let expr = hir::RawExpression::Block(exprs); - Ok(expr.tagged(token.tag())) - } - Delimiter::Paren => unimplemented!(), - Delimiter::Square => { - let children = token.children(); - let exprs = baseline_parse_tokens( - &mut TokensIterator::new(children), - context, - source, - SyntaxShape::Any, - )?; - - let expr = hir::RawExpression::List(exprs); - Ok(expr.tagged(token.tag())) - } - } -} - -pub fn baseline_parse_path( - token: &Tagged, - context: &Context, - source: &Text, -) -> Result { - let head = baseline_parse_semantic_token(token.head(), context, source)?; - - let mut tail = vec![]; - - for part in token.tail() { - let string = match part { - TokenNode::Token(token) => match token.item() { - RawToken::Bare => token.tag().slice(source), - RawToken::String(tag) => tag.slice(source), - RawToken::Number(_) - | RawToken::Size(..) 
- | RawToken::Variable(_) - | RawToken::ExternalCommand(_) - | RawToken::GlobPattern - | RawToken::ExternalWord => { - return Err(ShellError::type_error( - "String", - token.type_name().tagged(part.tag()), - )) - } - }, - - TokenNode::Member(tag) => tag.slice(source), - - // TODO: Make this impossible - other => { - return Err(ShellError::syntax_error( - format!("{} in path", other.type_name()).tagged(other.tag()), - )) - } - } - .to_string(); - - tail.push(string.tagged(part.tag())); - } - - Ok(hir::path(head, tail).tagged(token.tag()).into()) -} - -#[derive(Debug, new)] -pub struct TokensIterator<'a> { - tokens: &'a [TokenNode], - #[new(default)] - index: usize, - #[new(default)] - seen: indexmap::IndexSet, -} - -impl TokensIterator<'_> { - pub fn remove(&mut self, position: usize) { - self.seen.insert(position); - } - - pub fn len(&self) -> usize { - self.tokens.len() - } - - pub fn at_end(&self) -> bool { - for index in self.index..self.tokens.len() { - if !self.seen.contains(&index) { - return false; - } - } - - true - } - - pub fn advance(&mut self) { - self.seen.insert(self.index); - self.index += 1; - } - - pub fn extract(&mut self, f: impl Fn(&TokenNode) -> Option) -> Option<(usize, T)> { - for (i, item) in self.tokens.iter().enumerate() { - if self.seen.contains(&i) { - continue; - } - - match f(item) { - None => { - continue; - } - Some(value) => { - self.seen.insert(i); - return Some((i, value)); - } - } - } - - None - } - - pub fn move_to(&mut self, pos: usize) { - self.index = pos; - } - - pub fn restart(&mut self) { - self.index = 0; - } - - pub fn clone(&self) -> TokensIterator { - TokensIterator { - tokens: self.tokens, - index: self.index, - seen: self.seen.clone(), - } - } - - pub fn peek(&self) -> Option<&TokenNode> { - let mut tokens = self.clone(); - - tokens.next() - } - - pub fn debug_remaining(&self) -> Vec { - let mut tokens = self.clone(); - tokens.restart(); - tokens.cloned().collect() - } -} - -impl<'a> Iterator for TokensIterator<'a> { - type Item = &'a TokenNode; - - fn next(&mut self) -> Option<&'a TokenNode> { - loop { - if self.index >= self.tokens.len() { - return None; - } - - if self.seen.contains(&self.index) { - self.advance(); - continue; - } - - if self.index >= self.tokens.len() { - return None; - } - - match &self.tokens[self.index] { - TokenNode::Whitespace(_) => { - self.advance(); - } - other => { - self.advance(); - return Some(other); - } - } - } - } -} diff --git a/src/parser/hir/binary.rs b/src/parser/hir/binary.rs index 02a4d416e..67c597cb8 100644 --- a/src/parser/hir/binary.rs +++ b/src/parser/hir/binary.rs @@ -1,6 +1,6 @@ use crate::parser::{hir::Expression, Operator}; use crate::prelude::*; -use crate::Tagged; + use derive_new::new; use getset::Getters; use serde::{Deserialize, Serialize}; @@ -12,10 +12,16 @@ use std::fmt; #[get = "pub(crate)"] pub struct Binary { left: Expression, - op: Tagged, + op: Spanned, right: Expression, } +impl fmt::Display for Binary { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!(f, "({} {} {})", self.op.as_str(), self.left, self.right) + } +} + impl ToDebug for Binary { fn fmt_debug(&self, f: &mut fmt::Formatter, source: &str) -> fmt::Result { write!(f, "{}", self.left.debug(source))?; diff --git a/src/parser/hir/expand_external_tokens.rs b/src/parser/hir/expand_external_tokens.rs new file mode 100644 index 000000000..af966945b --- /dev/null +++ b/src/parser/hir/expand_external_tokens.rs @@ -0,0 +1,159 @@ +use crate::errors::ShellError; +use crate::parser::{ + hir::syntax_shape::{ + 
color_syntax, expand_atom, AtomicToken, ColorSyntax, ExpandContext, ExpansionRule, + MaybeSpaceShape, + }, + FlatShape, TokenNode, TokensIterator, +}; +use crate::{Span, Spanned, Text}; + +pub fn expand_external_tokens( + token_nodes: &mut TokensIterator<'_>, + source: &Text, +) -> Result>, ShellError> { + let mut out: Vec> = vec![]; + + loop { + if let Some(span) = expand_next_expression(token_nodes)? { + out.push(span.spanned_string(source)); + } else { + break; + } + } + + Ok(out) +} + +#[derive(Debug, Copy, Clone)] +pub struct ExternalTokensShape; + +impl ColorSyntax for ExternalTokensShape { + type Info = (); + type Input = (); + + fn color_syntax<'a, 'b>( + &self, + _input: &(), + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + shapes: &mut Vec>, + ) -> Self::Info { + loop { + // Allow a space + color_syntax(&MaybeSpaceShape, token_nodes, context, shapes); + + // Process an external expression. External expressions are mostly words, with a + // few exceptions (like $variables and path expansion rules) + match color_syntax(&ExternalExpression, token_nodes, context, shapes).1 { + ExternalExpressionResult::Eof => break, + ExternalExpressionResult::Processed => continue, + } + } + } +} + +pub fn expand_next_expression( + token_nodes: &mut TokensIterator<'_>, +) -> Result, ShellError> { + let first = token_nodes.next_non_ws(); + + let first = match first { + None => return Ok(None), + Some(v) => v, + }; + + let first = triage_external_head(first)?; + let mut last = first; + + loop { + let continuation = triage_continuation(token_nodes)?; + + if let Some(continuation) = continuation { + last = continuation; + } else { + break; + } + } + + Ok(Some(first.until(last))) +} + +fn triage_external_head(node: &TokenNode) -> Result { + Ok(match node { + TokenNode::Token(token) => token.span, + TokenNode::Call(_call) => unimplemented!("TODO: OMG"), + TokenNode::Nodes(_nodes) => unimplemented!("TODO: OMG"), + TokenNode::Delimited(_delimited) => unimplemented!("TODO: OMG"), + TokenNode::Pipeline(_pipeline) => unimplemented!("TODO: OMG"), + TokenNode::Flag(flag) => flag.span, + TokenNode::Whitespace(_whitespace) => { + unreachable!("This function should be called after next_non_ws()") + } + TokenNode::Error(_error) => unimplemented!("TODO: OMG"), + }) +} + +fn triage_continuation<'a, 'b>( + nodes: &'a mut TokensIterator<'b>, +) -> Result, ShellError> { + let mut peeked = nodes.peek_any(); + + let node = match peeked.node { + None => return Ok(None), + Some(node) => node, + }; + + match &node { + node if node.is_whitespace() => return Ok(None), + TokenNode::Token(..) | TokenNode::Flag(..) => {} + TokenNode::Call(..) => unimplemented!("call"), + TokenNode::Nodes(..) => unimplemented!("nodes"), + TokenNode::Delimited(..) => unimplemented!("delimited"), + TokenNode::Pipeline(..) => unimplemented!("pipeline"), + TokenNode::Whitespace(..) => unimplemented!("whitespace"), + TokenNode::Error(..) 
=> unimplemented!("error"), + } + + peeked.commit(); + Ok(Some(node.span())) +} + +#[must_use] +enum ExternalExpressionResult { + Eof, + Processed, +} + +#[derive(Debug, Copy, Clone)] +struct ExternalExpression; + +impl ColorSyntax for ExternalExpression { + type Info = ExternalExpressionResult; + type Input = (); + + fn color_syntax<'a, 'b>( + &self, + _input: &(), + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + shapes: &mut Vec>, + ) -> ExternalExpressionResult { + let atom = match expand_atom( + token_nodes, + "external word", + context, + ExpansionRule::permissive(), + ) { + Err(_) => unreachable!("TODO: separate infallible expand_atom"), + Ok(Spanned { + item: AtomicToken::Eof { .. }, + .. + }) => return ExternalExpressionResult::Eof, + Ok(atom) => atom, + }; + + atom.color_tokens(shapes); + return ExternalExpressionResult::Processed; + } +} diff --git a/src/parser/hir/external_command.rs b/src/parser/hir/external_command.rs index 28865330d..df71328ca 100644 --- a/src/parser/hir/external_command.rs +++ b/src/parser/hir/external_command.rs @@ -9,7 +9,7 @@ use std::fmt; )] #[get = "pub(crate)"] pub struct ExternalCommand { - name: Tag, + pub(crate) name: Span, } impl ToDebug for ExternalCommand { diff --git a/src/parser/hir/named.rs b/src/parser/hir/named.rs index 838f643be..f7387e4fd 100644 --- a/src/parser/hir/named.rs +++ b/src/parser/hir/named.rs @@ -43,9 +43,13 @@ impl NamedArguments { match switch { None => self.named.insert(name.into(), NamedValue::AbsentSwitch), - Some(flag) => self - .named - .insert(name, NamedValue::PresentSwitch(*flag.name())), + Some(flag) => self.named.insert( + name, + NamedValue::PresentSwitch(Tag { + span: *flag.name(), + anchor: None, + }), + ), }; } diff --git a/src/parser/hir/path.rs b/src/parser/hir/path.rs index f43edf176..586713298 100644 --- a/src/parser/hir/path.rs +++ b/src/parser/hir/path.rs @@ -1,18 +1,47 @@ use crate::parser::hir::Expression; use crate::prelude::*; -use crate::Tagged; use derive_new::new; -use getset::Getters; +use getset::{Getters, MutGetters}; use serde::{Deserialize, Serialize}; use std::fmt; #[derive( - Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Hash, Getters, Serialize, Deserialize, new, + Debug, + Clone, + Eq, + PartialEq, + Ord, + PartialOrd, + Hash, + Getters, + MutGetters, + Serialize, + Deserialize, + new, )] #[get = "pub(crate)"] pub struct Path { head: Expression, - tail: Vec>, + #[get_mut = "pub(crate)"] + tail: Vec>, +} + +impl fmt::Display for Path { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!(f, "{}", self.head)?; + + for entry in &self.tail { + write!(f, ".{}", entry.item)?; + } + + Ok(()) + } +} + +impl Path { + pub(crate) fn parts(self) -> (Expression, Vec>) { + (self.head, self.tail) + } } impl ToDebug for Path { @@ -20,7 +49,7 @@ impl ToDebug for Path { write!(f, "{}", self.head.debug(source))?; for part in &self.tail { - write!(f, ".{}", part.item())?; + write!(f, ".{}", part.item)?; } Ok(()) diff --git a/src/parser/hir/syntax_shape.rs b/src/parser/hir/syntax_shape.rs new file mode 100644 index 000000000..72fcf9ecb --- /dev/null +++ b/src/parser/hir/syntax_shape.rs @@ -0,0 +1,1268 @@ +mod block; +mod expression; +pub(crate) mod flat_shape; + +use crate::cli::external_command; +use crate::commands::{ + classified::{ClassifiedPipeline, InternalCommand}, + ClassifiedCommand, Command, +}; +use crate::parser::hir::expand_external_tokens::ExternalTokensShape; +use crate::parser::hir::syntax_shape::block::AnyBlockShape; +use 
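Stepping back to expand_external_tokens above: external arguments are never parsed into HIR; runs of adjacent non-whitespace tokens are simply merged into one span per word. A rough, self-contained model of that grouping, using stand-in Token and Span types rather than the real parser types, might look like this.

// Illustration only: simplified span-merging for external arguments.
#[derive(Debug, Clone, Copy, PartialEq)]
struct Span {
    start: usize,
    end: usize,
}

impl Span {
    fn until(self, other: Span) -> Span {
        Span { start: self.start, end: other.end }
    }
}

#[derive(Debug, Clone, Copy)]
enum Token {
    Word(Span),
    Whitespace(Span),
}

/// Merge consecutive word tokens into whitespace-separated argument spans.
fn external_args(tokens: &[Token]) -> Vec<Span> {
    let mut out = Vec::new();
    let mut current: Option<Span> = None;

    for token in tokens {
        match token {
            Token::Word(span) => {
                // Extend the current argument, or start a new one.
                current = Some(match current {
                    Some(start) => start.until(*span),
                    None => *span,
                });
            }
            Token::Whitespace(_) => {
                // Whitespace ends the current argument, if any.
                if let Some(span) = current.take() {
                    out.push(span);
                }
            }
        }
    }
    out.extend(current);
    out
}

fn main() {
    // The arguments of `echo hello world`: two spans after the command head.
    let tokens = [
        Token::Word(Span { start: 5, end: 10 }),
        Token::Whitespace(Span { start: 10, end: 11 }),
        Token::Word(Span { start: 11, end: 16 }),
    ];
    assert_eq!(
        external_args(&tokens),
        vec![Span { start: 5, end: 10 }, Span { start: 11, end: 16 }]
    );
}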
crate::parser::hir::tokens_iterator::Peeked; +use crate::parser::parse_command::{parse_command_tail, CommandTailShape}; +use crate::parser::PipelineElement; +use crate::parser::{ + hir, + hir::{debug_tokens, TokensIterator}, + Operator, Pipeline, RawToken, TokenNode, +}; +use crate::prelude::*; +use derive_new::new; +use getset::Getters; +use log::{self, log_enabled, trace}; +use serde::{Deserialize, Serialize}; +use std::path::{Path, PathBuf}; + +pub(crate) use self::expression::atom::{expand_atom, AtomicToken, ExpansionRule}; +pub(crate) use self::expression::delimited::{ + color_delimited_square, expand_delimited_square, DelimitedShape, +}; +pub(crate) use self::expression::file_path::FilePathShape; +pub(crate) use self::expression::list::{BackoffColoringMode, ExpressionListShape}; +pub(crate) use self::expression::number::{IntShape, NumberShape}; +pub(crate) use self::expression::pattern::{BarePatternShape, PatternShape}; +pub(crate) use self::expression::string::StringShape; +pub(crate) use self::expression::unit::UnitShape; +pub(crate) use self::expression::variable_path::{ + ColorableDotShape, ColumnPathShape, DotShape, ExpressionContinuation, + ExpressionContinuationShape, MemberShape, PathTailShape, VariablePathShape, +}; +pub(crate) use self::expression::{continue_expression, AnyExpressionShape}; +pub(crate) use self::flat_shape::FlatShape; + +#[derive(Debug, Copy, Clone, Serialize, Deserialize)] +pub enum SyntaxShape { + Any, + List, + String, + Member, + ColumnPath, + Number, + Int, + Path, + Pattern, + Block, +} + +impl FallibleColorSyntax for SyntaxShape { + type Info = (); + type Input = (); + + fn color_syntax<'a, 'b>( + &self, + _input: &(), + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + shapes: &mut Vec>, + ) -> Result<(), ShellError> { + match self { + SyntaxShape::Any => { + color_fallible_syntax(&AnyExpressionShape, token_nodes, context, shapes) + } + SyntaxShape::List => { + color_syntax(&ExpressionListShape, token_nodes, context, shapes); + Ok(()) + } + SyntaxShape::Int => color_fallible_syntax(&IntShape, token_nodes, context, shapes), + SyntaxShape::String => color_fallible_syntax_with( + &StringShape, + &FlatShape::String, + token_nodes, + context, + shapes, + ), + SyntaxShape::Member => { + color_fallible_syntax(&MemberShape, token_nodes, context, shapes) + } + SyntaxShape::ColumnPath => { + color_fallible_syntax(&ColumnPathShape, token_nodes, context, shapes) + } + SyntaxShape::Number => { + color_fallible_syntax(&NumberShape, token_nodes, context, shapes) + } + SyntaxShape::Path => { + color_fallible_syntax(&FilePathShape, token_nodes, context, shapes) + } + SyntaxShape::Pattern => { + color_fallible_syntax(&PatternShape, token_nodes, context, shapes) + } + SyntaxShape::Block => { + color_fallible_syntax(&AnyBlockShape, token_nodes, context, shapes) + } + } + } +} + +impl ExpandExpression for SyntaxShape { + fn expand_expr<'a, 'b>( + &self, + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + ) -> Result { + match self { + SyntaxShape::Any => expand_expr(&AnyExpressionShape, token_nodes, context), + SyntaxShape::List => Err(ShellError::unimplemented("SyntaxShape:List")), + SyntaxShape::Int => expand_expr(&IntShape, token_nodes, context), + SyntaxShape::String => expand_expr(&StringShape, token_nodes, context), + SyntaxShape::Member => { + let syntax = expand_syntax(&MemberShape, token_nodes, context)?; + Ok(syntax.to_expr()) + } + SyntaxShape::ColumnPath => { + let Tagged { item: members, tag } = + 
expand_syntax(&ColumnPathShape, token_nodes, context)?; + + Ok(hir::Expression::list( + members.into_iter().map(|s| s.to_expr()).collect(), + tag, + )) + } + SyntaxShape::Number => expand_expr(&NumberShape, token_nodes, context), + SyntaxShape::Path => expand_expr(&FilePathShape, token_nodes, context), + SyntaxShape::Pattern => expand_expr(&PatternShape, token_nodes, context), + SyntaxShape::Block => expand_expr(&AnyBlockShape, token_nodes, context), + } + } +} + +impl std::fmt::Display for SyntaxShape { + fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { + match self { + SyntaxShape::Any => write!(f, "Any"), + SyntaxShape::List => write!(f, "List"), + SyntaxShape::String => write!(f, "String"), + SyntaxShape::Int => write!(f, "Integer"), + SyntaxShape::Member => write!(f, "Member"), + SyntaxShape::ColumnPath => write!(f, "ColumnPath"), + SyntaxShape::Number => write!(f, "Number"), + SyntaxShape::Path => write!(f, "Path"), + SyntaxShape::Pattern => write!(f, "Pattern"), + SyntaxShape::Block => write!(f, "Block"), + } + } +} + +#[derive(Getters, new)] +pub struct ExpandContext<'context> { + #[get = "pub(crate)"] + registry: &'context CommandRegistry, + #[get = "pub(crate)"] + span: Span, + #[get = "pub(crate)"] + source: &'context Text, + homedir: Option, +} + +impl<'context> ExpandContext<'context> { + pub(crate) fn homedir(&self) -> Option<&Path> { + self.homedir.as_ref().map(|h| h.as_path()) + } + + #[cfg(test)] + pub fn with_empty(source: &Text, callback: impl FnOnce(ExpandContext)) { + let mut registry = CommandRegistry::new(); + registry.insert( + "ls", + crate::commands::whole_stream_command(crate::commands::LS), + ); + + callback(ExpandContext { + registry: ®istry, + span: Span::unknown(), + source, + homedir: None, + }) + } +} + +pub trait TestSyntax: std::fmt::Debug + Copy { + fn test<'a, 'b>( + &self, + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + ) -> Option>; +} + +pub trait ExpandExpression: std::fmt::Debug + Copy { + fn expand_expr<'a, 'b>( + &self, + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + ) -> Result; +} + +pub trait FallibleColorSyntax: std::fmt::Debug + Copy { + type Info; + type Input; + + fn color_syntax<'a, 'b>( + &self, + input: &Self::Input, + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + shapes: &mut Vec>, + ) -> Result; +} + +pub trait ColorSyntax: std::fmt::Debug + Copy { + type Info; + type Input; + + fn color_syntax<'a, 'b>( + &self, + input: &Self::Input, + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + shapes: &mut Vec>, + ) -> Self::Info; +} + +// impl ColorSyntax for T +// where +// T: FallibleColorSyntax, +// { +// type Info = Result; +// type Input = T::Input; + +// fn color_syntax<'a, 'b>( +// &self, +// input: &Self::Input, +// token_nodes: &'b mut TokensIterator<'a>, +// context: &ExpandContext, +// shapes: &mut Vec>, +// ) -> Result { +// FallibleColorSyntax::color_syntax(self, input, token_nodes, context, shapes) +// } +// } + +pub(crate) trait ExpandSyntax: std::fmt::Debug + Copy { + type Output: std::fmt::Debug; + + fn expand_syntax<'a, 'b>( + &self, + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + ) -> Result; +} + +pub(crate) fn expand_syntax<'a, 'b, T: ExpandSyntax>( + shape: &T, + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, +) -> Result { + trace!(target: "nu::expand_syntax", "before {} :: {:?}", std::any::type_name::(), debug_tokens(token_nodes, context.source)); + + let result = 
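These trait families carry the central idea of the refactor: a shape can either expand into HIR (ExpandExpression / ExpandSyntax, which may fail), or color the source into FlatShapes (ColorSyntax / FallibleColorSyntax, which is expected to keep making progress on bad input so the highlighter never gives up). A heavily simplified, self-contained model of that split, with invented types that only echo the real ones, is sketched below.

// Illustration only: invented types modeling the expand/color split.
#[derive(Debug, Clone, Copy)]
struct Span {
    start: usize,
    end: usize,
}

#[derive(Debug)]
enum Shape {
    Word,
    Error,
}

trait Expand {
    type Output;
    fn expand(&self, input: &str) -> Result<Self::Output, String>;
}

trait Color {
    fn color(&self, input: &str, shapes: &mut Vec<(Shape, Span)>);
}

#[derive(Debug, Clone, Copy)]
struct WordShape;

impl Expand for WordShape {
    type Output = String;

    fn expand(&self, input: &str) -> Result<String, String> {
        let word: String = input.chars().take_while(|c| !c.is_whitespace()).collect();
        if word.is_empty() {
            Err("expected a word".to_string())
        } else {
            Ok(word)
        }
    }
}

impl Color for WordShape {
    fn color(&self, input: &str, shapes: &mut Vec<(Shape, Span)>) {
        // Coloring never bails out: unparseable input is recorded as an error shape
        // so highlighting can continue past it.
        let end = input.find(char::is_whitespace).unwrap_or(input.len());
        let shape = if end == 0 { Shape::Error } else { Shape::Word };
        shapes.push((shape, Span { start: 0, end }));
    }
}

fn main() {
    let mut shapes = Vec::new();
    WordShape.color("ls *.txt", &mut shapes);
    println!("expanded: {:?}, shapes: {:?}", WordShape.expand("ls *.txt"), shapes);
}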
shape.expand_syntax(token_nodes, context); + + match result { + Err(err) => { + trace!(target: "nu::expand_syntax", "error :: {} :: {:?}", err, debug_tokens(token_nodes, context.source)); + Err(err) + } + + Ok(result) => { + trace!(target: "nu::expand_syntax", "ok :: {:?} :: {:?}", result, debug_tokens(token_nodes, context.source)); + Ok(result) + } + } +} + +pub fn color_syntax<'a, 'b, T: ColorSyntax, U>( + shape: &T, + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + shapes: &mut Vec>, +) -> ((), U) { + trace!(target: "nu::color_syntax", "before {} :: {:?}", std::any::type_name::(), debug_tokens(token_nodes, context.source)); + + let len = shapes.len(); + let result = shape.color_syntax(&(), token_nodes, context, shapes); + + trace!(target: "nu::color_syntax", "ok :: {:?}", debug_tokens(token_nodes, context.source)); + + if log_enabled!(target: "nu::color_syntax", log::Level::Trace) { + trace!(target: "nu::color_syntax", "after {}", std::any::type_name::()); + + if len < shapes.len() { + for i in len..(shapes.len()) { + trace!(target: "nu::color_syntax", "new shape :: {:?}", shapes[i]); + } + } else { + trace!(target: "nu::color_syntax", "no new shapes"); + } + } + + ((), result) +} + +pub fn color_fallible_syntax<'a, 'b, T: FallibleColorSyntax, U>( + shape: &T, + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + shapes: &mut Vec>, +) -> Result { + trace!(target: "nu::color_syntax", "before {} :: {:?}", std::any::type_name::(), debug_tokens(token_nodes, context.source)); + + if token_nodes.at_end() { + trace!(target: "nu::color_syntax", "at eof"); + return Err(ShellError::unexpected_eof("coloring", Tag::unknown())); + } + + let len = shapes.len(); + let result = shape.color_syntax(&(), token_nodes, context, shapes); + + trace!(target: "nu::color_syntax", "ok :: {:?}", debug_tokens(token_nodes, context.source)); + + if log_enabled!(target: "nu::color_syntax", log::Level::Trace) { + trace!(target: "nu::color_syntax", "after {}", std::any::type_name::()); + + if len < shapes.len() { + for i in len..(shapes.len()) { + trace!(target: "nu::color_syntax", "new shape :: {:?}", shapes[i]); + } + } else { + trace!(target: "nu::color_syntax", "no new shapes"); + } + } + + result +} + +pub fn color_syntax_with<'a, 'b, T: ColorSyntax, U, I>( + shape: &T, + input: &I, + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + shapes: &mut Vec>, +) -> ((), U) { + trace!(target: "nu::color_syntax", "before {} :: {:?}", std::any::type_name::(), debug_tokens(token_nodes, context.source)); + + let len = shapes.len(); + let result = shape.color_syntax(input, token_nodes, context, shapes); + + trace!(target: "nu::color_syntax", "ok :: {:?}", debug_tokens(token_nodes, context.source)); + + if log_enabled!(target: "nu::color_syntax", log::Level::Trace) { + trace!(target: "nu::color_syntax", "after {}", std::any::type_name::()); + + if len < shapes.len() { + for i in len..(shapes.len()) { + trace!(target: "nu::color_syntax", "new shape :: {:?}", shapes[i]); + } + } else { + trace!(target: "nu::color_syntax", "no new shapes"); + } + } + + ((), result) +} + +pub fn color_fallible_syntax_with<'a, 'b, T: FallibleColorSyntax, U, I>( + shape: &T, + input: &I, + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + shapes: &mut Vec>, +) -> Result { + trace!(target: "nu::color_syntax", "before {} :: {:?}", std::any::type_name::(), debug_tokens(token_nodes, context.source)); + + if token_nodes.at_end() { + trace!(target: "nu::color_syntax", "at eof"); 
+ return Err(ShellError::unexpected_eof("coloring", Tag::unknown())); + } + + let len = shapes.len(); + let result = shape.color_syntax(input, token_nodes, context, shapes); + + trace!(target: "nu::color_syntax", "ok :: {:?}", debug_tokens(token_nodes, context.source)); + + if log_enabled!(target: "nu::color_syntax", log::Level::Trace) { + trace!(target: "nu::color_syntax", "after {}", std::any::type_name::()); + + if len < shapes.len() { + for i in len..(shapes.len()) { + trace!(target: "nu::color_syntax", "new shape :: {:?}", shapes[i]); + } + } else { + trace!(target: "nu::color_syntax", "no new shapes"); + } + } + + result +} + +pub(crate) fn expand_expr<'a, 'b, T: ExpandExpression>( + shape: &T, + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, +) -> Result { + trace!(target: "nu::expand_syntax", "before {} :: {:?}", std::any::type_name::(), debug_tokens(token_nodes, context.source)); + + let result = shape.expand_syntax(token_nodes, context); + + match result { + Err(err) => { + trace!(target: "nu::expand_syntax", "error :: {} :: {:?}", err, debug_tokens(token_nodes, context.source)); + Err(err) + } + + Ok(result) => { + trace!(target: "nu::expand_syntax", "ok :: {:?} :: {:?}", result, debug_tokens(token_nodes, context.source)); + Ok(result) + } + } +} + +impl ExpandSyntax for T { + type Output = hir::Expression; + + fn expand_syntax<'a, 'b>( + &self, + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + ) -> Result { + ExpandExpression::expand_expr(self, token_nodes, context) + } +} + +pub trait SkipSyntax: std::fmt::Debug + Copy { + fn skip<'a, 'b>( + &self, + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + ) -> Result<(), ShellError>; +} + +enum BarePathState { + Initial, + Seen(Span, Span), + Error(ShellError), +} + +impl BarePathState { + pub fn seen(self, span: Span) -> BarePathState { + match self { + BarePathState::Initial => BarePathState::Seen(span, span), + BarePathState::Seen(start, _) => BarePathState::Seen(start, span), + BarePathState::Error(err) => BarePathState::Error(err), + } + } + + pub fn end(self, peeked: Peeked, reason: impl Into) -> BarePathState { + match self { + BarePathState::Initial => BarePathState::Error(peeked.type_error(reason)), + BarePathState::Seen(start, end) => BarePathState::Seen(start, end), + BarePathState::Error(err) => BarePathState::Error(err), + } + } + + pub fn into_bare(self) -> Result { + match self { + BarePathState::Initial => unreachable!("into_bare in initial state"), + BarePathState::Seen(start, end) => Ok(start.until(end)), + BarePathState::Error(err) => Err(err), + } + } +} + +pub fn expand_bare<'a, 'b>( + token_nodes: &'b mut TokensIterator<'a>, + _context: &ExpandContext, + predicate: impl Fn(&TokenNode) -> bool, +) -> Result { + let mut state = BarePathState::Initial; + + loop { + // Whitespace ends a word + let mut peeked = token_nodes.peek_any(); + + match peeked.node { + None => { + state = state.end(peeked, "word"); + break; + } + Some(node) => { + if predicate(node) { + state = state.seen(node.span()); + peeked.commit(); + } else { + state = state.end(peeked, "word"); + break; + } + } + } + } + + state.into_bare() +} + +#[derive(Debug, Copy, Clone)] +pub struct BarePathShape; + +impl ExpandSyntax for BarePathShape { + type Output = Span; + + fn expand_syntax<'a, 'b>( + &self, + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + ) -> Result { + expand_bare(token_nodes, context, |token| match token { + TokenNode::Token(Spanned { + item: 
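expand_bare above is a small state machine: no token seen yet, a growing span, or an error remembered for later. The standalone sketch below shows the same accumulation pattern on its own; the types are invented, and it returns an Err where the original uses unreachable!.

// Illustration only: the three-state span accumulation used by expand_bare.
#[derive(Debug, Clone, Copy, PartialEq)]
struct Span {
    start: usize,
    end: usize,
}

#[derive(Debug)]
enum BareState {
    Initial,
    Seen(Span, Span),
    Error(String),
}

impl BareState {
    fn seen(self, span: Span) -> BareState {
        match self {
            BareState::Initial => BareState::Seen(span, span),
            BareState::Seen(start, _) => BareState::Seen(start, span),
            err @ BareState::Error(_) => err,
        }
    }

    fn into_bare(self) -> Result<Span, String> {
        match self {
            BareState::Initial => Err("expected word".to_string()),
            BareState::Seen(start, end) => Ok(Span { start: start.start, end: end.end }),
            BareState::Error(err) => Err(err),
        }
    }
}

fn main() {
    // Consuming the spans of `foo.txt` one token at a time (`foo`, `.`, `txt`).
    let state = BareState::Initial
        .seen(Span { start: 0, end: 3 })
        .seen(Span { start: 3, end: 4 })
        .seen(Span { start: 4, end: 7 });
    assert_eq!(state.into_bare(), Ok(Span { start: 0, end: 7 }));
}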
RawToken::Bare, + .. + }) + | TokenNode::Token(Spanned { + item: RawToken::Operator(Operator::Dot), + .. + }) => true, + + _ => false, + }) + } +} + +#[derive(Debug, Copy, Clone)] +pub struct BareShape; + +impl FallibleColorSyntax for BareShape { + type Info = (); + type Input = FlatShape; + + fn color_syntax<'a, 'b>( + &self, + input: &FlatShape, + token_nodes: &'b mut TokensIterator<'a>, + _context: &ExpandContext, + shapes: &mut Vec>, + ) -> Result<(), ShellError> { + token_nodes.peek_any_token(|token| match token { + // If it's a bare token, color it + TokenNode::Token(Spanned { + item: RawToken::Bare, + span, + }) => { + shapes.push((*input).spanned(*span)); + Ok(()) + } + + // otherwise, fail + other => Err(ShellError::type_error("word", other.tagged_type_name())), + }) + } +} + +impl ExpandSyntax for BareShape { + type Output = Spanned; + + fn expand_syntax<'a, 'b>( + &self, + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + ) -> Result { + let peeked = token_nodes.peek_any().not_eof("word")?; + + match peeked.node { + TokenNode::Token(Spanned { + item: RawToken::Bare, + span, + }) => { + peeked.commit(); + Ok(span.spanned_string(context.source)) + } + + other => Err(ShellError::type_error("word", other.tagged_type_name())), + } + } +} + +impl TestSyntax for BareShape { + fn test<'a, 'b>( + &self, + token_nodes: &'b mut TokensIterator<'a>, + _context: &ExpandContext, + ) -> Option> { + let peeked = token_nodes.peek_any(); + + match peeked.node { + Some(TokenNode::Token(token)) => match token.item { + RawToken::Bare => Some(peeked), + _ => None, + }, + + _ => None, + } + } +} + +#[derive(Debug)] +pub enum CommandSignature { + Internal(Spanned>), + LiteralExternal { outer: Span, inner: Span }, + External(Span), + Expression(hir::Expression), +} + +impl CommandSignature { + pub fn to_expression(&self) -> hir::Expression { + match self { + CommandSignature::Internal(command) => { + let span = command.span; + hir::RawExpression::Command(span).spanned(span) + } + CommandSignature::LiteralExternal { outer, inner } => { + hir::RawExpression::ExternalCommand(hir::ExternalCommand::new(*inner)) + .spanned(*outer) + } + CommandSignature::External(span) => { + hir::RawExpression::ExternalCommand(hir::ExternalCommand::new(*span)).spanned(*span) + } + CommandSignature::Expression(expr) => expr.clone(), + } + } +} + +#[derive(Debug, Copy, Clone)] +pub struct PipelineShape; + +// The failure mode is if the head of the token stream is not a pipeline +impl FallibleColorSyntax for PipelineShape { + type Info = (); + type Input = (); + + fn color_syntax<'a, 'b>( + &self, + _input: &(), + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + shapes: &mut Vec>, + ) -> Result<(), ShellError> { + // Make sure we're looking at a pipeline + let Pipeline { parts, .. 
} = token_nodes.peek_any_token(|node| node.as_pipeline())?; + + // Enumerate the pipeline parts + for part in parts { + // If the pipeline part has a prefix `|`, emit a pipe to color + if let Some(pipe) = part.pipe { + shapes.push(FlatShape::Pipe.spanned(pipe)); + } + + // Create a new iterator containing the tokens in the pipeline part to color + let mut token_nodes = TokensIterator::new(&part.tokens.item, part.span, false); + + color_syntax(&MaybeSpaceShape, &mut token_nodes, context, shapes); + color_syntax(&CommandShape, &mut token_nodes, context, shapes); + } + + Ok(()) + } +} + +impl ExpandSyntax for PipelineShape { + type Output = ClassifiedPipeline; + fn expand_syntax<'a, 'b>( + &self, + iterator: &'b mut TokensIterator<'a>, + context: &ExpandContext, + ) -> Result { + let source = context.source; + + let peeked = iterator.peek_any().not_eof("pipeline")?; + let pipeline = peeked.node.as_pipeline()?; + peeked.commit(); + + let Pipeline { parts, .. } = pipeline; + + let commands: Result, ShellError> = parts + .iter() + .map(|item| classify_command(item, context, &source)) + .collect(); + + Ok(ClassifiedPipeline { + commands: commands?, + }) + } +} + +pub enum CommandHeadKind { + External, + Internal(Signature), +} + +#[derive(Debug, Copy, Clone)] +pub struct CommandHeadShape; + +impl FallibleColorSyntax for CommandHeadShape { + type Info = CommandHeadKind; + type Input = (); + + fn color_syntax<'a, 'b>( + &self, + _input: &(), + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + shapes: &mut Vec>, + ) -> Result { + // If we don't ultimately find a token, roll back + token_nodes.atomic(|token_nodes| { + // First, take a look at the next token + let atom = expand_atom( + token_nodes, + "command head", + context, + ExpansionRule::permissive(), + )?; + + match atom.item { + // If the head is an explicit external command (^cmd), color it as an external command + AtomicToken::ExternalCommand { .. 
} => { + shapes.push(FlatShape::ExternalCommand.spanned(atom.span)); + Ok(CommandHeadKind::External) + } + + // If the head is a word, it depends on whether it matches a registered internal command + AtomicToken::Word { text } => { + let name = text.slice(context.source); + + if context.registry.has(name) { + // If the registry has the command, color it as an internal command + shapes.push(FlatShape::InternalCommand.spanned(text)); + let command = context.registry.expect_command(name); + Ok(CommandHeadKind::Internal(command.signature())) + } else { + // Otherwise, color it as an external command + shapes.push(FlatShape::ExternalCommand.spanned(text)); + Ok(CommandHeadKind::External) + } + } + + // Otherwise, we're not actually looking at a command + _ => Err(ShellError::syntax_error( + "No command at the head".tagged(atom.span), + )), + } + }) + } +} + +impl ExpandSyntax for CommandHeadShape { + type Output = CommandSignature; + + fn expand_syntax<'a, 'b>( + &self, + token_nodes: &mut TokensIterator<'_>, + context: &ExpandContext, + ) -> Result { + let node = + parse_single_node_skipping_ws(token_nodes, "command head1", |token, token_span, _| { + Ok(match token { + RawToken::ExternalCommand(span) => CommandSignature::LiteralExternal { + outer: token_span, + inner: span, + }, + RawToken::Bare => { + let name = token_span.slice(context.source); + if context.registry.has(name) { + let command = context.registry.expect_command(name); + CommandSignature::Internal(command.spanned(token_span)) + } else { + CommandSignature::External(token_span) + } + } + _ => { + return Err(ShellError::type_error( + "command head2", + token.type_name().tagged(token_span), + )) + } + }) + }); + + match node { + Ok(expr) => return Ok(expr), + Err(_) => match expand_expr(&AnyExpressionShape, token_nodes, context) { + Ok(expr) => return Ok(CommandSignature::Expression(expr)), + Err(_) => Err(token_nodes.peek_non_ws().type_error("command head3")), + }, + } + } +} + +#[derive(Debug, Copy, Clone)] +pub struct ClassifiedCommandShape; + +impl ExpandSyntax for ClassifiedCommandShape { + type Output = ClassifiedCommand; + + fn expand_syntax<'a, 'b>( + &self, + iterator: &'b mut TokensIterator<'a>, + context: &ExpandContext, + ) -> Result { + let head = expand_syntax(&CommandHeadShape, iterator, context)?; + + match &head { + CommandSignature::Expression(expr) => Err(ShellError::syntax_error( + "Unexpected expression in command position".tagged(expr.span), + )), + + // If the command starts with `^`, treat it as an external command no matter what + CommandSignature::External(name) => { + let name_str = name.slice(&context.source); + + external_command(iterator, &context.source, name_str.tagged(name)) + } + + CommandSignature::LiteralExternal { outer, inner } => { + let name_str = inner.slice(&context.source); + + external_command(iterator, &context.source, name_str.tagged(outer)) + } + + CommandSignature::Internal(command) => { + let tail = + parse_command_tail(&command.signature(), &context, iterator, command.span)?; + + let (positional, named) = match tail { + None => (None, None), + Some((positional, named)) => (positional, named), + }; + + let call = hir::Call { + head: Box::new(head.to_expression()), + positional, + named, + }; + + Ok(ClassifiedCommand::Internal(InternalCommand::new( + command.item.name().to_string(), + Tag { + span: command.span, + anchor: None, + }, + call, + ))) + } + } + } +} + +#[derive(Debug, Copy, Clone)] +pub struct InternalCommandHeadShape; + +impl FallibleColorSyntax for 
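The command-head logic above boils down to one decision: an explicit external head is always external, otherwise a registry lookup decides between an internal and an external command. A toy version of that decision is shown here; a HashSet stands in for the real command registry, and the `^` prefix is folded into one function even though the real lexer has already split it off into a separate token kind.

// Illustration only: simplified internal/external command-head classification.
use std::collections::HashSet;

#[derive(Debug, PartialEq)]
enum CommandHead<'a> {
    Internal(&'a str),
    External(&'a str),
}

fn classify_head<'a>(word: &'a str, registry: &HashSet<&str>) -> CommandHead<'a> {
    if let Some(name) = word.strip_prefix('^') {
        // An explicit `^name` is always treated as an external command.
        CommandHead::External(name)
    } else if registry.contains(word) {
        CommandHead::Internal(word)
    } else {
        CommandHead::External(word)
    }
}

fn main() {
    let registry: HashSet<&str> = vec!["ls", "where", "get"].into_iter().collect();
    assert_eq!(classify_head("ls", &registry), CommandHead::Internal("ls"));
    assert_eq!(classify_head("^ls", &registry), CommandHead::External("ls"));
    assert_eq!(classify_head("rg", &registry), CommandHead::External("rg"));
}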
InternalCommandHeadShape { + type Info = (); + type Input = (); + + fn color_syntax<'a, 'b>( + &self, + _input: &(), + token_nodes: &'b mut TokensIterator<'a>, + _context: &ExpandContext, + shapes: &mut Vec>, + ) -> Result<(), ShellError> { + let peeked_head = token_nodes.peek_non_ws().not_eof("command head4"); + + let peeked_head = match peeked_head { + Err(_) => return Ok(()), + Ok(peeked_head) => peeked_head, + }; + + let _expr = match peeked_head.node { + TokenNode::Token(Spanned { + item: RawToken::Bare, + span, + }) => shapes.push(FlatShape::Word.spanned(*span)), + + TokenNode::Token(Spanned { + item: RawToken::String(_inner_tag), + span, + }) => shapes.push(FlatShape::String.spanned(*span)), + + _node => shapes.push(FlatShape::Error.spanned(peeked_head.node.span())), + }; + + peeked_head.commit(); + + Ok(()) + } +} + +impl ExpandExpression for InternalCommandHeadShape { + fn expand_expr( + &self, + token_nodes: &mut TokensIterator<'_>, + _context: &ExpandContext, + ) -> Result { + let peeked_head = token_nodes.peek_non_ws().not_eof("command head4")?; + + let expr = match peeked_head.node { + TokenNode::Token( + spanned @ Spanned { + item: RawToken::Bare, + .. + }, + ) => spanned.map(|_| hir::RawExpression::Literal(hir::Literal::Bare)), + + TokenNode::Token(Spanned { + item: RawToken::String(inner_span), + span, + }) => hir::RawExpression::Literal(hir::Literal::String(*inner_span)).spanned(*span), + + node => { + return Err(ShellError::type_error( + "command head5", + node.tagged_type_name(), + )) + } + }; + + peeked_head.commit(); + + Ok(expr) + } +} + +pub(crate) struct SingleError<'token> { + expected: &'static str, + node: &'token Spanned, +} + +impl<'token> SingleError<'token> { + pub(crate) fn error(&self) -> ShellError { + ShellError::type_error(self.expected, self.node.type_name().tagged(self.node.span)) + } +} + +fn parse_single_node<'a, 'b, T>( + token_nodes: &'b mut TokensIterator<'a>, + expected: &'static str, + callback: impl FnOnce(RawToken, Span, SingleError) -> Result, +) -> Result { + token_nodes.peek_any_token(|node| match node { + TokenNode::Token(token) => callback( + token.item, + token.span, + SingleError { + expected, + node: token, + }, + ), + + other => Err(ShellError::type_error(expected, other.tagged_type_name())), + }) +} + +fn parse_single_node_skipping_ws<'a, 'b, T>( + token_nodes: &'b mut TokensIterator<'a>, + expected: &'static str, + callback: impl FnOnce(RawToken, Span, SingleError) -> Result, +) -> Result { + let peeked = token_nodes.peek_non_ws().not_eof(expected)?; + + let expr = match peeked.node { + TokenNode::Token(token) => callback( + token.item, + token.span, + SingleError { + expected, + node: token, + }, + )?, + + other => return Err(ShellError::type_error(expected, other.tagged_type_name())), + }; + + peeked.commit(); + + Ok(expr) +} + +#[derive(Debug, Copy, Clone)] +pub struct WhitespaceShape; + +impl FallibleColorSyntax for WhitespaceShape { + type Info = (); + type Input = (); + + fn color_syntax<'a, 'b>( + &self, + _input: &(), + token_nodes: &'b mut TokensIterator<'a>, + _context: &ExpandContext, + shapes: &mut Vec>, + ) -> Result<(), ShellError> { + let peeked = token_nodes.peek_any().not_eof("whitespace"); + + let peeked = match peeked { + Err(_) => return Ok(()), + Ok(peeked) => peeked, + }; + + let _tag = match peeked.node { + TokenNode::Whitespace(span) => shapes.push(FlatShape::Whitespace.spanned(*span)), + + _other => return Ok(()), + }; + + peeked.commit(); + + Ok(()) + } +} + +impl ExpandSyntax for WhitespaceShape { + type 
Output = Span; + + fn expand_syntax<'a, 'b>( + &self, + token_nodes: &'b mut TokensIterator<'a>, + _context: &ExpandContext, + ) -> Result { + let peeked = token_nodes.peek_any().not_eof("whitespace")?; + + let span = match peeked.node { + TokenNode::Whitespace(tag) => *tag, + + other => { + return Err(ShellError::type_error( + "whitespace", + other.tagged_type_name(), + )) + } + }; + + peeked.commit(); + + Ok(span) + } +} + +#[derive(Debug, Copy, Clone)] +pub struct SpacedExpression { + inner: T, +} + +impl ExpandExpression for SpacedExpression { + fn expand_expr<'a, 'b>( + &self, + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + ) -> Result { + // TODO: Make the name part of the trait + let peeked = token_nodes.peek_any().not_eof("whitespace")?; + + match peeked.node { + TokenNode::Whitespace(_) => { + peeked.commit(); + expand_expr(&self.inner, token_nodes, context) + } + + other => Err(ShellError::type_error( + "whitespace", + other.tagged_type_name(), + )), + } + } +} + +pub fn maybe_spaced(inner: T) -> MaybeSpacedExpression { + MaybeSpacedExpression { inner } +} + +#[derive(Debug, Copy, Clone)] +pub struct MaybeSpacedExpression { + inner: T, +} + +#[derive(Debug, Copy, Clone)] +pub struct MaybeSpaceShape; + +impl ColorSyntax for MaybeSpaceShape { + type Info = (); + type Input = (); + + fn color_syntax<'a, 'b>( + &self, + _input: &(), + token_nodes: &'b mut TokensIterator<'a>, + _context: &ExpandContext, + shapes: &mut Vec>, + ) -> Self::Info { + let peeked = token_nodes.peek_any().not_eof("whitespace"); + + let peeked = match peeked { + Err(_) => return, + Ok(peeked) => peeked, + }; + + if let TokenNode::Whitespace(span) = peeked.node { + peeked.commit(); + shapes.push(FlatShape::Whitespace.spanned(*span)); + } + } +} + +#[derive(Debug, Copy, Clone)] +pub struct SpaceShape; + +impl FallibleColorSyntax for SpaceShape { + type Info = (); + type Input = (); + + fn color_syntax<'a, 'b>( + &self, + _input: &(), + token_nodes: &'b mut TokensIterator<'a>, + _context: &ExpandContext, + shapes: &mut Vec>, + ) -> Result<(), ShellError> { + let peeked = token_nodes.peek_any().not_eof("whitespace")?; + + match peeked.node { + TokenNode::Whitespace(span) => { + peeked.commit(); + shapes.push(FlatShape::Whitespace.spanned(*span)); + Ok(()) + } + + other => Err(ShellError::type_error( + "whitespace", + other.tagged_type_name(), + )), + } + } +} + +impl ExpandExpression for MaybeSpacedExpression { + fn expand_expr<'a, 'b>( + &self, + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + ) -> Result { + // TODO: Make the name part of the trait + let peeked = token_nodes.peek_any().not_eof("whitespace")?; + + match peeked.node { + TokenNode::Whitespace(_) => { + peeked.commit(); + expand_expr(&self.inner, token_nodes, context) + } + + _ => { + peeked.rollback(); + expand_expr(&self.inner, token_nodes, context) + } + } + } +} + +pub fn spaced(inner: T) -> SpacedExpression { + SpacedExpression { inner } +} + +fn expand_variable(span: Span, token_span: Span, source: &Text) -> hir::Expression { + if span.slice(source) == "it" { + hir::Expression::it_variable(span, token_span) + } else { + hir::Expression::variable(span, token_span) + } +} + +fn classify_command( + command: &Spanned, + context: &ExpandContext, + source: &Text, +) -> Result { + let mut iterator = TokensIterator::new(&command.tokens.item, command.span, true); + + let head = CommandHeadShape.expand_syntax(&mut iterator, &context)?; + + match &head { + CommandSignature::Expression(_) => 
Err(ShellError::syntax_error( + "Unexpected expression in command position".tagged(command.span), + )), + + // If the command starts with `^`, treat it as an external command no matter what + CommandSignature::External(name) => { + let name_str = name.slice(source); + + external_command(&mut iterator, source, name_str.tagged(name)) + } + + CommandSignature::LiteralExternal { outer, inner } => { + let name_str = inner.slice(source); + + external_command(&mut iterator, source, name_str.tagged(outer)) + } + + CommandSignature::Internal(command) => { + let tail = + parse_command_tail(&command.signature(), &context, &mut iterator, command.span)?; + + let (positional, named) = match tail { + None => (None, None), + Some((positional, named)) => (positional, named), + }; + + let call = hir::Call { + head: Box::new(head.to_expression()), + positional, + named, + }; + + Ok(ClassifiedCommand::Internal(InternalCommand::new( + command.name().to_string(), + Tag { + span: command.span, + anchor: None, + }, + call, + ))) + } + } +} + +#[derive(Debug, Copy, Clone)] +pub struct CommandShape; + +impl ColorSyntax for CommandShape { + type Info = (); + type Input = (); + + fn color_syntax<'a, 'b>( + &self, + _input: &(), + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + shapes: &mut Vec>, + ) { + let kind = color_fallible_syntax(&CommandHeadShape, token_nodes, context, shapes); + + match kind { + Err(_) => { + // We didn't find a command, so we'll have to fall back to parsing this pipeline part + // as a blob of undifferentiated expressions + color_syntax(&ExpressionListShape, token_nodes, context, shapes); + } + + Ok(CommandHeadKind::External) => { + color_syntax(&ExternalTokensShape, token_nodes, context, shapes); + } + Ok(CommandHeadKind::Internal(signature)) => { + color_syntax_with(&CommandTailShape, &signature, token_nodes, context, shapes); + } + }; + } +} diff --git a/src/parser/hir/syntax_shape/block.rs b/src/parser/hir/syntax_shape/block.rs new file mode 100644 index 000000000..7518d8f94 --- /dev/null +++ b/src/parser/hir/syntax_shape/block.rs @@ -0,0 +1,330 @@ +use crate::errors::ShellError; +use crate::parser::{ + hir, + hir::syntax_shape::{ + color_fallible_syntax, color_syntax_with, continue_expression, expand_expr, expand_syntax, + DelimitedShape, ExpandContext, ExpandExpression, ExpressionContinuationShape, + ExpressionListShape, FallibleColorSyntax, FlatShape, MemberShape, PathTailShape, + VariablePathShape, + }, + hir::tokens_iterator::TokensIterator, + parse::token_tree::Delimiter, + RawToken, TokenNode, +}; +use crate::{Span, Spanned, SpannedItem}; + +#[derive(Debug, Copy, Clone)] +pub struct AnyBlockShape; + +impl FallibleColorSyntax for AnyBlockShape { + type Info = (); + type Input = (); + + fn color_syntax<'a, 'b>( + &self, + _input: &(), + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + shapes: &mut Vec>, + ) -> Result<(), ShellError> { + let block = token_nodes.peek_non_ws().not_eof("block"); + + let block = match block { + Err(_) => return Ok(()), + Ok(block) => block, + }; + + // is it just a block? + let block = block.node.as_block(); + + match block { + // If so, color it as a block + Some((children, spans)) => { + let mut token_nodes = TokensIterator::new(children.item, context.span, false); + color_syntax_with( + &DelimitedShape, + &(Delimiter::Brace, spans.0, spans.1), + &mut token_nodes, + context, + shapes, + ); + + return Ok(()); + } + _ => {} + } + + // Otherwise, look for a shorthand block. 
If none found, fail + color_fallible_syntax(&ShorthandBlock, token_nodes, context, shapes) + } +} + +impl ExpandExpression for AnyBlockShape { + fn expand_expr<'a, 'b>( + &self, + token_nodes: &mut TokensIterator<'_>, + context: &ExpandContext, + ) -> Result { + let block = token_nodes.peek_non_ws().not_eof("block")?; + + // is it just a block? + let block = block.node.as_block(); + + match block { + Some((block, _tags)) => { + let mut iterator = TokensIterator::new(&block.item, context.span, false); + + let exprs = expand_syntax(&ExpressionListShape, &mut iterator, context)?; + + return Ok(hir::RawExpression::Block(exprs).spanned(block.span)); + } + _ => {} + } + + expand_syntax(&ShorthandBlock, token_nodes, context) + } +} + +#[derive(Debug, Copy, Clone)] +pub struct ShorthandBlock; + +impl FallibleColorSyntax for ShorthandBlock { + type Info = (); + type Input = (); + + fn color_syntax<'a, 'b>( + &self, + _input: &(), + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + shapes: &mut Vec>, + ) -> Result<(), ShellError> { + // Try to find a shorthand head. If none found, fail + color_fallible_syntax(&ShorthandPath, token_nodes, context, shapes)?; + + loop { + // Check to see whether there's any continuation after the head expression + let result = + color_fallible_syntax(&ExpressionContinuationShape, token_nodes, context, shapes); + + match result { + // if no continuation was found, we're done + Err(_) => break, + // if a continuation was found, look for another one + Ok(_) => continue, + } + } + + Ok(()) + } +} + +impl ExpandExpression for ShorthandBlock { + fn expand_expr<'a, 'b>( + &self, + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + ) -> Result { + let path = expand_expr(&ShorthandPath, token_nodes, context)?; + let start = path.span; + let expr = continue_expression(path, token_nodes, context)?; + let end = expr.span; + let block = hir::RawExpression::Block(vec![expr]).spanned(start.until(end)); + + Ok(block) + } +} + +/// A shorthand for `$it.foo."bar"`, used inside of a shorthand block +#[derive(Debug, Copy, Clone)] +pub struct ShorthandPath; + +impl FallibleColorSyntax for ShorthandPath { + type Info = (); + type Input = (); + + fn color_syntax<'a, 'b>( + &self, + _input: &(), + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + shapes: &mut Vec>, + ) -> Result<(), ShellError> { + token_nodes.atomic(|token_nodes| { + let variable = color_fallible_syntax(&VariablePathShape, token_nodes, context, shapes); + + match variable { + Ok(_) => { + // if it's a variable path, that's the head part + return Ok(()); + } + + Err(_) => { + // otherwise, we'll try to find a member path + } + } + + // look for a member (`` -> `$it.`) + color_fallible_syntax(&MemberShape, token_nodes, context, shapes)?; + + // Now that we've synthesized the head, of the path, proceed to expand the tail of the path + // like any other path. 
+ let tail = color_fallible_syntax(&PathTailShape, token_nodes, context, shapes); + + match tail { + Ok(_) => {} + Err(_) => { + // It's ok if there's no path tail; a single member is sufficient + } + } + + Ok(()) + }) + } +} + +impl ExpandExpression for ShorthandPath { + fn expand_expr<'a, 'b>( + &self, + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + ) -> Result { + // if it's a variable path, that's the head part + let path = expand_expr(&VariablePathShape, token_nodes, context); + + match path { + Ok(path) => return Ok(path), + Err(_) => {} + } + + // Synthesize the head of the shorthand path (`` -> `$it.`) + let mut head = expand_expr(&ShorthandHeadShape, token_nodes, context)?; + + // Now that we've synthesized the head, of the path, proceed to expand the tail of the path + // like any other path. + let tail = expand_syntax(&PathTailShape, token_nodes, context); + + match tail { + Err(_) => return Ok(head), + Ok((tail, _)) => { + // For each member that `PathTailShape` expanded, join it onto the existing expression + // to form a new path + for member in tail { + head = hir::Expression::dot_member(head, member); + } + + Ok(head) + } + } + } +} + +/// A shorthand for `$it.foo."bar"`, used inside of a shorthand block +#[derive(Debug, Copy, Clone)] +pub struct ShorthandHeadShape; + +impl FallibleColorSyntax for ShorthandHeadShape { + type Info = (); + type Input = (); + + fn color_syntax<'a, 'b>( + &self, + _input: &(), + token_nodes: &'b mut TokensIterator<'a>, + _context: &ExpandContext, + shapes: &mut Vec>, + ) -> Result<(), ShellError> { + // A shorthand path must not be at EOF + let peeked = token_nodes.peek_non_ws().not_eof("shorthand path")?; + + match peeked.node { + // If the head of a shorthand path is a bare token, it expands to `$it.bare` + TokenNode::Token(Spanned { + item: RawToken::Bare, + span, + }) => { + peeked.commit(); + shapes.push(FlatShape::BareMember.spanned(*span)); + Ok(()) + } + + // If the head of a shorthand path is a string, it expands to `$it."some string"` + TokenNode::Token(Spanned { + item: RawToken::String(_), + span: outer, + }) => { + peeked.commit(); + shapes.push(FlatShape::StringMember.spanned(*outer)); + Ok(()) + } + + other => Err(ShellError::type_error( + "shorthand head", + other.tagged_type_name(), + )), + } + } +} + +impl ExpandExpression for ShorthandHeadShape { + fn expand_expr<'a, 'b>( + &self, + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + ) -> Result { + // A shorthand path must not be at EOF + let peeked = token_nodes.peek_non_ws().not_eof("shorthand path")?; + + match peeked.node { + // If the head of a shorthand path is a bare token, it expands to `$it.bare` + TokenNode::Token(Spanned { + item: RawToken::Bare, + span, + }) => { + // Commit the peeked token + peeked.commit(); + + // Synthesize an `$it` expression + let it = synthetic_it(); + + // Make a path out of `$it` and the bare token as a member + Ok(hir::Expression::path( + it, + vec![span.spanned_string(context.source)], + *span, + )) + } + + // If the head of a shorthand path is a string, it expands to `$it."some string"` + TokenNode::Token(Spanned { + item: RawToken::String(inner), + span: outer, + }) => { + // Commit the peeked token + peeked.commit(); + + // Synthesize an `$it` expression + let it = synthetic_it(); + + // Make a path out of `$it` and the bare token as a member + Ok(hir::Expression::path( + it, + vec![inner.string(context.source).spanned(*outer)], + *outer, + )) + } + + // Any other token is not a valid bare 
head + other => { + return Err(ShellError::type_error( + "shorthand path", + other.tagged_type_name(), + )) + } + } + } +} + +fn synthetic_it() -> hir::Expression { + hir::Expression::it_variable(Span::unknown(), Span::unknown()) +} diff --git a/src/parser/hir/syntax_shape/expression.rs b/src/parser/hir/syntax_shape/expression.rs new file mode 100644 index 000000000..0be63eaeb --- /dev/null +++ b/src/parser/hir/syntax_shape/expression.rs @@ -0,0 +1,308 @@ +pub(crate) mod atom; +pub(crate) mod delimited; +pub(crate) mod file_path; +pub(crate) mod list; +pub(crate) mod number; +pub(crate) mod pattern; +pub(crate) mod string; +pub(crate) mod unit; +pub(crate) mod variable_path; + +use crate::parser::hir::syntax_shape::{ + color_delimited_square, color_fallible_syntax, color_fallible_syntax_with, expand_atom, + expand_delimited_square, expand_expr, expand_syntax, AtomicToken, BareShape, ColorableDotShape, + DotShape, ExpandContext, ExpandExpression, ExpandSyntax, ExpansionRule, ExpressionContinuation, + ExpressionContinuationShape, FallibleColorSyntax, FlatShape, +}; +use crate::parser::{ + hir, + hir::{Expression, TokensIterator}, +}; +use crate::prelude::*; +use std::path::PathBuf; + +#[derive(Debug, Copy, Clone)] +pub struct AnyExpressionShape; + +impl ExpandExpression for AnyExpressionShape { + fn expand_expr<'a, 'b>( + &self, + token_nodes: &mut TokensIterator<'_>, + context: &ExpandContext, + ) -> Result { + // Look for an expression at the cursor + let head = expand_expr(&AnyExpressionStartShape, token_nodes, context)?; + + continue_expression(head, token_nodes, context) + } +} + +impl FallibleColorSyntax for AnyExpressionShape { + type Info = (); + type Input = (); + + fn color_syntax<'a, 'b>( + &self, + _input: &(), + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + shapes: &mut Vec>, + ) -> Result<(), ShellError> { + // Look for an expression at the cursor + color_fallible_syntax(&AnyExpressionStartShape, token_nodes, context, shapes)?; + + match continue_coloring_expression(token_nodes, context, shapes) { + Err(_) => { + // it's fine for there to be no continuation + } + + Ok(()) => {} + } + + Ok(()) + } +} + +pub(crate) fn continue_expression( + mut head: hir::Expression, + token_nodes: &mut TokensIterator<'_>, + context: &ExpandContext, +) -> Result { + loop { + // Check to see whether there's any continuation after the head expression + let continuation = expand_syntax(&ExpressionContinuationShape, token_nodes, context); + + match continuation { + // If there's no continuation, return the head + Err(_) => return Ok(head), + // Otherwise, form a new expression by combining the head with the continuation + Ok(continuation) => match continuation { + // If the continuation is a `.member`, form a path with the new member + ExpressionContinuation::DotSuffix(_dot, member) => { + head = Expression::dot_member(head, member); + } + + // Otherwise, if the continuation is an infix suffix, form an infix expression + ExpressionContinuation::InfixSuffix(op, expr) => { + head = Expression::infix(head, op, expr); + } + }, + } + } +} + +pub(crate) fn continue_coloring_expression( + token_nodes: &mut TokensIterator<'_>, + context: &ExpandContext, + shapes: &mut Vec>, +) -> Result<(), ShellError> { + // if there's not even one expression continuation, fail + color_fallible_syntax(&ExpressionContinuationShape, token_nodes, context, shapes)?; + + loop { + // Check to see whether there's any continuation after the head expression + let result = + 
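The shorthand-head expansion above rewrites a bare or quoted column name inside a shorthand block into a path rooted at a synthesized $it. The toy model below captures just that rewrite; Expr is an invented stand-in for the real HIR expression type.

// Illustration only: shorthand heads become paths on a synthetic `$it`.
#[derive(Debug, PartialEq)]
enum Expr {
    ItVariable,
    Path(Box<Expr>, Vec<String>),
}

fn expand_shorthand_head(member: &str) -> Expr {
    // Synthesize `$it` and hang the member off it as a path tail.
    Expr::Path(Box::new(Expr::ItVariable), vec![member.to_string()])
}

fn main() {
    // In a condition like `where name > 10`, the bare `name` behaves like `$it.name`.
    assert_eq!(
        expand_shorthand_head("name"),
        Expr::Path(Box::new(Expr::ItVariable), vec!["name".to_string()])
    );
}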
color_fallible_syntax(&ExpressionContinuationShape, token_nodes, context, shapes); + + match result { + Err(_) => { + // We already saw one continuation, so just return + return Ok(()); + } + + Ok(_) => {} + } + } +} + +#[derive(Debug, Copy, Clone)] +pub struct AnyExpressionStartShape; + +impl ExpandExpression for AnyExpressionStartShape { + fn expand_expr<'a, 'b>( + &self, + token_nodes: &mut TokensIterator<'_>, + context: &ExpandContext, + ) -> Result { + let atom = expand_atom(token_nodes, "expression", context, ExpansionRule::new())?; + + match atom.item { + AtomicToken::Size { number, unit } => { + return Ok(hir::Expression::size( + number.to_number(context.source), + unit.item, + Tag { + span: atom.span, + anchor: None, + }, + )) + } + + AtomicToken::SquareDelimited { nodes, .. } => { + expand_delimited_square(&nodes, atom.span.into(), context) + } + + AtomicToken::Word { .. } | AtomicToken::Dot { .. } => { + let end = expand_syntax(&BareTailShape, token_nodes, context)?; + Ok(hir::Expression::bare(atom.span.until_option(end))) + } + + other => return other.spanned(atom.span).into_hir(context, "expression"), + } + } +} + +impl FallibleColorSyntax for AnyExpressionStartShape { + type Info = (); + type Input = (); + + fn color_syntax<'a, 'b>( + &self, + _input: &(), + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + shapes: &mut Vec>, + ) -> Result<(), ShellError> { + let atom = token_nodes.spanned(|token_nodes| { + expand_atom( + token_nodes, + "expression", + context, + ExpansionRule::permissive(), + ) + }); + + let atom = match atom { + Spanned { + item: Err(_err), + span, + } => { + shapes.push(FlatShape::Error.spanned(span)); + return Ok(()); + } + + Spanned { + item: Ok(value), .. + } => value, + }; + + match atom.item { + AtomicToken::Size { number, unit } => shapes.push( + FlatShape::Size { + number: number.span.into(), + unit: unit.span.into(), + } + .spanned(atom.span), + ), + + AtomicToken::SquareDelimited { nodes, spans } => { + color_delimited_square(spans, &nodes, atom.span.into(), context, shapes) + } + + AtomicToken::Word { .. } | AtomicToken::Dot { .. 
} => { + shapes.push(FlatShape::Word.spanned(atom.span)); + } + + _ => atom.color_tokens(shapes), + } + + Ok(()) + } +} + +#[derive(Debug, Copy, Clone)] +pub struct BareTailShape; + +impl FallibleColorSyntax for BareTailShape { + type Info = (); + type Input = (); + + fn color_syntax<'a, 'b>( + &self, + _input: &(), + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + shapes: &mut Vec>, + ) -> Result<(), ShellError> { + let len = shapes.len(); + + loop { + let word = color_fallible_syntax_with( + &BareShape, + &FlatShape::Word, + token_nodes, + context, + shapes, + ); + + match word { + // if a word was found, continue + Ok(_) => continue, + // if a word wasn't found, try to find a dot + Err(_) => {} + } + + // try to find a dot + let dot = color_fallible_syntax_with( + &ColorableDotShape, + &FlatShape::Word, + token_nodes, + context, + shapes, + ); + + match dot { + // if a dot was found, try to find another word + Ok(_) => continue, + // otherwise, we're done + Err(_) => break, + } + } + + if shapes.len() > len { + Ok(()) + } else { + Err(ShellError::syntax_error( + "No tokens matched BareTailShape".tagged_unknown(), + )) + } + } +} + +impl ExpandSyntax for BareTailShape { + type Output = Option; + + fn expand_syntax<'a, 'b>( + &self, + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + ) -> Result, ShellError> { + let mut end: Option = None; + + loop { + match expand_syntax(&BareShape, token_nodes, context) { + Ok(bare) => { + end = Some(bare.span); + continue; + } + + Err(_) => match expand_syntax(&DotShape, token_nodes, context) { + Ok(dot) => { + end = Some(dot); + continue; + } + + Err(_) => break, + }, + } + } + + Ok(end) + } +} + +pub fn expand_file_path(string: &str, context: &ExpandContext) -> PathBuf { + let expanded = shellexpand::tilde_with_context(string, || context.homedir()); + + PathBuf::from(expanded.as_ref()) +} diff --git a/src/parser/hir/syntax_shape/expression/atom.rs b/src/parser/hir/syntax_shape/expression/atom.rs new file mode 100644 index 000000000..bb1b8065e --- /dev/null +++ b/src/parser/hir/syntax_shape/expression/atom.rs @@ -0,0 +1,580 @@ +use crate::parser::hir::syntax_shape::{ + expand_syntax, expression::expand_file_path, parse_single_node, BarePathShape, + BarePatternShape, ExpandContext, UnitShape, +}; +use crate::parser::{ + hir, + hir::{Expression, RawNumber, TokensIterator}, + parse::flag::{Flag, FlagKind}, + DelimitedNode, Delimiter, FlatShape, RawToken, TokenNode, Unit, +}; +use crate::prelude::*; +use crate::{Span, Spanned}; + +#[derive(Debug)] +pub enum AtomicToken<'tokens> { + Eof { + span: Span, + }, + Error { + error: Spanned, + }, + Number { + number: RawNumber, + }, + Size { + number: Spanned, + unit: Spanned, + }, + String { + body: Span, + }, + ItVariable { + name: Span, + }, + Variable { + name: Span, + }, + ExternalCommand { + command: Span, + }, + ExternalWord { + text: Span, + }, + GlobPattern { + pattern: Span, + }, + FilePath { + path: Span, + }, + Word { + text: Span, + }, + SquareDelimited { + spans: (Span, Span), + nodes: &'tokens Vec, + }, + ParenDelimited { + span: (Span, Span), + nodes: &'tokens Vec, + }, + BraceDelimited { + spans: (Span, Span), + nodes: &'tokens Vec, + }, + Pipeline { + pipe: Option, + elements: Spanned<&'tokens Vec>, + }, + ShorthandFlag { + name: Span, + }, + LonghandFlag { + name: Span, + }, + Dot { + text: Span, + }, + Operator { + text: Span, + }, + Whitespace { + text: Span, + }, +} + +pub type SpannedAtomicToken<'tokens> = Spanned>; + +impl<'tokens> 
SpannedAtomicToken<'tokens> { + pub fn into_hir( + &self, + context: &ExpandContext, + expected: &'static str, + ) -> Result { + Ok(match &self.item { + AtomicToken::Eof { .. } => { + return Err(ShellError::type_error( + expected, + "eof atomic token".tagged(self.span), + )) + } + AtomicToken::Error { .. } => { + return Err(ShellError::type_error( + expected, + "eof atomic token".tagged(self.span), + )) + } + AtomicToken::Operator { .. } => { + return Err(ShellError::type_error( + expected, + "operator".tagged(self.span), + )) + } + AtomicToken::ShorthandFlag { .. } => { + return Err(ShellError::type_error( + expected, + "shorthand flag".tagged(self.span), + )) + } + AtomicToken::LonghandFlag { .. } => { + return Err(ShellError::type_error(expected, "flag".tagged(self.span))) + } + AtomicToken::Whitespace { .. } => { + return Err(ShellError::unimplemented("whitespace in AtomicToken")) + } + AtomicToken::Dot { .. } => { + return Err(ShellError::type_error(expected, "dot".tagged(self.span))) + } + AtomicToken::Number { number } => { + Expression::number(number.to_number(context.source), self.span) + } + AtomicToken::FilePath { path } => Expression::file_path( + expand_file_path(path.slice(context.source), context), + self.span, + ), + AtomicToken::Size { number, unit } => { + Expression::size(number.to_number(context.source), **unit, self.span) + } + AtomicToken::String { body } => Expression::string(*body, self.span), + AtomicToken::ItVariable { name } => Expression::it_variable(*name, self.span), + AtomicToken::Variable { name } => Expression::variable(*name, self.span), + AtomicToken::ExternalCommand { command } => { + Expression::external_command(*command, self.span) + } + AtomicToken::ExternalWord { text } => Expression::string(*text, self.span), + AtomicToken::GlobPattern { pattern } => Expression::pattern(*pattern), + AtomicToken::Word { text } => Expression::string(*text, *text), + AtomicToken::SquareDelimited { .. } => unimplemented!("into_hir"), + AtomicToken::ParenDelimited { .. } => unimplemented!("into_hir"), + AtomicToken::BraceDelimited { .. } => unimplemented!("into_hir"), + AtomicToken::Pipeline { .. } => unimplemented!("into_hir"), + }) + } + + pub fn spanned_type_name(&self) -> Spanned<&'static str> { + match &self.item { + AtomicToken::Eof { .. } => "eof", + AtomicToken::Error { .. } => "error", + AtomicToken::Operator { .. } => "operator", + AtomicToken::ShorthandFlag { .. } => "shorthand flag", + AtomicToken::LonghandFlag { .. } => "flag", + AtomicToken::Whitespace { .. } => "whitespace", + AtomicToken::Dot { .. } => "dot", + AtomicToken::Number { .. } => "number", + AtomicToken::FilePath { .. } => "file path", + AtomicToken::Size { .. } => "size", + AtomicToken::String { .. } => "string", + AtomicToken::ItVariable { .. } => "$it", + AtomicToken::Variable { .. } => "variable", + AtomicToken::ExternalCommand { .. } => "external command", + AtomicToken::ExternalWord { .. } => "external word", + AtomicToken::GlobPattern { .. } => "file pattern", + AtomicToken::Word { .. } => "word", + AtomicToken::SquareDelimited { .. } => "array literal", + AtomicToken::ParenDelimited { .. } => "parenthesized expression", + AtomicToken::BraceDelimited { .. } => "block", + AtomicToken::Pipeline { .. } => "pipeline", + } + .spanned(self.span) + } + + pub fn tagged_type_name(&self) -> Tagged<&'static str> { + match &self.item { + AtomicToken::Eof { .. } => "eof", + AtomicToken::Error { .. } => "error", + AtomicToken::Operator { .. } => "operator", + AtomicToken::ShorthandFlag { .. 
} => "shorthand flag", + AtomicToken::LonghandFlag { .. } => "flag", + AtomicToken::Whitespace { .. } => "whitespace", + AtomicToken::Dot { .. } => "dot", + AtomicToken::Number { .. } => "number", + AtomicToken::FilePath { .. } => "file path", + AtomicToken::Size { .. } => "size", + AtomicToken::String { .. } => "string", + AtomicToken::ItVariable { .. } => "$it", + AtomicToken::Variable { .. } => "variable", + AtomicToken::ExternalCommand { .. } => "external command", + AtomicToken::ExternalWord { .. } => "external word", + AtomicToken::GlobPattern { .. } => "file pattern", + AtomicToken::Word { .. } => "word", + AtomicToken::SquareDelimited { .. } => "array literal", + AtomicToken::ParenDelimited { .. } => "parenthesized expression", + AtomicToken::BraceDelimited { .. } => "block", + AtomicToken::Pipeline { .. } => "pipeline", + } + .tagged(self.span) + } + + pub(crate) fn color_tokens(&self, shapes: &mut Vec>) { + match &self.item { + AtomicToken::Eof { .. } => {} + AtomicToken::Error { .. } => return shapes.push(FlatShape::Error.spanned(self.span)), + AtomicToken::Operator { .. } => { + return shapes.push(FlatShape::Operator.spanned(self.span)); + } + AtomicToken::ShorthandFlag { .. } => { + return shapes.push(FlatShape::ShorthandFlag.spanned(self.span)); + } + AtomicToken::LonghandFlag { .. } => { + return shapes.push(FlatShape::Flag.spanned(self.span)); + } + AtomicToken::Whitespace { .. } => { + return shapes.push(FlatShape::Whitespace.spanned(self.span)); + } + AtomicToken::FilePath { .. } => return shapes.push(FlatShape::Path.spanned(self.span)), + AtomicToken::Dot { .. } => return shapes.push(FlatShape::Dot.spanned(self.span)), + AtomicToken::Number { + number: RawNumber::Decimal(_), + } => { + return shapes.push(FlatShape::Decimal.spanned(self.span)); + } + AtomicToken::Number { + number: RawNumber::Int(_), + } => { + return shapes.push(FlatShape::Int.spanned(self.span)); + } + AtomicToken::Size { number, unit } => { + return shapes.push( + FlatShape::Size { + number: number.span, + unit: unit.span, + } + .spanned(self.span), + ); + } + AtomicToken::String { .. } => return shapes.push(FlatShape::String.spanned(self.span)), + AtomicToken::ItVariable { .. } => { + return shapes.push(FlatShape::ItVariable.spanned(self.span)) + } + AtomicToken::Variable { .. } => { + return shapes.push(FlatShape::Variable.spanned(self.span)) + } + AtomicToken::ExternalCommand { .. } => { + return shapes.push(FlatShape::ExternalCommand.spanned(self.span)); + } + AtomicToken::ExternalWord { .. } => { + return shapes.push(FlatShape::ExternalWord.spanned(self.span)) + } + AtomicToken::GlobPattern { .. } => { + return shapes.push(FlatShape::GlobPattern.spanned(self.span)) + } + AtomicToken::Word { .. 
} => return shapes.push(FlatShape::Word.spanned(self.span)), + _ => return shapes.push(FlatShape::Error.spanned(self.span)), + } + } +} + +#[derive(Debug)] +pub enum WhitespaceHandling { + #[allow(unused)] + AllowWhitespace, + RejectWhitespace, +} + +#[derive(Debug)] +pub struct ExpansionRule { + pub(crate) allow_external_command: bool, + pub(crate) allow_external_word: bool, + pub(crate) allow_operator: bool, + pub(crate) allow_eof: bool, + pub(crate) treat_size_as_word: bool, + pub(crate) commit_errors: bool, + pub(crate) whitespace: WhitespaceHandling, +} + +impl ExpansionRule { + pub fn new() -> ExpansionRule { + ExpansionRule { + allow_external_command: false, + allow_external_word: false, + allow_operator: false, + allow_eof: false, + treat_size_as_word: false, + commit_errors: false, + whitespace: WhitespaceHandling::RejectWhitespace, + } + } + + /// The intent of permissive mode is to return an atomic token for every possible + /// input token. This is important for error-correcting parsing, such as the + /// syntax highlighter. + pub fn permissive() -> ExpansionRule { + ExpansionRule { + allow_external_command: true, + allow_external_word: true, + allow_operator: true, + allow_eof: true, + treat_size_as_word: false, + commit_errors: true, + whitespace: WhitespaceHandling::AllowWhitespace, + } + } + + #[allow(unused)] + pub fn allow_external_command(mut self) -> ExpansionRule { + self.allow_external_command = true; + self + } + + #[allow(unused)] + pub fn allow_operator(mut self) -> ExpansionRule { + self.allow_operator = true; + self + } + + #[allow(unused)] + pub fn no_operator(mut self) -> ExpansionRule { + self.allow_operator = false; + self + } + + #[allow(unused)] + pub fn no_external_command(mut self) -> ExpansionRule { + self.allow_external_command = false; + self + } + + #[allow(unused)] + pub fn allow_external_word(mut self) -> ExpansionRule { + self.allow_external_word = true; + self + } + + #[allow(unused)] + pub fn no_external_word(mut self) -> ExpansionRule { + self.allow_external_word = false; + self + } + + #[allow(unused)] + pub fn treat_size_as_word(mut self) -> ExpansionRule { + self.treat_size_as_word = true; + self + } + + #[allow(unused)] + pub fn commit_errors(mut self) -> ExpansionRule { + self.commit_errors = true; + self + } + + #[allow(unused)] + pub fn allow_whitespace(mut self) -> ExpansionRule { + self.whitespace = WhitespaceHandling::AllowWhitespace; + self + } + + #[allow(unused)] + pub fn reject_whitespace(mut self) -> ExpansionRule { + self.whitespace = WhitespaceHandling::RejectWhitespace; + self + } +} + +/// If the caller of expand_atom throws away the returned atomic token returned, it +/// must use a checkpoint to roll it back. +pub fn expand_atom<'me, 'content>( + token_nodes: &'me mut TokensIterator<'content>, + expected: &'static str, + context: &ExpandContext, + rule: ExpansionRule, +) -> Result, ShellError> { + if token_nodes.at_end() { + match rule.allow_eof { + true => { + return Ok(AtomicToken::Eof { + span: Span::unknown(), + } + .spanned(Span::unknown())) + } + false => return Err(ShellError::unexpected_eof("anything", Tag::unknown())), + } + } + + // First, we'll need to handle the situation where more than one token corresponds + // to a single atomic token + + // If treat_size_as_word, don't try to parse the head of the token stream + // as a size. 
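    // A minimal usage sketch (not part of the original patch; the caller and the
    // `10kb` input are assumptions for illustration). Under the default
    // `ExpansionRule::new()`, a bare token such as `10kb` is handed to `UnitShape`
    // below and comes back as `AtomicToken::Size { number, unit }`. A caller that
    // wants the literal word instead could opt out, roughly:
    //
    //     let atom = expand_atom(
    //         token_nodes,
    //         "word",
    //         context,
    //         ExpansionRule::new().treat_size_as_word(),
    //     )?;
    //     // `10kb` now surfaces as `AtomicToken::Word { text }` via the
    //     // bare-path branch further down.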
+ match rule.treat_size_as_word { + true => {} + false => match expand_syntax(&UnitShape, token_nodes, context) { + // If the head of the stream isn't a valid unit, we'll try to parse + // it again next as a word + Err(_) => {} + + // But if it was a valid unit, we're done here + Ok(Spanned { + item: (number, unit), + span, + }) => return Ok(AtomicToken::Size { number, unit }.spanned(span)), + }, + } + + // Try to parse the head of the stream as a bare path. A bare path includes + // words as well as `.`s, connected together without whitespace. + match expand_syntax(&BarePathShape, token_nodes, context) { + // If we didn't find a bare path + Err(_) => {} + Ok(span) => { + let next = token_nodes.peek_any(); + + match next.node { + Some(token) if token.is_pattern() => { + // if the very next token is a pattern, we're looking at a glob, not a + // word, and we should try to parse it as a glob next + } + + _ => return Ok(AtomicToken::Word { text: span }.spanned(span)), + } + } + } + + // Try to parse the head of the stream as a pattern. A pattern includes + // words, words with `*` as well as `.`s, connected together without whitespace. + match expand_syntax(&BarePatternShape, token_nodes, context) { + // If we didn't find a bare path + Err(_) => {} + Ok(span) => return Ok(AtomicToken::GlobPattern { pattern: span }.spanned(span)), + } + + // The next token corresponds to at most one atomic token + + // We need to `peek` because `parse_single_node` doesn't cover all of the + // cases that `expand_atom` covers. We should probably collapse the two + // if possible. + let peeked = token_nodes.peek_any().not_eof(expected)?; + + match peeked.node { + TokenNode::Token(_) => { + // handle this next + } + + TokenNode::Error(error) => { + peeked.commit(); + return Ok(AtomicToken::Error { + error: error.clone(), + } + .spanned(error.span)); + } + + // [ ... ] + TokenNode::Delimited(Spanned { + item: + DelimitedNode { + delimiter: Delimiter::Square, + spans, + children, + }, + span, + }) => { + peeked.commit(); + let span = *span; + return Ok(AtomicToken::SquareDelimited { + nodes: children, + spans: *spans, + } + .spanned(span)); + } + + TokenNode::Flag(Spanned { + item: + Flag { + kind: FlagKind::Shorthand, + name, + }, + span, + }) => { + peeked.commit(); + return Ok(AtomicToken::ShorthandFlag { name: *name }.spanned(*span)); + } + + TokenNode::Flag(Spanned { + item: + Flag { + kind: FlagKind::Longhand, + name, + }, + span, + }) => { + peeked.commit(); + return Ok(AtomicToken::ShorthandFlag { name: *name }.spanned(*span)); + } + + // If we see whitespace, process the whitespace according to the whitespace + // handling rules + TokenNode::Whitespace(span) => match rule.whitespace { + // if whitespace is allowed, return a whitespace token + WhitespaceHandling::AllowWhitespace => { + peeked.commit(); + return Ok(AtomicToken::Whitespace { text: *span }.spanned(*span)); + } + + // if whitespace is disallowed, return an error + WhitespaceHandling::RejectWhitespace => { + return Err(ShellError::syntax_error("Unexpected whitespace".tagged( + Tag { + span: *span, + anchor: None, + }, + ))) + } + }, + + other => { + let span = peeked.node.span(); + + peeked.commit(); + return Ok(AtomicToken::Error { + error: ShellError::type_error("token", other.tagged_type_name()).spanned(span), + } + .spanned(span)); + } + } + + parse_single_node(token_nodes, expected, |token, token_span, err| { + Ok(match token { + // First, the error cases. 
Each error case corresponds to a expansion rule + // flag that can be used to allow the case + + // rule.allow_operator + RawToken::Operator(_) if !rule.allow_operator => return Err(err.error()), + // rule.allow_external_command + RawToken::ExternalCommand(_) if !rule.allow_external_command => { + return Err(ShellError::type_error( + expected, + token.type_name().tagged(Tag { + span: token_span, + anchor: None, + }), + )) + } + // rule.allow_external_word + RawToken::ExternalWord if !rule.allow_external_word => { + return Err(ShellError::invalid_external_word(Tag { + span: token_span, + anchor: None, + })) + } + + RawToken::Number(number) => AtomicToken::Number { number }.spanned(token_span), + RawToken::Operator(_) => AtomicToken::Operator { text: token_span }.spanned(token_span), + RawToken::String(body) => AtomicToken::String { body }.spanned(token_span), + RawToken::Variable(name) if name.slice(context.source) == "it" => { + AtomicToken::ItVariable { name }.spanned(token_span) + } + RawToken::Variable(name) => AtomicToken::Variable { name }.spanned(token_span), + RawToken::ExternalCommand(command) => { + AtomicToken::ExternalCommand { command }.spanned(token_span) + } + RawToken::ExternalWord => { + AtomicToken::ExternalWord { text: token_span }.spanned(token_span) + } + RawToken::GlobPattern => AtomicToken::GlobPattern { + pattern: token_span, + } + .spanned(token_span), + RawToken::Bare => AtomicToken::Word { text: token_span }.spanned(token_span), + }) + }) +} diff --git a/src/parser/hir/syntax_shape/expression/delimited.rs b/src/parser/hir/syntax_shape/expression/delimited.rs new file mode 100644 index 000000000..b52340ab8 --- /dev/null +++ b/src/parser/hir/syntax_shape/expression/delimited.rs @@ -0,0 +1,49 @@ +use crate::parser::hir::syntax_shape::{ + color_syntax, expand_syntax, ColorSyntax, ExpandContext, ExpressionListShape, TokenNode, +}; +use crate::parser::{hir, hir::TokensIterator, Delimiter, FlatShape}; +use crate::prelude::*; + +pub fn expand_delimited_square( + children: &Vec, + span: Span, + context: &ExpandContext, +) -> Result { + let mut tokens = TokensIterator::new(&children, span, false); + + let list = expand_syntax(&ExpressionListShape, &mut tokens, context); + + Ok(hir::Expression::list(list?, Tag { span, anchor: None })) +} + +pub fn color_delimited_square( + (open, close): (Span, Span), + children: &Vec, + span: Span, + context: &ExpandContext, + shapes: &mut Vec>, +) { + shapes.push(FlatShape::OpenDelimiter(Delimiter::Square).spanned(open)); + let mut tokens = TokensIterator::new(&children, span, false); + let _list = color_syntax(&ExpressionListShape, &mut tokens, context, shapes); + shapes.push(FlatShape::CloseDelimiter(Delimiter::Square).spanned(close)); +} + +#[derive(Debug, Copy, Clone)] +pub struct DelimitedShape; + +impl ColorSyntax for DelimitedShape { + type Info = (); + type Input = (Delimiter, Span, Span); + fn color_syntax<'a, 'b>( + &self, + (delimiter, open, close): &(Delimiter, Span, Span), + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + shapes: &mut Vec>, + ) -> Self::Info { + shapes.push(FlatShape::OpenDelimiter(*delimiter).spanned(*open)); + color_syntax(&ExpressionListShape, token_nodes, context, shapes); + shapes.push(FlatShape::CloseDelimiter(*delimiter).spanned(*close)); + } +} diff --git a/src/parser/hir/syntax_shape/expression/file_path.rs b/src/parser/hir/syntax_shape/expression/file_path.rs new file mode 100644 index 000000000..ccb2f8f54 --- /dev/null +++ b/src/parser/hir/syntax_shape/expression/file_path.rs 
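// A small sketch of what the `expand_file_path` helper above does (the
// `/home/alice` value is a made-up example, not taken from this patch):
//
//     let path = expand_file_path("~/notes.txt", context);
//     // With a home directory of `/home/alice`, `path` is
//     // `/home/alice/notes.txt`; strings without a leading `~` are passed
//     // through into a `PathBuf` unchanged.
//
// `shellexpand::tilde_with_context` only rewrites a leading tilde, so this
// helper performs no environment-variable expansion.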
@@ -0,0 +1,71 @@ +use crate::parser::hir::syntax_shape::expression::atom::{expand_atom, AtomicToken, ExpansionRule}; +use crate::parser::hir::syntax_shape::{ + expression::expand_file_path, ExpandContext, ExpandExpression, FallibleColorSyntax, FlatShape, +}; +use crate::parser::{hir, hir::TokensIterator}; +use crate::prelude::*; + +#[derive(Debug, Copy, Clone)] +pub struct FilePathShape; + +impl FallibleColorSyntax for FilePathShape { + type Info = (); + type Input = (); + + fn color_syntax<'a, 'b>( + &self, + _input: &(), + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + shapes: &mut Vec>, + ) -> Result<(), ShellError> { + let atom = expand_atom( + token_nodes, + "file path", + context, + ExpansionRule::permissive(), + ); + + let atom = match atom { + Err(_) => return Ok(()), + Ok(atom) => atom, + }; + + match atom.item { + AtomicToken::Word { .. } + | AtomicToken::String { .. } + | AtomicToken::Number { .. } + | AtomicToken::Size { .. } => { + shapes.push(FlatShape::Path.spanned(atom.span)); + } + + _ => atom.color_tokens(shapes), + } + + Ok(()) + } +} + +impl ExpandExpression for FilePathShape { + fn expand_expr<'a, 'b>( + &self, + token_nodes: &mut TokensIterator<'_>, + context: &ExpandContext, + ) -> Result { + let atom = expand_atom(token_nodes, "file path", context, ExpansionRule::new())?; + + match atom.item { + AtomicToken::Word { text: body } | AtomicToken::String { body } => { + let path = expand_file_path(body.slice(context.source), context); + return Ok(hir::Expression::file_path(path, atom.span)); + } + + AtomicToken::Number { .. } | AtomicToken::Size { .. } => { + let path = atom.span.slice(context.source); + return Ok(hir::Expression::file_path(path, atom.span)); + } + + _ => return atom.into_hir(context, "file path"), + } + } +} diff --git a/src/parser/hir/syntax_shape/expression/list.rs b/src/parser/hir/syntax_shape/expression/list.rs new file mode 100644 index 000000000..575ae9fcd --- /dev/null +++ b/src/parser/hir/syntax_shape/expression/list.rs @@ -0,0 +1,176 @@ +use crate::errors::ShellError; +use crate::parser::{ + hir, + hir::syntax_shape::{ + color_fallible_syntax, color_syntax, expand_atom, expand_expr, maybe_spaced, spaced, + AnyExpressionShape, ColorSyntax, ExpandContext, ExpandSyntax, ExpansionRule, + MaybeSpaceShape, SpaceShape, + }, + hir::TokensIterator, + FlatShape, +}; +use crate::Spanned; + +#[derive(Debug, Copy, Clone)] +pub struct ExpressionListShape; + +impl ExpandSyntax for ExpressionListShape { + type Output = Vec; + + fn expand_syntax<'a, 'b>( + &self, + token_nodes: &mut TokensIterator<'_>, + context: &ExpandContext, + ) -> Result, ShellError> { + let mut exprs = vec![]; + + if token_nodes.at_end_possible_ws() { + return Ok(exprs); + } + + let expr = expand_expr(&maybe_spaced(AnyExpressionShape), token_nodes, context)?; + + exprs.push(expr); + + loop { + if token_nodes.at_end_possible_ws() { + return Ok(exprs); + } + + let expr = expand_expr(&spaced(AnyExpressionShape), token_nodes, context)?; + + exprs.push(expr); + } + } +} + +impl ColorSyntax for ExpressionListShape { + type Info = (); + type Input = (); + + /// The intent of this method is to fully color an expression list shape infallibly. + /// This means that if we can't expand a token into an expression, we fall back to + /// a simpler coloring strategy. + /// + /// This would apply to something like `where x >`, which includes an incomplete + /// binary operator. 
Since we will fail to process it as a binary operator, we'll + /// fall back to a simpler coloring and move on. + fn color_syntax<'a, 'b>( + &self, + _input: &(), + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + shapes: &mut Vec>, + ) { + // We encountered a parsing error and will continue with simpler coloring ("backoff + // coloring mode") + let mut backoff = false; + + // Consume any leading whitespace + color_syntax(&MaybeSpaceShape, token_nodes, context, shapes); + + loop { + // If we reached the very end of the token stream, we're done + if token_nodes.at_end() { + return; + } + + if backoff { + let len = shapes.len(); + + // If we previously encountered a parsing error, use backoff coloring mode + color_syntax(&SimplestExpression, token_nodes, context, shapes); + + if len == shapes.len() && !token_nodes.at_end() { + // This should never happen, but if it does, a panic is better than an infinite loop + panic!("Unexpected tokens left that couldn't be colored even with SimplestExpression") + } + } else { + // Try to color the head of the stream as an expression + match color_fallible_syntax(&AnyExpressionShape, token_nodes, context, shapes) { + // If no expression was found, switch to backoff coloring mode + Err(_) => { + backoff = true; + continue; + } + Ok(_) => {} + } + + // If an expression was found, consume a space + match color_fallible_syntax(&SpaceShape, token_nodes, context, shapes) { + Err(_) => { + // If no space was found, we're either at the end or there's an error. + // Either way, switch to backoff coloring mode. If we're at the end + // it won't have any consequences. + backoff = true; + } + Ok(_) => { + // Otherwise, move on to the next expression + } + } + } + } + } +} + +/// BackoffColoringMode consumes all of the remaining tokens in an infallible way +#[derive(Debug, Copy, Clone)] +pub struct BackoffColoringMode; + +impl ColorSyntax for BackoffColoringMode { + type Info = (); + type Input = (); + + fn color_syntax<'a, 'b>( + &self, + _input: &Self::Input, + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + shapes: &mut Vec>, + ) -> Self::Info { + loop { + if token_nodes.at_end() { + break; + } + + let len = shapes.len(); + color_syntax(&SimplestExpression, token_nodes, context, shapes); + + if len == shapes.len() && !token_nodes.at_end() { + // This shouldn't happen, but if it does, a panic is better than an infinite loop + panic!("SimplestExpression failed to consume any tokens, but it's not at the end. This is unexpected\n== token nodes==\n{:#?}\n\n== shapes ==\n{:#?}", token_nodes, shapes); + } + } + } +} + +/// The point of `SimplestExpression` is to serve as an infallible base case for coloring. +/// As a last ditch effort, if we can't find any way to parse the head of the stream as an +/// expression, fall back to simple coloring. 
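// A rough walk-through of the fallback, using the `where x >` example from
// `ExpressionListShape`'s docs above (the exact shapes pushed are an
// illustration, not asserted by this patch): the leading tokens color as
// ordinary expressions, the dangling `>` cannot be completed into an infix
// expression, and the list shape switches into backoff mode, which repeatedly
// does something like
//
//     // color whatever atom sits at the cursor; this never fails
//     color_syntax(&SimplestExpression, token_nodes, context, shapes);
//
// so every remaining token still receives a `FlatShape` (an operator shape for
// the `>`), and the highlighter never gets stuck on malformed input.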
+#[derive(Debug, Copy, Clone)] +pub struct SimplestExpression; + +impl ColorSyntax for SimplestExpression { + type Info = (); + type Input = (); + + fn color_syntax<'a, 'b>( + &self, + _input: &(), + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + shapes: &mut Vec>, + ) { + let atom = expand_atom( + token_nodes, + "any token", + context, + ExpansionRule::permissive(), + ); + + match atom { + Err(_) => {} + Ok(atom) => atom.color_tokens(shapes), + } + } +} diff --git a/src/parser/hir/syntax_shape/expression/number.rs b/src/parser/hir/syntax_shape/expression/number.rs new file mode 100644 index 000000000..a4e2a9323 --- /dev/null +++ b/src/parser/hir/syntax_shape/expression/number.rs @@ -0,0 +1,136 @@ +use crate::parser::hir::syntax_shape::{ + expand_atom, parse_single_node, ExpandContext, ExpandExpression, ExpansionRule, + FallibleColorSyntax, FlatShape, +}; +use crate::parser::{ + hir, + hir::{RawNumber, TokensIterator}, + RawToken, +}; +use crate::prelude::*; + +#[derive(Debug, Copy, Clone)] +pub struct NumberShape; + +impl ExpandExpression for NumberShape { + fn expand_expr<'a, 'b>( + &self, + token_nodes: &mut TokensIterator<'_>, + context: &ExpandContext, + ) -> Result { + parse_single_node(token_nodes, "Number", |token, token_span, err| { + Ok(match token { + RawToken::GlobPattern | RawToken::Operator(..) => return Err(err.error()), + RawToken::Variable(tag) if tag.slice(context.source) == "it" => { + hir::Expression::it_variable(tag, token_span) + } + RawToken::ExternalCommand(tag) => { + hir::Expression::external_command(tag, token_span) + } + RawToken::ExternalWord => { + return Err(ShellError::invalid_external_word(Tag { + span: token_span, + anchor: None, + })) + } + RawToken::Variable(tag) => hir::Expression::variable(tag, token_span), + RawToken::Number(number) => { + hir::Expression::number(number.to_number(context.source), token_span) + } + RawToken::Bare => hir::Expression::bare(token_span), + RawToken::String(tag) => hir::Expression::string(tag, token_span), + }) + }) + } +} + +impl FallibleColorSyntax for NumberShape { + type Info = (); + type Input = (); + + fn color_syntax<'a, 'b>( + &self, + _input: &(), + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + shapes: &mut Vec>, + ) -> Result<(), ShellError> { + let atom = token_nodes.spanned(|token_nodes| { + expand_atom(token_nodes, "number", context, ExpansionRule::permissive()) + }); + + let atom = match atom { + Spanned { item: Err(_), span } => { + shapes.push(FlatShape::Error.spanned(span)); + return Ok(()); + } + Spanned { item: Ok(atom), .. } => atom, + }; + + atom.color_tokens(shapes); + + Ok(()) + } +} + +#[derive(Debug, Copy, Clone)] +pub struct IntShape; + +impl ExpandExpression for IntShape { + fn expand_expr<'a, 'b>( + &self, + token_nodes: &mut TokensIterator<'_>, + context: &ExpandContext, + ) -> Result { + parse_single_node(token_nodes, "Integer", |token, token_span, err| { + Ok(match token { + RawToken::GlobPattern | RawToken::Operator(..) 
=> return Err(err.error()), + RawToken::ExternalWord => { + return Err(ShellError::invalid_external_word(token_span)) + } + RawToken::Variable(span) if span.slice(context.source) == "it" => { + hir::Expression::it_variable(span, token_span) + } + RawToken::ExternalCommand(span) => { + hir::Expression::external_command(span, token_span) + } + RawToken::Variable(span) => hir::Expression::variable(span, token_span), + RawToken::Number(number @ RawNumber::Int(_)) => { + hir::Expression::number(number.to_number(context.source), token_span) + } + RawToken::Number(_) => return Err(err.error()), + RawToken::Bare => hir::Expression::bare(token_span), + RawToken::String(span) => hir::Expression::string(span, token_span), + }) + }) + } +} + +impl FallibleColorSyntax for IntShape { + type Info = (); + type Input = (); + + fn color_syntax<'a, 'b>( + &self, + _input: &(), + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + shapes: &mut Vec>, + ) -> Result<(), ShellError> { + let atom = token_nodes.spanned(|token_nodes| { + expand_atom(token_nodes, "integer", context, ExpansionRule::permissive()) + }); + + let atom = match atom { + Spanned { item: Err(_), span } => { + shapes.push(FlatShape::Error.spanned(span)); + return Ok(()); + } + Spanned { item: Ok(atom), .. } => atom, + }; + + atom.color_tokens(shapes); + + Ok(()) + } +} diff --git a/src/parser/hir/syntax_shape/expression/pattern.rs b/src/parser/hir/syntax_shape/expression/pattern.rs new file mode 100644 index 000000000..0a11552d5 --- /dev/null +++ b/src/parser/hir/syntax_shape/expression/pattern.rs @@ -0,0 +1,112 @@ +use crate::parser::hir::syntax_shape::{ + expand_atom, expand_bare, expand_syntax, expression::expand_file_path, parse_single_node, + AtomicToken, ExpandContext, ExpandExpression, ExpandSyntax, ExpansionRule, FallibleColorSyntax, + FlatShape, +}; +use crate::parser::{hir, hir::TokensIterator, Operator, RawToken, TokenNode}; +use crate::prelude::*; + +#[derive(Debug, Copy, Clone)] +pub struct PatternShape; + +impl FallibleColorSyntax for PatternShape { + type Info = (); + type Input = (); + + fn color_syntax<'a, 'b>( + &self, + _input: &(), + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + shapes: &mut Vec>, + ) -> Result<(), ShellError> { + token_nodes.atomic(|token_nodes| { + let atom = expand_atom(token_nodes, "pattern", context, ExpansionRule::permissive())?; + + match &atom.item { + AtomicToken::GlobPattern { .. } | AtomicToken::Word { .. } => { + shapes.push(FlatShape::GlobPattern.spanned(atom.span)); + Ok(()) + } + + _ => Err(ShellError::type_error("pattern", atom.tagged_type_name())), + } + }) + } +} + +impl ExpandExpression for PatternShape { + fn expand_expr<'a, 'b>( + &self, + token_nodes: &mut TokensIterator<'_>, + context: &ExpandContext, + ) -> Result { + let pattern = expand_syntax(&BarePatternShape, token_nodes, context); + + match pattern { + Ok(tag) => { + return Ok(hir::Expression::pattern(tag)); + } + Err(_) => {} + } + + parse_single_node(token_nodes, "Pattern", |token, token_tag, _| { + Ok(match token { + RawToken::GlobPattern => { + return Err(ShellError::unreachable( + "glob pattern after glob already returned", + )) + } + RawToken::Operator(..) 
=> { + return Err(ShellError::unreachable("dot after glob already returned")) + } + RawToken::Bare => { + return Err(ShellError::unreachable("bare after glob already returned")) + } + + RawToken::Variable(tag) if tag.slice(context.source) == "it" => { + hir::Expression::it_variable(tag, token_tag) + } + RawToken::Variable(tag) => hir::Expression::variable(tag, token_tag), + RawToken::ExternalCommand(tag) => hir::Expression::external_command(tag, token_tag), + RawToken::ExternalWord => return Err(ShellError::invalid_external_word(token_tag)), + RawToken::Number(_) => hir::Expression::bare(token_tag), + + RawToken::String(tag) => hir::Expression::file_path( + expand_file_path(tag.slice(context.source), context), + token_tag, + ), + }) + }) + } +} + +#[derive(Debug, Copy, Clone)] +pub struct BarePatternShape; + +impl ExpandSyntax for BarePatternShape { + type Output = Span; + + fn expand_syntax<'a, 'b>( + &self, + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + ) -> Result { + expand_bare(token_nodes, context, |token| match token { + TokenNode::Token(Spanned { + item: RawToken::Bare, + .. + }) + | TokenNode::Token(Spanned { + item: RawToken::Operator(Operator::Dot), + .. + }) + | TokenNode::Token(Spanned { + item: RawToken::GlobPattern, + .. + }) => true, + + _ => false, + }) + } +} diff --git a/src/parser/hir/syntax_shape/expression/string.rs b/src/parser/hir/syntax_shape/expression/string.rs new file mode 100644 index 000000000..0dabd70a8 --- /dev/null +++ b/src/parser/hir/syntax_shape/expression/string.rs @@ -0,0 +1,94 @@ +use crate::parser::hir::syntax_shape::{ + expand_atom, expand_variable, parse_single_node, AtomicToken, ExpandContext, ExpandExpression, + ExpansionRule, FallibleColorSyntax, FlatShape, TestSyntax, +}; +use crate::parser::hir::tokens_iterator::Peeked; +use crate::parser::{hir, hir::TokensIterator, RawToken, TokenNode}; +use crate::prelude::*; + +#[derive(Debug, Copy, Clone)] +pub struct StringShape; + +impl FallibleColorSyntax for StringShape { + type Info = (); + type Input = FlatShape; + + fn color_syntax<'a, 'b>( + &self, + input: &FlatShape, + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + shapes: &mut Vec>, + ) -> Result<(), ShellError> { + let atom = expand_atom(token_nodes, "string", context, ExpansionRule::permissive()); + + let atom = match atom { + Err(_) => return Ok(()), + Ok(atom) => atom, + }; + + match atom { + Spanned { + item: AtomicToken::String { .. }, + span, + } => shapes.push((*input).spanned(span)), + other => other.color_tokens(shapes), + } + + Ok(()) + } +} + +impl ExpandExpression for StringShape { + fn expand_expr<'a, 'b>( + &self, + token_nodes: &mut TokensIterator<'_>, + context: &ExpandContext, + ) -> Result { + parse_single_node(token_nodes, "String", |token, token_span, _| { + Ok(match token { + RawToken::GlobPattern => { + return Err(ShellError::type_error( + "String", + "glob pattern".tagged(token_span), + )) + } + RawToken::Operator(..) 
=> { + return Err(ShellError::type_error( + "String", + "operator".tagged(token_span), + )) + } + RawToken::Variable(span) => expand_variable(span, token_span, &context.source), + RawToken::ExternalCommand(span) => { + hir::Expression::external_command(span, token_span) + } + RawToken::ExternalWord => { + return Err(ShellError::invalid_external_word(token_span)) + } + RawToken::Number(_) => hir::Expression::bare(token_span), + RawToken::Bare => hir::Expression::bare(token_span), + RawToken::String(span) => hir::Expression::string(span, token_span), + }) + }) + } +} + +impl TestSyntax for StringShape { + fn test<'a, 'b>( + &self, + token_nodes: &'b mut TokensIterator<'a>, + _context: &ExpandContext, + ) -> Option> { + let peeked = token_nodes.peek_any(); + + match peeked.node { + Some(TokenNode::Token(token)) => match token.item { + RawToken::String(_) => Some(peeked), + _ => None, + }, + + _ => None, + } + } +} diff --git a/src/parser/hir/syntax_shape/expression/unit.rs b/src/parser/hir/syntax_shape/expression/unit.rs new file mode 100644 index 000000000..03602f108 --- /dev/null +++ b/src/parser/hir/syntax_shape/expression/unit.rs @@ -0,0 +1,92 @@ +use crate::data::meta::Span; +use crate::parser::hir::syntax_shape::{ExpandContext, ExpandSyntax}; +use crate::parser::parse::tokens::RawNumber; +use crate::parser::parse::unit::Unit; +use crate::parser::{hir::TokensIterator, RawToken, TokenNode}; +use crate::prelude::*; +use nom::branch::alt; +use nom::bytes::complete::tag; +use nom::character::complete::digit1; +use nom::combinator::{all_consuming, opt, value}; +use nom::IResult; + +#[derive(Debug, Copy, Clone)] +pub struct UnitShape; + +impl ExpandSyntax for UnitShape { + type Output = Spanned<(Spanned, Spanned)>; + + fn expand_syntax<'a, 'b>( + &self, + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + ) -> Result, Spanned)>, ShellError> { + let peeked = token_nodes.peek_any().not_eof("unit")?; + + let span = match peeked.node { + TokenNode::Token(Spanned { + item: RawToken::Bare, + span, + }) => span, + _ => return Err(peeked.type_error("unit")), + }; + + let unit = unit_size(span.slice(context.source), *span); + + let (_, (number, unit)) = match unit { + Err(_) => { + return Err(ShellError::type_error( + "unit", + "word".tagged(Tag::unknown()), + )) + } + Ok((number, unit)) => (number, unit), + }; + + peeked.commit(); + Ok((number, unit).spanned(*span)) + } +} + +fn unit_size(input: &str, bare_span: Span) -> IResult<&str, (Spanned, Spanned)> { + let (input, digits) = digit1(input)?; + + let (input, dot) = opt(tag("."))(input)?; + + let (input, number) = match dot { + Some(dot) => { + let (input, rest) = digit1(input)?; + ( + input, + RawNumber::decimal(Span::new( + bare_span.start(), + bare_span.start() + digits.len() + dot.len() + rest.len(), + )), + ) + } + + None => ( + input, + RawNumber::int(Span::new( + bare_span.start(), + bare_span.start() + digits.len(), + )), + ), + }; + + let (input, unit) = all_consuming(alt(( + value(Unit::B, alt((tag("B"), tag("b")))), + value(Unit::KB, alt((tag("KB"), tag("kb"), tag("Kb")))), + value(Unit::MB, alt((tag("MB"), tag("mb"), tag("Mb")))), + value(Unit::MB, alt((tag("GB"), tag("gb"), tag("Gb")))), + value(Unit::MB, alt((tag("TB"), tag("tb"), tag("Tb")))), + value(Unit::MB, alt((tag("PB"), tag("pb"), tag("Pb")))), + )))(input)?; + + let start_span = number.span.end(); + + Ok(( + input, + (number, unit.spanned(Span::new(start_span, bare_span.end()))), + )) +} diff --git a/src/parser/hir/syntax_shape/expression/variable_path.rs 
b/src/parser/hir/syntax_shape/expression/variable_path.rs new file mode 100644 index 000000000..04b511d89 --- /dev/null +++ b/src/parser/hir/syntax_shape/expression/variable_path.rs @@ -0,0 +1,735 @@ +use crate::parser::hir::syntax_shape::{ + color_fallible_syntax, color_fallible_syntax_with, expand_atom, expand_expr, expand_syntax, + parse_single_node, AnyExpressionShape, AtomicToken, BareShape, ExpandContext, ExpandExpression, + ExpandSyntax, ExpansionRule, FallibleColorSyntax, FlatShape, Peeked, SkipSyntax, StringShape, + TestSyntax, WhitespaceShape, +}; +use crate::parser::{hir, hir::Expression, hir::TokensIterator, Operator, RawToken}; +use crate::prelude::*; + +#[derive(Debug, Copy, Clone)] +pub struct VariablePathShape; + +impl ExpandExpression for VariablePathShape { + fn expand_expr<'a, 'b>( + &self, + token_nodes: &mut TokensIterator<'_>, + context: &ExpandContext, + ) -> Result { + // 1. let the head be the first token, expecting a variable + // 2. let the tail be an empty list of members + // 2. while the next token (excluding ws) is a dot: + // 1. consume the dot + // 2. consume the next token as a member and push it onto tail + + let head = expand_expr(&VariableShape, token_nodes, context)?; + let start = head.span; + let mut end = start; + let mut tail: Vec> = vec![]; + + loop { + match DotShape.skip(token_nodes, context) { + Err(_) => break, + Ok(_) => {} + } + + let syntax = expand_syntax(&MemberShape, token_nodes, context)?; + let member = syntax.to_spanned_string(context.source); + + end = member.span; + tail.push(member); + } + + Ok(hir::Expression::path(head, tail, start.until(end))) + } +} + +impl FallibleColorSyntax for VariablePathShape { + type Info = (); + type Input = (); + + fn color_syntax<'a, 'b>( + &self, + _input: &(), + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + shapes: &mut Vec>, + ) -> Result<(), ShellError> { + token_nodes.atomic(|token_nodes| { + // If the head of the token stream is not a variable, fail + color_fallible_syntax(&VariableShape, token_nodes, context, shapes)?; + + loop { + // look for a dot at the head of a stream + let dot = color_fallible_syntax_with( + &ColorableDotShape, + &FlatShape::Dot, + token_nodes, + context, + shapes, + ); + + // if there's no dot, we're done + match dot { + Err(_) => break, + Ok(_) => {} + } + + // otherwise, look for a member, and if you don't find one, fail + color_fallible_syntax(&MemberShape, token_nodes, context, shapes)?; + } + + Ok(()) + }) + } +} + +#[derive(Debug, Copy, Clone)] +pub struct PathTailShape; + +/// The failure mode of `PathTailShape` is a dot followed by a non-member +impl FallibleColorSyntax for PathTailShape { + type Info = (); + type Input = (); + + fn color_syntax<'a, 'b>( + &self, + _input: &(), + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + shapes: &mut Vec>, + ) -> Result<(), ShellError> { + token_nodes.atomic(|token_nodes| loop { + let result = color_fallible_syntax_with( + &ColorableDotShape, + &FlatShape::Dot, + token_nodes, + context, + shapes, + ); + + match result { + Err(_) => return Ok(()), + Ok(_) => {} + } + + // If we've seen a dot but not a member, fail + color_fallible_syntax(&MemberShape, token_nodes, context, shapes)?; + }) + } +} + +impl ExpandSyntax for PathTailShape { + type Output = (Vec>, Span); + fn expand_syntax<'a, 'b>( + &self, + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + ) -> Result { + let mut end: Option = None; + let mut tail = vec![]; + + loop { + match 
DotShape.skip(token_nodes, context) { + Err(_) => break, + Ok(_) => {} + } + + let syntax = expand_syntax(&MemberShape, token_nodes, context)?; + let member = syntax.to_spanned_string(context.source); + end = Some(member.span); + tail.push(member); + } + + match end { + None => { + return Err(ShellError::type_error("path tail", { + let typed_span = token_nodes.typed_span_at_cursor(); + + Tagged { + tag: typed_span.span.into(), + item: typed_span.item, + } + })) + } + + Some(end) => Ok((tail, end)), + } + } +} + +#[derive(Debug)] +pub enum ExpressionContinuation { + DotSuffix(Span, Spanned), + InfixSuffix(Spanned, Expression), +} + +/// An expression continuation +#[derive(Debug, Copy, Clone)] +pub struct ExpressionContinuationShape; + +impl ExpandSyntax for ExpressionContinuationShape { + type Output = ExpressionContinuation; + + fn expand_syntax<'a, 'b>( + &self, + token_nodes: &mut TokensIterator<'_>, + context: &ExpandContext, + ) -> Result { + // Try to expand a `.` + let dot = expand_syntax(&DotShape, token_nodes, context); + + match dot { + // If a `.` was matched, it's a `Path`, and we expect a `Member` next + Ok(dot) => { + let syntax = expand_syntax(&MemberShape, token_nodes, context)?; + let member = syntax.to_spanned_string(context.source); + + Ok(ExpressionContinuation::DotSuffix(dot, member)) + } + + // Otherwise, we expect an infix operator and an expression next + Err(_) => { + let (_, op, _) = expand_syntax(&InfixShape, token_nodes, context)?; + let next = expand_expr(&AnyExpressionShape, token_nodes, context)?; + + Ok(ExpressionContinuation::InfixSuffix(op, next)) + } + } + } +} + +pub enum ContinuationInfo { + Dot, + Infix, +} + +impl FallibleColorSyntax for ExpressionContinuationShape { + type Info = ContinuationInfo; + type Input = (); + + fn color_syntax<'a, 'b>( + &self, + _input: &(), + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + shapes: &mut Vec>, + ) -> Result { + token_nodes.atomic(|token_nodes| { + // Try to expand a `.` + let dot = color_fallible_syntax_with( + &ColorableDotShape, + &FlatShape::Dot, + token_nodes, + context, + shapes, + ); + + match dot { + Ok(_) => { + // we found a dot, so let's keep looking for a member; if no member was found, fail + color_fallible_syntax(&MemberShape, token_nodes, context, shapes)?; + + Ok(ContinuationInfo::Dot) + } + Err(_) => { + let mut new_shapes = vec![]; + let result = token_nodes.atomic(|token_nodes| { + // we didn't find a dot, so let's see if we're looking at an infix. If not found, fail + color_fallible_syntax(&InfixShape, token_nodes, context, &mut new_shapes)?; + + // now that we've seen an infix shape, look for any expression. 
If not found, fail + color_fallible_syntax( + &AnyExpressionShape, + token_nodes, + context, + &mut new_shapes, + )?; + + Ok(ContinuationInfo::Infix) + })?; + shapes.extend(new_shapes); + Ok(result) + } + } + }) + } +} + +#[derive(Debug, Copy, Clone)] +pub struct VariableShape; + +impl ExpandExpression for VariableShape { + fn expand_expr<'a, 'b>( + &self, + token_nodes: &mut TokensIterator<'_>, + context: &ExpandContext, + ) -> Result { + parse_single_node(token_nodes, "variable", |token, token_tag, _| { + Ok(match token { + RawToken::Variable(tag) => { + if tag.slice(context.source) == "it" { + hir::Expression::it_variable(tag, token_tag) + } else { + hir::Expression::variable(tag, token_tag) + } + } + _ => { + return Err(ShellError::type_error( + "variable", + token.type_name().tagged(token_tag), + )) + } + }) + }) + } +} + +impl FallibleColorSyntax for VariableShape { + type Info = (); + type Input = (); + + fn color_syntax<'a, 'b>( + &self, + _input: &(), + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + shapes: &mut Vec>, + ) -> Result<(), ShellError> { + let atom = expand_atom( + token_nodes, + "variable", + context, + ExpansionRule::permissive(), + ); + + let atom = match atom { + Err(err) => return Err(err), + Ok(atom) => atom, + }; + + match &atom.item { + AtomicToken::Variable { .. } => { + shapes.push(FlatShape::Variable.spanned(atom.span)); + Ok(()) + } + AtomicToken::ItVariable { .. } => { + shapes.push(FlatShape::ItVariable.spanned(atom.span)); + Ok(()) + } + _ => Err(ShellError::type_error("variable", atom.tagged_type_name())), + } + } +} + +#[derive(Debug, Clone, Copy)] +pub enum Member { + String(/* outer */ Span, /* inner */ Span), + Bare(Span), +} + +impl Member { + pub(crate) fn to_expr(&self) -> hir::Expression { + match self { + Member::String(outer, inner) => hir::Expression::string(*inner, *outer), + Member::Bare(span) => hir::Expression::string(*span, *span), + } + } + + pub(crate) fn span(&self) -> Span { + match self { + Member::String(outer, _inner) => *outer, + Member::Bare(span) => *span, + } + } + + pub(crate) fn to_spanned_string(&self, source: &str) -> Spanned { + match self { + Member::String(outer, inner) => inner.string(source).spanned(*outer), + Member::Bare(span) => span.spanned_string(source), + } + } + + pub(crate) fn tagged_type_name(&self) -> Tagged<&'static str> { + match self { + Member::String(outer, _inner) => "string".tagged(outer), + Member::Bare(span) => "word".tagged(Tag { + span: *span, + anchor: None, + }), + } + } +} + +enum ColumnPathState { + Initial, + LeadingDot(Span), + Dot(Span, Vec, Span), + Member(Span, Vec), + Error(ShellError), +} + +impl ColumnPathState { + pub fn dot(self, dot: Span) -> ColumnPathState { + match self { + ColumnPathState::Initial => ColumnPathState::LeadingDot(dot), + ColumnPathState::LeadingDot(_) => { + ColumnPathState::Error(ShellError::type_error("column", "dot".tagged(dot))) + } + ColumnPathState::Dot(..) 
=> { + ColumnPathState::Error(ShellError::type_error("column", "dot".tagged(dot))) + } + ColumnPathState::Member(tag, members) => ColumnPathState::Dot(tag, members, dot), + ColumnPathState::Error(err) => ColumnPathState::Error(err), + } + } + + pub fn member(self, member: Member) -> ColumnPathState { + match self { + ColumnPathState::Initial => ColumnPathState::Member(member.span(), vec![member]), + ColumnPathState::LeadingDot(tag) => { + ColumnPathState::Member(tag.until(member.span()), vec![member]) + } + + ColumnPathState::Dot(tag, mut tags, _) => { + ColumnPathState::Member(tag.until(member.span()), { + tags.push(member); + tags + }) + } + ColumnPathState::Member(..) => { + ColumnPathState::Error(ShellError::type_error("column", member.tagged_type_name())) + } + ColumnPathState::Error(err) => ColumnPathState::Error(err), + } + } + + pub fn into_path(self, next: Peeked) -> Result>, ShellError> { + match self { + ColumnPathState::Initial => Err(next.type_error("column path")), + ColumnPathState::LeadingDot(dot) => { + Err(ShellError::type_error("column", "dot".tagged(dot))) + } + ColumnPathState::Dot(_tag, _members, dot) => { + Err(ShellError::type_error("column", "dot".tagged(dot))) + } + ColumnPathState::Member(tag, tags) => Ok(tags.tagged(tag)), + ColumnPathState::Error(err) => Err(err), + } + } +} + +pub fn expand_column_path<'a, 'b>( + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, +) -> Result>, ShellError> { + let mut state = ColumnPathState::Initial; + + loop { + let member = MemberShape.expand_syntax(token_nodes, context); + + match member { + Err(_) => break, + Ok(member) => state = state.member(member), + } + + let dot = DotShape.expand_syntax(token_nodes, context); + + match dot { + Err(_) => break, + Ok(dot) => state = state.dot(dot), + } + } + + state.into_path(token_nodes.peek_non_ws()) +} + +#[derive(Debug, Copy, Clone)] +pub struct ColumnPathShape; + +impl FallibleColorSyntax for ColumnPathShape { + type Info = (); + type Input = (); + + fn color_syntax<'a, 'b>( + &self, + _input: &(), + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + shapes: &mut Vec>, + ) -> Result<(), ShellError> { + // If there's not even one member shape, fail + color_fallible_syntax(&MemberShape, token_nodes, context, shapes)?; + + loop { + let checkpoint = token_nodes.checkpoint(); + + match color_fallible_syntax_with( + &ColorableDotShape, + &FlatShape::Dot, + checkpoint.iterator, + context, + shapes, + ) { + Err(_) => { + // we already saw at least one member shape, so return successfully + return Ok(()); + } + + Ok(_) => { + match color_fallible_syntax(&MemberShape, checkpoint.iterator, context, shapes) + { + Err(_) => { + // we saw a dot but not a member (but we saw at least one member), + // so don't commit the dot but return successfully + return Ok(()); + } + + Ok(_) => { + // we saw a dot and a member, so commit it and continue on + checkpoint.commit(); + } + } + } + } + } + } +} + +impl ExpandSyntax for ColumnPathShape { + type Output = Tagged>; + + fn expand_syntax<'a, 'b>( + &self, + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + ) -> Result { + expand_column_path(token_nodes, context) + } +} + +#[derive(Debug, Copy, Clone)] +pub struct MemberShape; + +impl FallibleColorSyntax for MemberShape { + type Info = (); + type Input = (); + + fn color_syntax<'a, 'b>( + &self, + _input: &(), + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + shapes: &mut Vec>, + ) -> Result<(), ShellError> { + let bare = 
color_fallible_syntax_with( + &BareShape, + &FlatShape::BareMember, + token_nodes, + context, + shapes, + ); + + match bare { + Ok(_) => return Ok(()), + Err(_) => { + // If we don't have a bare word, we'll look for a string + } + } + + // Look for a string token. If we don't find one, fail + color_fallible_syntax_with( + &StringShape, + &FlatShape::StringMember, + token_nodes, + context, + shapes, + ) + } +} + +impl ExpandSyntax for MemberShape { + type Output = Member; + + fn expand_syntax<'a, 'b>( + &self, + token_nodes: &mut TokensIterator<'_>, + context: &ExpandContext, + ) -> Result { + let bare = BareShape.test(token_nodes, context); + if let Some(peeked) = bare { + let node = peeked.not_eof("column")?.commit(); + return Ok(Member::Bare(node.span())); + } + + let string = StringShape.test(token_nodes, context); + + if let Some(peeked) = string { + let node = peeked.not_eof("column")?.commit(); + let (outer, inner) = node.expect_string(); + + return Ok(Member::String(outer, inner)); + } + + Err(token_nodes.peek_any().type_error("column")) + } +} + +#[derive(Debug, Copy, Clone)] +pub struct DotShape; + +#[derive(Debug, Copy, Clone)] +pub struct ColorableDotShape; + +impl FallibleColorSyntax for ColorableDotShape { + type Info = (); + type Input = FlatShape; + + fn color_syntax<'a, 'b>( + &self, + input: &FlatShape, + token_nodes: &'b mut TokensIterator<'a>, + _context: &ExpandContext, + shapes: &mut Vec>, + ) -> Result<(), ShellError> { + let peeked = token_nodes.peek_any().not_eof("dot")?; + + match peeked.node { + node if node.is_dot() => { + peeked.commit(); + shapes.push((*input).spanned(node.span())); + Ok(()) + } + + other => Err(ShellError::type_error("dot", other.tagged_type_name())), + } + } +} + +impl SkipSyntax for DotShape { + fn skip<'a, 'b>( + &self, + token_nodes: &mut TokensIterator<'_>, + context: &ExpandContext, + ) -> Result<(), ShellError> { + expand_syntax(self, token_nodes, context)?; + + Ok(()) + } +} + +impl ExpandSyntax for DotShape { + type Output = Span; + + fn expand_syntax<'a, 'b>( + &self, + token_nodes: &'b mut TokensIterator<'a>, + _context: &ExpandContext, + ) -> Result { + parse_single_node(token_nodes, "dot", |token, token_span, _| { + Ok(match token { + RawToken::Operator(Operator::Dot) => token_span, + _ => { + return Err(ShellError::type_error( + "dot", + token.type_name().tagged(token_span), + )) + } + }) + }) + } +} + +#[derive(Debug, Copy, Clone)] +pub struct InfixShape; + +impl FallibleColorSyntax for InfixShape { + type Info = (); + type Input = (); + + fn color_syntax<'a, 'b>( + &self, + _input: &(), + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + outer_shapes: &mut Vec>, + ) -> Result<(), ShellError> { + let checkpoint = token_nodes.checkpoint(); + let mut shapes = vec![]; + + // An infix operator must be prefixed by whitespace. If no whitespace was found, fail + color_fallible_syntax(&WhitespaceShape, checkpoint.iterator, context, &mut shapes)?; + + // Parse the next TokenNode after the whitespace + parse_single_node( + checkpoint.iterator, + "infix operator", + |token, token_span, _| { + match token { + // If it's an operator (and not `.`), it's a match + RawToken::Operator(operator) if operator != Operator::Dot => { + shapes.push(FlatShape::Operator.spanned(token_span)); + Ok(()) + } + + // Otherwise, it's not a match + _ => Err(ShellError::type_error( + "infix operator", + token.type_name().tagged(token_span), + )), + } + }, + )?; + + // An infix operator must be followed by whitespace. 
If no whitespace was found, fail + color_fallible_syntax(&WhitespaceShape, checkpoint.iterator, context, &mut shapes)?; + + outer_shapes.extend(shapes); + checkpoint.commit(); + Ok(()) + } +} + +impl ExpandSyntax for InfixShape { + type Output = (Span, Spanned, Span); + + fn expand_syntax<'a, 'b>( + &self, + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + ) -> Result { + let checkpoint = token_nodes.checkpoint(); + + // An infix operator must be prefixed by whitespace + let start = expand_syntax(&WhitespaceShape, checkpoint.iterator, context)?; + + // Parse the next TokenNode after the whitespace + let operator = parse_single_node( + checkpoint.iterator, + "infix operator", + |token, token_span, _| { + Ok(match token { + // If it's an operator (and not `.`), it's a match + RawToken::Operator(operator) if operator != Operator::Dot => { + operator.spanned(token_span) + } + + // Otherwise, it's not a match + _ => { + return Err(ShellError::type_error( + "infix operator", + token.type_name().tagged(token_span), + )) + } + }) + }, + )?; + + // An infix operator must be followed by whitespace + let end = expand_syntax(&WhitespaceShape, checkpoint.iterator, context)?; + + checkpoint.commit(); + + Ok((start, operator, end)) + } +} diff --git a/src/parser/hir/syntax_shape/flat_shape.rs b/src/parser/hir/syntax_shape/flat_shape.rs new file mode 100644 index 000000000..b961d1f56 --- /dev/null +++ b/src/parser/hir/syntax_shape/flat_shape.rs @@ -0,0 +1,97 @@ +use crate::parser::{Delimiter, Flag, FlagKind, Operator, RawNumber, RawToken, TokenNode}; +use crate::{Span, Spanned, SpannedItem, Text}; + +#[derive(Debug, Copy, Clone)] +pub enum FlatShape { + OpenDelimiter(Delimiter), + CloseDelimiter(Delimiter), + ItVariable, + Variable, + Operator, + Dot, + InternalCommand, + ExternalCommand, + ExternalWord, + BareMember, + StringMember, + String, + Path, + Word, + Pipe, + GlobPattern, + Flag, + ShorthandFlag, + Int, + Decimal, + Whitespace, + Error, + Size { number: Span, unit: Span }, +} + +impl FlatShape { + pub fn from(token: &TokenNode, source: &Text, shapes: &mut Vec>) -> () { + match token { + TokenNode::Token(token) => match token.item { + RawToken::Number(RawNumber::Int(_)) => { + shapes.push(FlatShape::Int.spanned(token.span)) + } + RawToken::Number(RawNumber::Decimal(_)) => { + shapes.push(FlatShape::Decimal.spanned(token.span)) + } + RawToken::Operator(Operator::Dot) => { + shapes.push(FlatShape::Dot.spanned(token.span)) + } + RawToken::Operator(_) => shapes.push(FlatShape::Operator.spanned(token.span)), + RawToken::String(_) => shapes.push(FlatShape::String.spanned(token.span)), + RawToken::Variable(v) if v.slice(source) == "it" => { + shapes.push(FlatShape::ItVariable.spanned(token.span)) + } + RawToken::Variable(_) => shapes.push(FlatShape::Variable.spanned(token.span)), + RawToken::ExternalCommand(_) => { + shapes.push(FlatShape::ExternalCommand.spanned(token.span)) + } + RawToken::ExternalWord => shapes.push(FlatShape::ExternalWord.spanned(token.span)), + RawToken::GlobPattern => shapes.push(FlatShape::GlobPattern.spanned(token.span)), + RawToken::Bare => shapes.push(FlatShape::Word.spanned(token.span)), + }, + TokenNode::Call(_) => unimplemented!(), + TokenNode::Nodes(nodes) => { + for node in &nodes.item { + FlatShape::from(node, source, shapes); + } + } + TokenNode::Delimited(v) => { + shapes.push(FlatShape::OpenDelimiter(v.item.delimiter).spanned(v.item.spans.0)); + for token in &v.item.children { + FlatShape::from(token, source, shapes); + } + 
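FlatShape::from above walks the token tree depth-first and appends one spanned shape per leaf, plus open/close shapes around delimited nodes, which is the flat list the highlighter consumes. A compact sketch of that flattening pass over simplified stand-in types (Node, Shape, and Span here are illustrative, not the parser's own):

// Sketch: flatten a nested node tree into (shape, span) pairs for coloring.
#[derive(Clone, Copy, Debug)]
struct Span {
    start: usize,
    end: usize,
}

#[derive(Debug)]
enum Node {
    Word(Span),
    Dot(Span),
    Delimited { open: Span, close: Span, children: Vec<Node> },
}

#[derive(Debug, PartialEq)]
enum Shape {
    Word,
    Dot,
    OpenDelimiter,
    CloseDelimiter,
}

fn flatten(node: &Node, out: &mut Vec<(Shape, Span)>) {
    match node {
        Node::Word(span) => out.push((Shape::Word, *span)),
        Node::Dot(span) => out.push((Shape::Dot, *span)),
        Node::Delimited { open, close, children } => {
            // paint the opening delimiter, then the children, then the close
            out.push((Shape::OpenDelimiter, *open));
            for child in children {
                flatten(child, out);
            }
            out.push((Shape::CloseDelimiter, *close));
        }
    }
}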
shapes.push(FlatShape::CloseDelimiter(v.item.delimiter).spanned(v.item.spans.1)); + } + TokenNode::Pipeline(pipeline) => { + for part in &pipeline.parts { + if let Some(_) = part.pipe { + shapes.push(FlatShape::Pipe.spanned(part.span)); + } + } + } + TokenNode::Flag(Spanned { + item: + Flag { + kind: FlagKind::Longhand, + .. + }, + span, + }) => shapes.push(FlatShape::Flag.spanned(*span)), + TokenNode::Flag(Spanned { + item: + Flag { + kind: FlagKind::Shorthand, + .. + }, + span, + }) => shapes.push(FlatShape::ShorthandFlag.spanned(*span)), + TokenNode::Whitespace(_) => shapes.push(FlatShape::Whitespace.spanned(token.span())), + TokenNode::Error(v) => shapes.push(FlatShape::Error.spanned(v.span)), + } + } +} diff --git a/src/parser/hir/tokens_iterator.rs b/src/parser/hir/tokens_iterator.rs new file mode 100644 index 000000000..dbcf5e6c4 --- /dev/null +++ b/src/parser/hir/tokens_iterator.rs @@ -0,0 +1,477 @@ +pub(crate) mod debug; + +use crate::errors::ShellError; +use crate::parser::TokenNode; +use crate::{Span, Spanned, SpannedItem}; + +#[derive(Debug)] +pub struct TokensIterator<'content> { + tokens: &'content [TokenNode], + span: Span, + skip_ws: bool, + index: usize, + seen: indexmap::IndexSet, +} + +#[derive(Debug)] +pub struct Checkpoint<'content, 'me> { + pub(crate) iterator: &'me mut TokensIterator<'content>, + index: usize, + seen: indexmap::IndexSet, + committed: bool, +} + +impl<'content, 'me> Checkpoint<'content, 'me> { + pub(crate) fn commit(mut self) { + self.committed = true; + } +} + +impl<'content, 'me> std::ops::Drop for Checkpoint<'content, 'me> { + fn drop(&mut self) { + if !self.committed { + self.iterator.index = self.index; + self.iterator.seen = self.seen.clone(); + } + } +} + +#[derive(Debug)] +pub struct Peeked<'content, 'me> { + pub(crate) node: Option<&'content TokenNode>, + iterator: &'me mut TokensIterator<'content>, + from: usize, + to: usize, +} + +impl<'content, 'me> Peeked<'content, 'me> { + pub fn commit(&mut self) -> Option<&'content TokenNode> { + let Peeked { + node, + iterator, + from, + to, + } = self; + + let node = (*node)?; + iterator.commit(*from, *to); + Some(node) + } + + pub fn not_eof( + self, + expected: impl Into, + ) -> Result, ShellError> { + match self.node { + None => Err(ShellError::unexpected_eof( + expected, + self.iterator.eof_span(), + )), + Some(node) => Ok(PeekedNode { + node, + iterator: self.iterator, + from: self.from, + to: self.to, + }), + } + } + + pub fn type_error(&self, expected: impl Into) -> ShellError { + peek_error(&self.node, self.iterator.eof_span(), expected) + } +} + +#[derive(Debug)] +pub struct PeekedNode<'content, 'me> { + pub(crate) node: &'content TokenNode, + iterator: &'me mut TokensIterator<'content>, + from: usize, + to: usize, +} + +impl<'content, 'me> PeekedNode<'content, 'me> { + pub fn commit(self) -> &'content TokenNode { + let PeekedNode { + node, + iterator, + from, + to, + } = self; + + iterator.commit(from, to); + node + } + + pub fn rollback(self) {} + + pub fn type_error(&self, expected: impl Into) -> ShellError { + peek_error(&Some(self.node), self.iterator.eof_span(), expected) + } +} + +pub fn peek_error( + node: &Option<&TokenNode>, + eof_span: Span, + expected: impl Into, +) -> ShellError { + match node { + None => ShellError::unexpected_eof(expected, eof_span), + Some(node) => ShellError::type_error(expected, node.tagged_type_name()), + } +} + +impl<'content> TokensIterator<'content> { + pub fn new( + items: &'content [TokenNode], + span: Span, + skip_ws: bool, + ) -> 
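Peeked and PeekedNode above implement a look-don't-consume protocol: peeking hands back a handle over the next token, and only an explicit commit() marks the underlying indices as seen and advances the iterator. A small self-contained sketch of that pattern over a plain slice cursor; Cursor and Peek are illustrative stand-ins, not the crate's API.

// Sketch: a peek handle that only consumes input when explicitly committed.
struct Cursor<'a> {
    items: &'a [&'a str],
    index: usize,
}

struct Peek<'a, 'me> {
    node: Option<&'a str>,
    cursor: &'me mut Cursor<'a>,
    to: usize,
}

impl<'a> Cursor<'a> {
    fn peek(&mut self) -> Peek<'a, '_> {
        let node = self.items.get(self.index).copied();
        let to = self.index + 1;
        Peek { node, cursor: self, to }
    }
}

impl<'a, 'me> Peek<'a, 'me> {
    // Consume the peeked item by advancing the cursor; without this call
    // the cursor is left exactly where it was.
    fn commit(self) -> Option<&'a str> {
        let node = self.node?;
        self.cursor.index = self.to;
        Some(node)
    }
}

// let mut cursor = Cursor { items: &["ls", "|", "echo"], index: 0 };
// let peeked = cursor.peek();              // looks at "ls" without consuming it
// assert_eq!(peeked.commit(), Some("ls")); // committing advances the cursor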
TokensIterator<'content> { + TokensIterator { + tokens: items, + span, + skip_ws, + index: 0, + seen: indexmap::IndexSet::new(), + } + } + + pub fn all(tokens: &'content [TokenNode], span: Span) -> TokensIterator<'content> { + TokensIterator::new(tokens, span, false) + } + + pub fn len(&self) -> usize { + self.tokens.len() + } + + pub fn spanned( + &mut self, + block: impl FnOnce(&mut TokensIterator<'content>) -> T, + ) -> Spanned { + let start = self.span_at_cursor(); + + let result = block(self); + + let end = self.span_at_cursor(); + + result.spanned(start.until(end)) + } + + /// Use a checkpoint when you need to peek more than one token ahead, but can't be sure + /// that you'll succeed. + pub fn checkpoint<'me>(&'me mut self) -> Checkpoint<'content, 'me> { + let index = self.index; + let seen = self.seen.clone(); + + Checkpoint { + iterator: self, + index, + seen, + committed: false, + } + } + + /// Use a checkpoint when you need to peek more than one token ahead, but can't be sure + /// that you'll succeed. + pub fn atomic<'me, T>( + &'me mut self, + block: impl FnOnce(&mut TokensIterator<'content>) -> Result, + ) -> Result { + let index = self.index; + let seen = self.seen.clone(); + + let checkpoint = Checkpoint { + iterator: self, + index, + seen, + committed: false, + }; + + let value = block(checkpoint.iterator)?; + + checkpoint.commit(); + return Ok(value); + } + + fn eof_span(&self) -> Span { + Span::new(self.span.end(), self.span.end()) + } + + pub fn typed_span_at_cursor(&mut self) -> Spanned<&'static str> { + let next = self.peek_any(); + + match next.node { + None => "end".spanned(self.eof_span()), + Some(node) => node.spanned_type_name(), + } + } + + pub fn span_at_cursor(&mut self) -> Span { + let next = self.peek_any(); + + match next.node { + None => self.eof_span(), + Some(node) => node.span(), + } + } + + pub fn remove(&mut self, position: usize) { + self.seen.insert(position); + } + + pub fn at_end(&self) -> bool { + peek(self, self.skip_ws).is_none() + } + + pub fn at_end_possible_ws(&self) -> bool { + peek(self, true).is_none() + } + + pub fn advance(&mut self) { + self.seen.insert(self.index); + self.index += 1; + } + + pub fn extract(&mut self, f: impl Fn(&TokenNode) -> Option) -> Option<(usize, T)> { + for (i, item) in self.tokens.iter().enumerate() { + if self.seen.contains(&i) { + continue; + } + + match f(item) { + None => { + continue; + } + Some(value) => { + self.seen.insert(i); + return Some((i, value)); + } + } + } + + None + } + + pub fn move_to(&mut self, pos: usize) { + self.index = pos; + } + + pub fn restart(&mut self) { + self.index = 0; + } + + pub fn clone(&self) -> TokensIterator<'content> { + TokensIterator { + tokens: self.tokens, + span: self.span, + index: self.index, + seen: self.seen.clone(), + skip_ws: self.skip_ws, + } + } + + // Get the next token, not including whitespace + pub fn next_non_ws(&mut self) -> Option<&TokenNode> { + let mut peeked = start_next(self, true); + peeked.commit() + } + + // Peek the next token, not including whitespace + pub fn peek_non_ws<'me>(&'me mut self) -> Peeked<'content, 'me> { + start_next(self, true) + } + + // Peek the next token, including whitespace + pub fn peek_any<'me>(&'me mut self) -> Peeked<'content, 'me> { + start_next(self, false) + } + + // Peek the next token, including whitespace, but not EOF + pub fn peek_any_token<'me, T>( + &'me mut self, + block: impl FnOnce(&'content TokenNode) -> Result, + ) -> Result { + let peeked = start_next(self, false); + let peeked = 
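checkpoint() and atomic() above give multi-token lookahead with automatic rollback: the checkpoint snapshots the cursor state, and its Drop impl restores that snapshot unless commit() is called first. A condensed sketch of the same RAII pattern, reduced to a bare index rather than the crate's Checkpoint type:

// Sketch: RAII checkpoint that rolls a cursor back on drop unless committed.
struct Cursor {
    index: usize,
}

struct Checkpoint<'a> {
    cursor: &'a mut Cursor,
    saved_index: usize,
    committed: bool,
}

impl<'a> Checkpoint<'a> {
    fn commit(mut self) {
        // dropping `self` after this no longer rolls back
        self.committed = true;
    }
}

impl<'a> Drop for Checkpoint<'a> {
    fn drop(&mut self) {
        if !self.committed {
            self.cursor.index = self.saved_index;
        }
    }
}

impl Cursor {
    fn checkpoint(&mut self) -> Checkpoint<'_> {
        let saved_index = self.index;
        Checkpoint { cursor: self, saved_index, committed: false }
    }

    // Run a fallible block; keep its progress only if it succeeds.
    fn atomic<T, E>(
        &mut self,
        block: impl FnOnce(&mut Cursor) -> Result<T, E>,
    ) -> Result<T, E> {
        let checkpoint = self.checkpoint();
        let value = block(checkpoint.cursor)?;
        checkpoint.commit();
        Ok(value)
    }
}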
peeked.not_eof("invariant"); + + match peeked { + Err(err) => return Err(err), + Ok(peeked) => match block(peeked.node) { + Err(err) => return Err(err), + Ok(val) => { + peeked.commit(); + return Ok(val); + } + }, + } + } + + fn commit(&mut self, from: usize, to: usize) { + for index in from..to { + self.seen.insert(index); + } + + self.index = to; + } + + pub fn pos(&self, skip_ws: bool) -> Option { + peek_pos(self, skip_ws) + } + + pub fn debug_remaining(&self) -> Vec { + let mut tokens = self.clone(); + tokens.restart(); + tokens.cloned().collect() + } +} + +impl<'content> Iterator for TokensIterator<'content> { + type Item = &'content TokenNode; + + fn next(&mut self) -> Option<&'content TokenNode> { + next(self, self.skip_ws) + } +} + +fn peek<'content, 'me>( + iterator: &'me TokensIterator<'content>, + skip_ws: bool, +) -> Option<&'me TokenNode> { + let mut to = iterator.index; + + loop { + if to >= iterator.tokens.len() { + return None; + } + + if iterator.seen.contains(&to) { + to += 1; + continue; + } + + if to >= iterator.tokens.len() { + return None; + } + + let node = &iterator.tokens[to]; + + match node { + TokenNode::Whitespace(_) if skip_ws => { + to += 1; + } + _ => { + return Some(node); + } + } + } +} + +fn peek_pos<'content, 'me>( + iterator: &'me TokensIterator<'content>, + skip_ws: bool, +) -> Option { + let mut to = iterator.index; + + loop { + if to >= iterator.tokens.len() { + return None; + } + + if iterator.seen.contains(&to) { + to += 1; + continue; + } + + if to >= iterator.tokens.len() { + return None; + } + + let node = &iterator.tokens[to]; + + match node { + TokenNode::Whitespace(_) if skip_ws => { + to += 1; + } + _ => return Some(to), + } + } +} + +fn start_next<'content, 'me>( + iterator: &'me mut TokensIterator<'content>, + skip_ws: bool, +) -> Peeked<'content, 'me> { + let from = iterator.index; + let mut to = iterator.index; + + loop { + if to >= iterator.tokens.len() { + return Peeked { + node: None, + iterator, + from, + to, + }; + } + + if iterator.seen.contains(&to) { + to += 1; + continue; + } + + if to >= iterator.tokens.len() { + return Peeked { + node: None, + iterator, + from, + to, + }; + } + + let node = &iterator.tokens[to]; + + match node { + TokenNode::Whitespace(_) if skip_ws => { + to += 1; + } + _ => { + to += 1; + return Peeked { + node: Some(node), + iterator, + from, + to, + }; + } + } + } +} + +fn next<'me, 'content>( + iterator: &'me mut TokensIterator<'content>, + skip_ws: bool, +) -> Option<&'content TokenNode> { + loop { + if iterator.index >= iterator.tokens.len() { + return None; + } + + if iterator.seen.contains(&iterator.index) { + iterator.advance(); + continue; + } + + if iterator.index >= iterator.tokens.len() { + return None; + } + + match &iterator.tokens[iterator.index] { + TokenNode::Whitespace(_) if skip_ws => { + iterator.advance(); + } + other => { + iterator.advance(); + return Some(other); + } + } + } +} diff --git a/src/parser/hir/tokens_iterator/debug.rs b/src/parser/hir/tokens_iterator/debug.rs new file mode 100644 index 000000000..2e2672015 --- /dev/null +++ b/src/parser/hir/tokens_iterator/debug.rs @@ -0,0 +1,30 @@ +use crate::parser::hir::tokens_iterator::TokensIterator; +use crate::traits::ToDebug; + +#[derive(Debug)] +pub(crate) enum DebugIteratorToken { + Seen(String), + Unseen(String), + Cursor, +} + +pub(crate) fn debug_tokens(iterator: &TokensIterator, source: &str) -> Vec { + let mut out = vec![]; + + for (i, token) in iterator.tokens.iter().enumerate() { + if iterator.index == i { + 
out.push(DebugIteratorToken::Cursor); + } + + if iterator.seen.contains(&i) { + out.push(DebugIteratorToken::Seen(format!("{}", token.debug(source)))); + } else { + out.push(DebugIteratorToken::Unseen(format!( + "{}", + token.debug(source) + ))); + } + } + + out +} diff --git a/src/parser/parse/files.rs b/src/parser/parse/files.rs index afe75ddb2..8a2d3c90e 100644 --- a/src/parser/parse/files.rs +++ b/src/parser/parse/files.rs @@ -1,7 +1,7 @@ -use crate::Tag; +use crate::Span; use derive_new::new; use language_reporting::{FileName, Location}; -use uuid::Uuid; +use log::trace; #[derive(new, Debug, Clone)] pub struct Files { @@ -9,20 +9,20 @@ pub struct Files { } impl language_reporting::ReportingFiles for Files { - type Span = Tag; - type FileId = Uuid; + type Span = Span; + type FileId = usize; fn byte_span( &self, - file: Self::FileId, + _file: Self::FileId, from_index: usize, to_index: usize, ) -> Option { - Some(Tag::from((from_index, to_index, file))) + Some(Span::new(from_index, to_index)) } - fn file_id(&self, tag: Self::Span) -> Self::FileId { - tag.anchor + fn file_id(&self, _tag: Self::Span) -> Self::FileId { + 0 } fn file_name(&self, _file: Self::FileId) -> FileName { @@ -38,8 +38,18 @@ impl language_reporting::ReportingFiles for Files { let mut seen_lines = 0; let mut seen_bytes = 0; - for (pos, _) in source.match_indices('\n') { - if pos > byte_index { + for (pos, slice) in source.match_indices('\n') { + trace!( + "SEARCH={} SEEN={} POS={} SLICE={:?} LEN={} ALL={:?}", + byte_index, + seen_bytes, + pos, + slice, + source.len(), + source + ); + + if pos >= byte_index { return Some(language_reporting::Location::new( seen_lines, byte_index - seen_bytes, @@ -53,18 +63,18 @@ impl language_reporting::ReportingFiles for Files { if seen_lines == 0 { Some(language_reporting::Location::new(0, byte_index)) } else { - None + panic!("byte index {} wasn't valid", byte_index); } } - fn line_span(&self, file: Self::FileId, lineno: usize) -> Option { + fn line_span(&self, _file: Self::FileId, lineno: usize) -> Option { let source = &self.snippet; let mut seen_lines = 0; let mut seen_bytes = 0; for (pos, _) in source.match_indices('\n') { if seen_lines == lineno { - return Some(Tag::from((seen_bytes, pos, file))); + return Some(Span::new(seen_bytes, pos + 1)); } else { seen_lines += 1; seen_bytes = pos + 1; @@ -72,18 +82,20 @@ impl language_reporting::ReportingFiles for Files { } if seen_lines == 0 { - Some(Tag::from((0, self.snippet.len() - 1, file))) + Some(Span::new(0, self.snippet.len() - 1)) } else { None } } - fn source(&self, tag: Self::Span) -> Option { - if tag.span.start > tag.span.end { + fn source(&self, span: Self::Span) -> Option { + trace!("source(tag={:?}) snippet={:?}", span, self.snippet); + + if span.start() > span.end() { return None; - } else if tag.span.end >= self.snippet.len() { + } else if span.end() > self.snippet.len() { return None; } - Some(tag.slice(&self.snippet).to_string()) + Some(span.slice(&self.snippet).to_string()) } } diff --git a/src/parser/parse/flag.rs b/src/parser/parse/flag.rs index 09d1e8633..28b6749f1 100644 --- a/src/parser/parse/flag.rs +++ b/src/parser/parse/flag.rs @@ -1,4 +1,5 @@ -use crate::Tag; +use crate::parser::hir::syntax_shape::flat_shape::FlatShape; +use crate::{Span, Spanned, SpannedItem}; use derive_new::new; use getset::Getters; use serde::{Deserialize, Serialize}; @@ -12,6 +13,15 @@ pub enum FlagKind { #[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Getters, new)] #[get = "pub(crate)"] pub struct Flag { - kind: 
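The Files changes above compute report locations by scanning newline offsets, now accepting a byte index that lands exactly on a '\n'. A stand-alone sketch of that byte-offset-to-(line, column) arithmetic; it returns Option for the last-line case instead of panicking, and it is not the language_reporting API.

// Sketch: map a byte offset to a zero-based (line, column) pair by scanning
// newline positions, mirroring the `>=` comparison used above.
fn location(source: &str, byte_index: usize) -> Option<(usize, usize)> {
    let mut seen_lines = 0;
    let mut seen_bytes = 0;

    for (pos, _) in source.match_indices('\n') {
        if pos >= byte_index {
            // the offset falls on this line (or on its terminating newline)
            return Some((seen_lines, byte_index - seen_bytes));
        }
        seen_lines += 1;
        seen_bytes = pos + 1;
    }

    if seen_lines == 0 {
        // single-line source with no trailing newline
        Some((0, byte_index))
    } else if byte_index >= seen_bytes && byte_index <= source.len() {
        // offset on the last, unterminated line
        Some((seen_lines, byte_index - seen_bytes))
    } else {
        None
    }
}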
FlagKind, - name: Tag, + pub(crate) kind: FlagKind, + pub(crate) name: Span, +} + +impl Spanned { + pub fn color(&self) -> Spanned { + match self.item.kind { + FlagKind::Longhand => FlatShape::Flag.spanned(self.span), + FlagKind::Shorthand => FlatShape::ShorthandFlag.spanned(self.span), + } + } } diff --git a/src/parser/parse/operator.rs b/src/parser/parse/operator.rs index 82a04ed79..7b5a5c77d 100644 --- a/src/parser/parse/operator.rs +++ b/src/parser/parse/operator.rs @@ -11,6 +11,7 @@ pub enum Operator { GreaterThan, LessThanOrEqual, GreaterThanOrEqual, + Dot, } impl ToDebug for Operator { @@ -32,6 +33,7 @@ impl Operator { Operator::GreaterThan => ">", Operator::LessThanOrEqual => "<=", Operator::GreaterThanOrEqual => ">=", + Operator::Dot => ".", } } } @@ -52,6 +54,7 @@ impl FromStr for Operator { ">" => Ok(Operator::GreaterThan), "<=" => Ok(Operator::LessThanOrEqual), ">=" => Ok(Operator::GreaterThanOrEqual), + "." => Ok(Operator::Dot), _ => Err(()), } } diff --git a/src/parser/parse/parser.rs b/src/parser/parse/parser.rs index 33903ba37..793f7b6ce 100644 --- a/src/parser/parse/parser.rs +++ b/src/parser/parse/parser.rs @@ -14,24 +14,49 @@ use nom::combinator::*; use nom::multi::*; use nom::sequence::*; +use derive_new::new; use log::trace; use nom::dbg; use nom::*; use nom::{AsBytes, FindSubstring, IResult, InputLength, InputTake, Slice}; use nom_locate::{position, LocatedSpanEx}; +use nom_tracable::{tracable_parser, HasTracableInfo, TracableInfo}; use serde::{Deserialize, Serialize}; use std::fmt::Debug; use std::str::FromStr; -use uuid::Uuid; -pub type NomSpan<'a> = LocatedSpanEx<&'a str, Uuid>; +pub type NomSpan<'a> = LocatedSpanEx<&'a str, TracableContext>; -pub fn nom_input(s: &str, anchor: Uuid) -> NomSpan<'_> { - LocatedSpanEx::new_extra(s, anchor) +#[derive(Debug, Clone, Copy, PartialEq, new)] +pub struct TracableContext { + pub(crate) info: TracableInfo, +} + +impl HasTracableInfo for TracableContext { + fn get_tracable_info(&self) -> TracableInfo { + self.info + } + + fn set_tracable_info(mut self, info: TracableInfo) -> Self { + TracableContext { info } + } +} + +impl std::ops::Deref for TracableContext { + type Target = TracableInfo; + + fn deref(&self) -> &TracableInfo { + &self.info + } +} + +pub fn nom_input(s: &str) -> NomSpan<'_> { + LocatedSpanEx::new_extra(s, TracableContext::new(TracableInfo::new())) } macro_rules! operator { ($name:tt : $token:tt ) => { + #[tracable_parser] pub fn $name(input: NomSpan) -> IResult { let start = input.offset; let (input, tag) = tag(stringify!($token))(input)?; @@ -39,7 +64,7 @@ macro_rules! operator { Ok(( input, - TokenTreeBuilder::tagged_op(tag.fragment, (start, end, input.extra)), + TokenTreeBuilder::spanned_op(tag.fragment, Span::new(start, end)), )) } }; @@ -51,25 +76,7 @@ operator! { gte: >= } operator! { lte: <= } operator! { eq: == } operator! { neq: != } - -fn trace_step<'a, T: Debug>( - input: NomSpan<'a>, - name: &str, - block: impl FnOnce(NomSpan<'a>) -> IResult, T>, -) -> IResult, T> { - trace!(target: "nu::lite_parse", "+ before {} @ {:?}", name, input); - match block(input) { - Ok((input, result)) => { - trace!(target: "nu::lite_parse", "after {} @ {:?} -> {:?}", name, input, result); - Ok((input, result)) - } - - Err(e) => { - trace!(target: "nu::lite_parse", "- failed {} :: {:?}", name, e); - Err(e) - } - } -} +operator! { dot: . 
} #[derive(Debug, Clone, Eq, PartialEq, Hash, Ord, PartialOrd, Serialize, Deserialize)] pub enum Number { @@ -77,6 +84,15 @@ pub enum Number { Decimal(BigDecimal), } +impl std::fmt::Display for Number { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + Number::Int(int) => write!(f, "{}", int), + Number::Decimal(decimal) => write!(f, "{}", decimal), + } + } +} + macro_rules! primitive_int { ($($ty:ty)*) => { $( @@ -148,540 +164,482 @@ impl Into for BigDecimal { } } -pub fn raw_number(input: NomSpan) -> IResult> { +#[tracable_parser] +pub fn number(input: NomSpan) -> IResult { + let (input, number) = raw_number(input)?; + + Ok(( + input, + TokenTreeBuilder::spanned_number(number.item, number.span), + )) +} + +#[tracable_parser] +pub fn raw_number(input: NomSpan) -> IResult> { let anchoral = input; let start = input.offset; - trace_step(input, "raw_decimal", move |input| { - let (input, neg) = opt(tag("-"))(input)?; - let (input, head) = digit1(input)?; - let dot: IResult = tag(".")(input); + let (input, neg) = opt(tag("-"))(input)?; + let (input, head) = digit1(input)?; - let input = match dot { - Ok((input, dot)) => input, + match input.fragment.chars().next() { + None => return Ok((input, RawNumber::int(Span::new(start, input.offset)))), + Some('.') => (), + other if is_boundary(other) => { + return Ok((input, RawNumber::int(Span::new(start, input.offset)))) + } + _ => { + return Err(nom::Err::Error(nom::error::make_error( + input, + nom::error::ErrorKind::Tag, + ))) + } + } - // it's just an integer - Err(_) => return Ok((input, RawNumber::int((start, input.offset, input.extra)))), - }; + let dot: IResult = tag(".")(input); - let (input, tail) = digit1(input)?; + let input = match dot { + Ok((input, dot)) => input, - let end = input.offset; + // it's just an integer + Err(_) => return Ok((input, RawNumber::int(Span::new(start, input.offset)))), + }; - Ok((input, RawNumber::decimal((start, end, input.extra)))) - }) + let (input, tail) = digit1(input)?; + + let end = input.offset; + + let next = input.fragment.chars().next(); + + if is_boundary(next) { + Ok((input, RawNumber::decimal(Span::new(start, end)))) + } else { + Err(nom::Err::Error(nom::error::make_error( + input, + nom::error::ErrorKind::Tag, + ))) + } } +#[tracable_parser] pub fn operator(input: NomSpan) -> IResult { - trace_step(input, "operator", |input| { - let (input, operator) = alt((gte, lte, neq, gt, lt, eq))(input)?; + let (input, operator) = alt((gte, lte, neq, gt, lt, eq))(input)?; - Ok((input, operator)) - }) + Ok((input, operator)) } +#[tracable_parser] pub fn dq_string(input: NomSpan) -> IResult { - trace_step(input, "dq_string", |input| { - let start = input.offset; - let (input, _) = char('"')(input)?; - let start1 = input.offset; - let (input, _) = many0(none_of("\""))(input)?; - let end1 = input.offset; - let (input, _) = char('"')(input)?; - let end = input.offset; - Ok(( - input, - TokenTreeBuilder::tagged_string((start1, end1, input.extra), (start, end, input.extra)), - )) - }) + let start = input.offset; + let (input, _) = char('"')(input)?; + let start1 = input.offset; + let (input, _) = many0(none_of("\""))(input)?; + let end1 = input.offset; + let (input, _) = char('"')(input)?; + let end = input.offset; + Ok(( + input, + TokenTreeBuilder::spanned_string(Span::new(start1, end1), Span::new(start, end)), + )) } +#[tracable_parser] pub fn sq_string(input: NomSpan) -> IResult { - trace_step(input, "sq_string", move |input| { - let start = input.offset; - let (input, _) = 
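raw_number above now only accepts digits when the following character is a boundary (end of input, whitespace, or a closing delimiter), so a sequence like 456GB falls through to the bare-word parser instead of becoming a number. A simplified, string-based sketch of that rule; parse_bounded_int is illustrative and not the nom parser's real signature, though is_boundary matches the helper defined later in this file.

// Sketch: accept digits as a number only when followed by a boundary,
// so "456GB" stays a bare word while "123", "123)" and "123 " are numbers.
fn is_boundary(c: Option<char>) -> bool {
    match c {
        None => true,
        Some(')') | Some(']') | Some('}') => true,
        Some(c) if c.is_whitespace() => true,
        _ => false,
    }
}

// Returns the parsed integer and the remaining input, or None when the
// digits are not terminated by a boundary.
fn parse_bounded_int(input: &str) -> Option<(i64, &str)> {
    let digits_len = input.chars().take_while(|c| c.is_ascii_digit()).count();
    if digits_len == 0 {
        return None;
    }

    let (digits, rest) = input.split_at(digits_len);
    if is_boundary(rest.chars().next()) {
        Some((digits.parse().ok()?, rest))
    } else {
        None
    }
}

// This is why the updated tests below expect b::bare("456GB") and
// b::bare("450MB") rather than size tokens.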
char('\'')(input)?; - let start1 = input.offset; - let (input, _) = many0(none_of("\'"))(input)?; - let end1 = input.offset; - let (input, _) = char('\'')(input)?; - let end = input.offset; + let start = input.offset; + let (input, _) = char('\'')(input)?; + let start1 = input.offset; + let (input, _) = many0(none_of("\'"))(input)?; + let end1 = input.offset; + let (input, _) = char('\'')(input)?; + let end = input.offset; - Ok(( - input, - TokenTreeBuilder::tagged_string((start1, end1, input.extra), (start, end, input.extra)), - )) - }) + Ok(( + input, + TokenTreeBuilder::spanned_string(Span::new(start1, end1), Span::new(start, end)), + )) } +#[tracable_parser] pub fn string(input: NomSpan) -> IResult { - trace_step(input, "string", move |input| { - alt((sq_string, dq_string))(input) - }) + alt((sq_string, dq_string))(input) } +#[tracable_parser] pub fn external(input: NomSpan) -> IResult { - trace_step(input, "external", move |input| { - let start = input.offset; - let (input, _) = tag("^")(input)?; - let (input, bare) = take_while(is_bare_char)(input)?; - let end = input.offset; + let start = input.offset; + let (input, _) = tag("^")(input)?; + let (input, bare) = take_while(is_bare_char)(input)?; + let end = input.offset; - Ok(( - input, - TokenTreeBuilder::tagged_external(bare, (start, end, input.extra)), - )) - }) + Ok(( + input, + TokenTreeBuilder::spanned_external_command(bare, Span::new(start, end)), + )) } +#[tracable_parser] pub fn pattern(input: NomSpan) -> IResult { - trace_step(input, "bare", move |input| { - let start = input.offset; - let (input, _) = take_while1(is_start_glob_char)(input)?; - let (input, _) = take_while(is_glob_char)(input)?; + let start = input.offset; + let (input, _) = take_while1(is_start_glob_char)(input)?; + let (input, _) = take_while(is_glob_char)(input)?; - let next_char = &input.fragment.chars().nth(0); + let next_char = &input.fragment.chars().nth(0); - if let Some(next_char) = next_char { - if is_external_word_char(*next_char) { - return Err(nom::Err::Error(nom::error::make_error( - input, - nom::error::ErrorKind::TakeWhile1, - ))); - } + if let Some(next_char) = next_char { + if is_external_word_char(*next_char) { + return Err(nom::Err::Error(nom::error::make_error( + input, + nom::error::ErrorKind::TakeWhile1, + ))); } + } - let end = input.offset; + let end = input.offset; - Ok(( - input, - TokenTreeBuilder::tagged_pattern((start, end, input.extra)), - )) - }) + Ok(( + input, + TokenTreeBuilder::spanned_pattern(Span::new(start, end)), + )) } +#[tracable_parser] pub fn bare(input: NomSpan) -> IResult { - trace_step(input, "bare", move |input| { - let start = input.offset; - let (input, _) = take_while1(is_start_bare_char)(input)?; - let (input, _) = take_while(is_bare_char)(input)?; + let start = input.offset; + let (input, _) = take_while1(is_start_bare_char)(input)?; + let (input, last) = take_while(is_bare_char)(input)?; - let next_char = &input.fragment.chars().nth(0); + let next_char = &input.fragment.chars().nth(0); + let prev_char = last.fragment.chars().nth(0); - if let Some(next_char) = next_char { - if is_external_word_char(*next_char) || is_glob_specific_char(*next_char) { - return Err(nom::Err::Error(nom::error::make_error( - input, - nom::error::ErrorKind::TakeWhile1, - ))); - } + // if let (Some(prev), Some(next)) = (prev_char, next_char) { + // if prev == '.' 
&& is_member_start(*next) { + // return Err(nom::Err::Error(nom::error::make_error( + // input, + // nom::error::ErrorKind::TakeWhile1, + // ))); + // } + // } + + if let Some(next_char) = next_char { + if is_external_word_char(*next_char) || is_glob_specific_char(*next_char) { + return Err(nom::Err::Error(nom::error::make_error( + input, + nom::error::ErrorKind::TakeWhile1, + ))); } + } - let end = input.offset; + let end = input.offset; - Ok(( - input, - TokenTreeBuilder::tagged_bare((start, end, input.extra)), - )) - }) + Ok((input, TokenTreeBuilder::spanned_bare(Span::new(start, end)))) } +#[tracable_parser] pub fn external_word(input: NomSpan) -> IResult { - trace_step(input, "bare", move |input| { - let start = input.offset; - let (input, _) = take_while1(is_external_word_char)(input)?; - let end = input.offset; + let start = input.offset; + let (input, _) = take_while1(is_external_word_char)(input)?; + let end = input.offset; - Ok(( - input, - TokenTreeBuilder::tagged_external_word((start, end, input.extra)), - )) - }) + Ok(( + input, + TokenTreeBuilder::spanned_external_word(Span::new(start, end)), + )) } +#[tracable_parser] pub fn var(input: NomSpan) -> IResult { - trace_step(input, "var", move |input| { - let start = input.offset; - let (input, _) = tag("$")(input)?; - let (input, bare) = member(input)?; - let end = input.offset; + let start = input.offset; + let (input, _) = tag("$")(input)?; + let (input, bare) = ident(input)?; + let end = input.offset; - Ok(( - input, - TokenTreeBuilder::tagged_var(bare.tag(), (start, end, input.extra)), - )) - }) + Ok(( + input, + TokenTreeBuilder::spanned_var(bare, Span::new(start, end)), + )) } -pub fn member(input: NomSpan) -> IResult { - trace_step(input, "identifier", move |input| { - let start = input.offset; - let (input, _) = take_while1(is_id_start)(input)?; - let (input, _) = take_while(is_id_continue)(input)?; +#[tracable_parser] +pub fn ident(input: NomSpan) -> IResult { + let start = input.offset; + let (input, _) = take_while1(is_start_bare_char)(input)?; + let (input, _) = take_while(is_bare_char)(input)?; + let end = input.offset; - let end = input.offset; - - Ok(( - input, - TokenTreeBuilder::tagged_member((start, end, input.extra)), - )) - }) + Ok((input, Tag::from((start, end, None)))) } +#[tracable_parser] pub fn flag(input: NomSpan) -> IResult { - trace_step(input, "flag", move |input| { - let start = input.offset; - let (input, _) = tag("--")(input)?; - let (input, bare) = bare(input)?; - let end = input.offset; + let start = input.offset; + let (input, _) = tag("--")(input)?; + let (input, bare) = bare(input)?; + let end = input.offset; - Ok(( - input, - TokenTreeBuilder::tagged_flag(bare.tag(), (start, end, input.extra)), - )) - }) + Ok(( + input, + TokenTreeBuilder::spanned_flag(bare.span(), Span::new(start, end)), + )) } +#[tracable_parser] pub fn shorthand(input: NomSpan) -> IResult { - trace_step(input, "shorthand", move |input| { - let start = input.offset; - let (input, _) = tag("-")(input)?; - let (input, bare) = bare(input)?; - let end = input.offset; + let start = input.offset; + let (input, _) = tag("-")(input)?; + let (input, bare) = bare(input)?; + let end = input.offset; - Ok(( - input, - TokenTreeBuilder::tagged_shorthand(bare.tag(), (start, end, input.extra)), - )) - }) -} - -pub fn raw_unit(input: NomSpan) -> IResult> { - trace_step(input, "raw_unit", move |input| { - let start = input.offset; - let (input, unit) = alt(( - tag("B"), - tag("b"), - tag("KB"), - tag("kb"), - tag("Kb"), - tag("K"), - 
tag("k"), - tag("MB"), - tag("mb"), - tag("Mb"), - tag("GB"), - tag("gb"), - tag("Gb"), - tag("TB"), - tag("tb"), - tag("Tb"), - tag("PB"), - tag("pb"), - tag("Pb"), - ))(input)?; - let end = input.offset; - - Ok(( - input, - Unit::from(unit.fragment).tagged((start, end, input.extra)), - )) - }) -} - -pub fn size(input: NomSpan) -> IResult { - trace_step(input, "size", move |input| { - let mut is_size = false; - let start = input.offset; - let (input, number) = raw_number(input)?; - if let Ok((input, Some(size))) = opt(raw_unit)(input) { - let end = input.offset; - - // Check to make sure there is no trailing parseable characters - if let Ok((input, Some(extra))) = opt(bare)(input) { - return Err(nom::Err::Error((input, nom::error::ErrorKind::Char))); - } - - Ok(( - input, - TokenTreeBuilder::tagged_size((number.item, *size), (start, end, input.extra)), - )) - } else { - let end = input.offset; - - // Check to make sure there is no trailing parseable characters - if let Ok((input, Some(extra))) = opt(bare)(input) { - return Err(nom::Err::Error((input, nom::error::ErrorKind::Char))); - } - - Ok(( - input, - TokenTreeBuilder::tagged_number(number.item, number.tag), - )) - } - }) + Ok(( + input, + TokenTreeBuilder::spanned_shorthand(bare.span(), Span::new(start, end)), + )) } +#[tracable_parser] pub fn leaf(input: NomSpan) -> IResult { - trace_step(input, "leaf", move |input| { - let (input, node) = alt(( - size, - string, - operator, - flag, - shorthand, - var, - external, - bare, - pattern, - external_word, - ))(input)?; + let (input, node) = alt((number, string, operator, flag, shorthand, var, external))(input)?; - Ok((input, node)) - }) + Ok((input, node)) } -pub fn token_list(input: NomSpan) -> IResult> { - trace_step(input, "token_list", move |input| { - let (input, first) = node(input)?; - let (input, list) = many0(pair(space1, node))(input)?; +#[tracable_parser] +pub fn token_list(input: NomSpan) -> IResult>> { + let start = input.offset; + let (input, first) = node(input)?; - Ok((input, make_token_list(None, first, list, None))) - }) + let (input, mut list) = many0(pair(alt((whitespace, dot)), node))(input)?; + + let end = input.offset; + + Ok(( + input, + make_token_list(first, list, None).tagged((start, end, None)), + )) } -pub fn spaced_token_list(input: NomSpan) -> IResult> { - trace_step(input, "spaced_token_list", move |input| { - let (input, sp_left) = opt(space1)(input)?; - let (input, first) = node(input)?; - let (input, list) = many0(pair(space1, node))(input)?; - let (input, sp_right) = opt(space1)(input)?; +#[tracable_parser] +pub fn spaced_token_list(input: NomSpan) -> IResult>> { + let start = input.offset; + let (input, pre_ws) = opt(whitespace)(input)?; + let (input, items) = token_list(input)?; + let (input, post_ws) = opt(whitespace)(input)?; + let end = input.offset; - Ok((input, make_token_list(sp_left, first, list, sp_right))) - }) + let mut out = vec![]; + + out.extend(pre_ws); + out.extend(items.item); + out.extend(post_ws); + + Ok((input, out.spanned(Span::new(start, end)))) } fn make_token_list( - sp_left: Option, - first: TokenNode, - list: Vec<(NomSpan, TokenNode)>, - sp_right: Option, + first: Vec, + list: Vec<(TokenNode, Vec)>, + sp_right: Option, ) -> Vec { let mut nodes = vec![]; - if let Some(sp_left) = sp_left { - nodes.push(TokenNode::Whitespace(Tag::from(sp_left))); - } + nodes.extend(first); - nodes.push(first); - - for (ws, token) in list { - nodes.push(TokenNode::Whitespace(Tag::from(ws))); - nodes.push(token); + for (left, right) in list { + 
nodes.push(left); + nodes.extend(right); } if let Some(sp_right) = sp_right { - nodes.push(TokenNode::Whitespace(Tag::from(sp_right))); + nodes.push(sp_right); } nodes } +#[tracable_parser] pub fn whitespace(input: NomSpan) -> IResult { - trace_step(input, "whitespace", move |input| { - let left = input.offset; - let (input, ws1) = space1(input)?; - let right = input.offset; + let left = input.offset; + let (input, ws1) = space1(input)?; + let right = input.offset; - Ok(( - input, - TokenTreeBuilder::tagged_ws((left, right, input.extra)), - )) - }) + Ok((input, TokenTreeBuilder::spanned_ws(Span::new(left, right)))) } -pub fn delimited_paren(input: NomSpan) -> IResult { - trace_step(input, "delimited_paren", move |input| { - let left = input.offset; - let (input, _) = char('(')(input)?; - let (input, ws1) = opt(whitespace)(input)?; - let (input, inner_items) = opt(token_list)(input)?; - let (input, ws2) = opt(whitespace)(input)?; - let (input, _) = char(')')(input)?; - let right = input.offset; +pub fn delimited( + input: NomSpan, + delimiter: Delimiter, +) -> IResult>)> { + let left = input.offset; + let (input, open_span) = tag(delimiter.open())(input)?; + let (input, inner_items) = opt(spaced_token_list)(input)?; + let (input, close_span) = tag(delimiter.close())(input)?; + let right = input.offset; - let mut items = vec![]; + let mut items = vec![]; - if let Some(space) = ws1 { - items.push(space); - } + if let Some(inner_items) = inner_items { + items.extend(inner_items.item); + } - if let Some(inner_items) = inner_items { - items.extend(inner_items); - } - - if let Some(space) = ws2 { - items.push(space); - } - - Ok(( - input, - TokenTreeBuilder::tagged_parens(items, (left, right, input.extra)), - )) - }) -} - -pub fn delimited_square(input: NomSpan) -> IResult { - trace_step(input, "delimited_paren", move |input| { - let left = input.offset; - let (input, _) = char('[')(input)?; - let (input, ws1) = opt(whitespace)(input)?; - let (input, inner_items) = opt(token_list)(input)?; - let (input, ws2) = opt(whitespace)(input)?; - let (input, _) = char(']')(input)?; - let right = input.offset; - - let mut items = vec![]; - - if let Some(space) = ws1 { - items.push(space); - } - - if let Some(inner_items) = inner_items { - items.extend(inner_items); - } - - if let Some(space) = ws2 { - items.push(space); - } - - Ok(( - input, - TokenTreeBuilder::tagged_square(items, (left, right, input.extra)), - )) - }) -} - -pub fn delimited_brace(input: NomSpan) -> IResult { - trace_step(input, "delimited_brace", move |input| { - let left = input.offset; - let (input, _) = char('{')(input)?; - let (input, _) = opt(space1)(input)?; - let (input, items) = opt(token_list)(input)?; - let (input, _) = opt(space1)(input)?; - let (input, _) = char('}')(input)?; - let right = input.offset; - - Ok(( - input, - TokenTreeBuilder::tagged_brace( - items.unwrap_or_else(|| vec![]), - (left, right, input.extra), - ), - )) - }) -} - -pub fn raw_call(input: NomSpan) -> IResult> { - trace_step(input, "raw_call", move |input| { - let left = input.offset; - let (input, items) = token_list(input)?; - let right = input.offset; - - Ok(( - input, - TokenTreeBuilder::tagged_call(items, (left, right, input.extra)), - )) - }) -} - -pub fn path(input: NomSpan) -> IResult { - trace_step(input, "path", move |input| { - let left = input.offset; - let (input, head) = node1(input)?; - let (input, _) = tag(".")(input)?; - let (input, tail) = separated_list(tag("."), alt((member, string)))(input)?; - let right = input.offset; - - Ok(( - 
input, - TokenTreeBuilder::tagged_path((head, tail), (left, right, input.extra)), - )) - }) -} - -pub fn node1(input: NomSpan) -> IResult { - trace_step(input, "node1", alt((leaf, delimited_paren))) -} - -pub fn node(input: NomSpan) -> IResult { - trace_step( + Ok(( input, - "node", - alt(( - path, - leaf, - delimited_paren, - delimited_brace, - delimited_square, - )), - ) + ( + Span::from(open_span), + Span::from(close_span), + items.spanned(Span::new(left, right)), + ), + )) } +#[tracable_parser] +pub fn delimited_paren(input: NomSpan) -> IResult { + let (input, (left, right, tokens)) = delimited(input, Delimiter::Paren)?; + + Ok(( + input, + TokenTreeBuilder::spanned_parens(tokens.item, (left, right), tokens.span), + )) +} + +#[tracable_parser] +pub fn delimited_square(input: NomSpan) -> IResult { + let (input, (left, right, tokens)) = delimited(input, Delimiter::Square)?; + + Ok(( + input, + TokenTreeBuilder::spanned_square(tokens.item, (left, right), tokens.span), + )) +} + +#[tracable_parser] +pub fn delimited_brace(input: NomSpan) -> IResult { + let (input, (left, right, tokens)) = delimited(input, Delimiter::Brace)?; + + Ok(( + input, + TokenTreeBuilder::spanned_square(tokens.item, (left, right), tokens.span), + )) +} + +#[tracable_parser] +pub fn raw_call(input: NomSpan) -> IResult> { + let left = input.offset; + let (input, items) = token_list(input)?; + let right = input.offset; + + Ok(( + input, + TokenTreeBuilder::tagged_call(items.item, (left, right, input.extra)), + )) +} + +#[tracable_parser] +pub fn bare_path(input: NomSpan) -> IResult> { + let (input, head) = alt((bare, dot))(input)?; + + let (input, tail) = many0(alt((bare, dot, string)))(input)?; + + let next_char = &input.fragment.chars().nth(0); + + if is_boundary(*next_char) { + let mut result = vec![head]; + result.extend(tail); + + Ok((input, result)) + } else { + Err(nom::Err::Error(nom::error::make_error( + input, + nom::error::ErrorKind::Many0, + ))) + } +} + +#[tracable_parser] +pub fn pattern_path(input: NomSpan) -> IResult> { + let (input, head) = alt((pattern, dot))(input)?; + + let (input, tail) = many0(alt((pattern, dot, string)))(input)?; + + let next_char = &input.fragment.chars().nth(0); + + if is_boundary(*next_char) { + let mut result = vec![head]; + result.extend(tail); + + Ok((input, result)) + } else { + Err(nom::Err::Error(nom::error::make_error( + input, + nom::error::ErrorKind::Many0, + ))) + } +} + +#[tracable_parser] +pub fn node1(input: NomSpan) -> IResult { + alt((leaf, bare, pattern, external_word, delimited_paren))(input) +} + +#[tracable_parser] +pub fn node(input: NomSpan) -> IResult> { + alt(( + to_list(leaf), + bare_path, + pattern_path, + to_list(external_word), + to_list(delimited_paren), + to_list(delimited_brace), + to_list(delimited_square), + ))(input) +} + +fn to_list( + parser: impl Fn(NomSpan) -> IResult, +) -> impl Fn(NomSpan) -> IResult> { + move |input| { + let (input, next) = parser(input)?; + + Ok((input, vec![next])) + } +} + +#[tracable_parser] +pub fn nodes(input: NomSpan) -> IResult { + let (input, tokens) = token_list(input)?; + + Ok(( + input, + TokenTreeBuilder::tagged_token_list(tokens.item, tokens.tag), + )) +} + +#[tracable_parser] pub fn pipeline(input: NomSpan) -> IResult { - trace_step(input, "pipeline", |input| { - let start = input.offset; - let (input, head) = opt(tuple((opt(space1), raw_call, opt(space1))))(input)?; - let (input, items) = trace_step( + let start = input.offset; + let (input, head) = spaced_token_list(input)?; + let (input, items) = 
many0(tuple((tag("|"), spaced_token_list)))(input)?; + + if input.input_len() != 0 { + return Err(Err::Error(error_position!( input, - "many0", - many0(tuple((tag("|"), opt(space1), raw_call, opt(space1)))), - )?; - - let (input, tail) = opt(space1)(input)?; - let (input, newline) = opt(multispace1)(input)?; - - if input.input_len() != 0 { - return Err(Err::Error(error_position!( - input, - nom::error::ErrorKind::Eof - ))); - } - - let end = input.offset; - - Ok(( - input, - TokenTreeBuilder::tagged_pipeline( - (make_call_list(head, items), tail.map(Tag::from)), - (start, end, input.extra), - ), - )) - }) -} - -fn make_call_list( - head: Option<(Option, Tagged, Option)>, - items: Vec<(NomSpan, Option, Tagged, Option)>, -) -> Vec { - let mut out = vec![]; - - if let Some(head) = head { - let el = PipelineElement::new(None, head.0.map(Tag::from), head.1, head.2.map(Tag::from)); - - out.push(el); + nom::error::ErrorKind::Eof + ))); } - for (pipe, ws1, call, ws2) in items { - let el = PipelineElement::new( - Some(pipe).map(Tag::from), - ws1.map(Tag::from), - call, - ws2.map(Tag::from), - ); + let end = input.offset; - out.push(el); - } + let head_span = head.span; + let mut all_items: Vec> = + vec![PipelineElement::new(None, head).spanned(head_span)]; - out + all_items.extend(items.into_iter().map(|(pipe, items)| { + let items_span = items.span; + PipelineElement::new(Some(Span::from(pipe)), items) + .spanned(Span::from(pipe).until(items_span)) + })); + + Ok(( + input, + TokenTreeBuilder::spanned_pipeline(all_items, Span::new(start, end)), + )) } fn int(frag: &str, neg: Option) -> i64 { @@ -693,9 +651,19 @@ fn int(frag: &str, neg: Option) -> i64 { } } +fn is_boundary(c: Option) -> bool { + match c { + None => true, + Some(')') | Some(']') | Some('}') => true, + Some(c) if c.is_whitespace() => true, + _ => false, + } +} + fn is_external_word_char(c: char) -> bool { match c { - ';' | '|' | '#' | '-' | '"' | '\'' | '$' | '(' | ')' | '[' | ']' | '{' | '}' | '`' => false, + ';' | '|' | '#' | '-' | '"' | '\'' | '$' | '(' | ')' | '[' | ']' | '{' | '}' | '`' + | '.' => false, other if other.is_whitespace() => false, _ => true, } @@ -717,8 +685,7 @@ fn is_glob_char(c: char) -> bool { fn is_start_bare_char(c: char) -> bool { match c { '+' => false, - _ if c.is_alphabetic() => true, - '.' => true, + _ if c.is_alphanumeric() => true, '\\' => true, '/' => true, '_' => true, @@ -732,7 +699,6 @@ fn is_bare_char(c: char) -> bool { match c { '+' => false, _ if c.is_alphanumeric() => true, - '.' => true, '\\' => true, '/' => true, '_' => true, @@ -759,6 +725,16 @@ fn is_id_continue(c: char) -> bool { } } +fn is_member_start(c: char) -> bool { + match c { + '"' | '\'' => true, + '1'..='9' => true, + + other if is_id_start(other) => true, + _ => false, + } +} + #[cfg(test)] mod tests { use super::*; @@ -768,45 +744,10 @@ mod tests { pub type CurriedNode = Box T + 'static>; - macro_rules! assert_leaf { - (parsers [ $($name:tt)* ] $input:tt -> $left:tt .. $right:tt { $kind:tt $parens:tt } ) => { - $( - assert_eq!( - apply($name, stringify!($name), $input), - token(RawToken::$kind $parens, $left, $right) - ); - )* - - assert_eq!( - apply(leaf, "leaf", $input), - token(RawToken::$kind $parens, $left, $right) - ); - - assert_eq!( - apply(leaf, "leaf", $input), - token(RawToken::$kind $parens, $left, $right) - ); - - assert_eq!( - apply(node, "node", $input), - token(RawToken::$kind $parens, $left, $right) - ); - }; - - (parsers [ $($name:tt)* ] $input:tt -> $left:tt .. 
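The rewritten pipeline parser above keeps each stage as a spanned token list with an optional leading pipe span, instead of a pre-parsed call with captured whitespace on both sides. A rough string-based sketch of that splitting shape; Stage and split_pipeline are illustrative only, since the real parser operates on token nodes rather than raw strings.

// Sketch: split a command line into pipeline stages, remembering where each
// `|` sits so later stages can report spans.
#[derive(Debug, PartialEq)]
struct Stage<'a> {
    pipe: Option<usize>, // byte offset of the `|` that introduced this stage
    text: &'a str,
}

fn split_pipeline(line: &str) -> Vec<Stage<'_>> {
    let mut stages = Vec::new();
    let mut start = 0;
    let mut pipe = None;

    for (pos, ch) in line.char_indices() {
        if ch == '|' {
            stages.push(Stage { pipe, text: &line[start..pos] });
            pipe = Some(pos);
            start = pos + 1;
        }
    }

    stages.push(Stage { pipe, text: &line[start..] });
    stages
}

// split_pipeline("sys | echo") yields a head stage with no pipe and a second
// stage introduced by the `|` at offset 4, mirroring the two spaced token
// lists in the "sys | echo" test_pipeline case.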
$right:tt { $kind:tt } ) => { - $( - assert_eq!( - apply($name, stringify!($name), $input), - token(RawToken::$kind, $left, $right) - ); - )* - } - } - macro_rules! equal_tokens { ($source:tt -> $tokens:expr) => { let result = apply(pipeline, "pipeline", $source); - let (expected_tree, expected_source) = TokenTreeBuilder::build(uuid::Uuid::nil(), $tokens); + let (expected_tree, expected_source) = TokenTreeBuilder::build($tokens); if result != expected_tree { let debug_result = format!("{}", result.debug($source)); @@ -823,53 +764,50 @@ mod tests { assert_eq!(debug_result, debug_expected) } } - - // apply(pipeline, "pipeline", r#"cargo +nightly run"#), - // build_token(b::pipeline(vec![( - // None, - // b::call( - // b::bare("cargo"), - // vec![ - // b::sp(), - // b::external_word("+nightly"), - // b::sp(), - // b::bare("run") - // ] - // ), - // None - // )])) }; + + (<$parser:tt> $source:tt -> $tokens:expr) => { + let result = apply($parser, stringify!($parser), $source); + let (expected_tree, expected_source) = TokenTreeBuilder::build($tokens); + + if result != expected_tree { + let debug_result = format!("{}", result.debug($source)); + let debug_expected = format!("{}", expected_tree.debug(&expected_source)); + + if debug_result == debug_expected { + assert_eq!( + result, expected_tree, + "NOTE: actual and expected had equivalent debug serializations, source={:?}, debug_expected={:?}", + $source, + debug_expected + ) + } else { + assert_eq!(debug_result, debug_expected) + } + } + }; + } #[test] fn test_integer() { - assert_leaf! { - parsers [ size ] - "123" -> 0..3 { Number(RawNumber::int((0, 3, test_uuid())).item) } + equal_tokens! { + + "123" -> b::token_list(vec![b::int(123)]) } - assert_leaf! { - parsers [ size ] - "-123" -> 0..4 { Number(RawNumber::int((0, 4, test_uuid())).item) } - } - } - - #[test] - fn test_size() { - assert_leaf! { - parsers [ size ] - "123MB" -> 0..5 { Size(RawNumber::int((0, 3, test_uuid())).item, Unit::MB) } - } - - assert_leaf! { - parsers [ size ] - "10GB" -> 0..4 { Size(RawNumber::int((0, 2, test_uuid())).item, Unit::GB) } + equal_tokens! { + + "-123" -> b::token_list(vec![b::int(-123)]) } } #[test] fn test_operator() { - assert_eq!(apply(node, "node", ">"), build_token(b::op(">"))); + equal_tokens! { + + ">" -> b::token_list(vec![b::op(">")]) + } // assert_leaf! { // parsers [ operator ] @@ -899,37 +837,50 @@ mod tests { #[test] fn test_string() { - assert_leaf! { - parsers [ string dq_string ] - r#""hello world""# -> 0..13 { String(tag(1, 12)) } + equal_tokens! { + + r#""hello world""# -> b::token_list(vec![b::string("hello world")]) } - assert_leaf! { - parsers [ string sq_string ] - r"'hello world'" -> 0..13 { String(tag(1, 12)) } + equal_tokens! { + + r#"'hello world'"# -> b::token_list(vec![b::string("hello world")]) } } #[test] fn test_bare() { - assert_leaf! { - parsers [ bare ] - "hello" -> 0..5 { Bare } + equal_tokens! { + + "hello" -> b::token_list(vec![b::bare("hello")]) + } + } + + #[test] + fn test_simple_path() { + equal_tokens! { + + "450MB" -> b::token_list(vec![b::bare("450MB")]) } - assert_leaf! { - parsers [ bare ] - "chrome.exe" -> 0..10 { Bare } + equal_tokens! { + + "chrome.exe" -> b::token_list(vec![b::bare("chrome"), b::op(Operator::Dot), b::bare("exe")]) } - assert_leaf! { - parsers [ bare ] - r"C:\windows\system.dll" -> 0..21 { Bare } + equal_tokens! { + + ".azure" -> b::token_list(vec![b::op(Operator::Dot), b::bare("azure")]) } - assert_leaf! 
{ - parsers [ bare ] - r"C:\Code\-testing\my_tests.js" -> 0..28 { Bare } + equal_tokens! { + + r"C:\windows\system.dll" -> b::token_list(vec![b::bare(r"C:\windows\system"), b::op(Operator::Dot), b::bare("dll")]) + } + + equal_tokens! { + + r"C:\Code\-testing\my_tests.js" -> b::token_list(vec![b::bare(r"C:\Code\-testing\my_tests"), b::op(Operator::Dot), b::bare("js")]) } } @@ -956,223 +907,170 @@ mod tests { #[test] fn test_variable() { - assert_leaf! { - parsers [ var ] - "$it" -> 0..3 { Variable(tag(1, 3)) } + equal_tokens! { + + "$it" -> b::token_list(vec![b::var("it")]) } - assert_leaf! { - parsers [ var ] - "$name" -> 0..5 { Variable(tag(1, 5)) } + equal_tokens! { + + "$name" -> b::token_list(vec![b::var("name")]) } } #[test] fn test_external() { - assert_leaf! { - parsers [ external ] - "^ls" -> 0..3 { ExternalCommand(tag(1, 3)) } + equal_tokens! { + + "^ls" -> b::token_list(vec![b::external_command("ls")]) + } + } + + #[test] + fn test_dot_prefixed_name() { + equal_tokens! { + + ".azure" -> b::token_list(vec![b::op("."), b::bare("azure")]) } } #[test] fn test_delimited_paren() { - assert_eq!( - apply(node, "node", "(abc)"), - build_token(b::parens(vec![b::bare("abc")])) - ); + equal_tokens! { + + "(abc)" -> b::token_list(vec![b::parens(vec![b::bare("abc")])]) + } - assert_eq!( - apply(node, "node", "( abc )"), - build_token(b::parens(vec![b::ws(" "), b::bare("abc"), b::ws(" ")])) - ); + equal_tokens! { + + "( abc )" -> b::token_list(vec![b::parens(vec![b::ws(" "), b::bare("abc"), b::ws(" ")])]) + } - assert_eq!( - apply(node, "node", "( abc def )"), - build_token(b::parens(vec![ - b::ws(" "), - b::bare("abc"), - b::sp(), - b::bare("def"), - b::sp() - ])) - ); + equal_tokens! { + + "( abc def )" -> b::token_list(vec![b::parens(vec![b::ws(" "), b::bare("abc"), b::ws(" "), b::bare("def"), b::ws(" ")])]) + } - assert_eq!( - apply(node, "node", "( abc def 123 456GB )"), - build_token(b::parens(vec![ - b::ws(" "), - b::bare("abc"), - b::sp(), - b::bare("def"), - b::sp(), - b::int(123), - b::sp(), - b::size(456, "GB"), - b::sp() - ])) - ); + equal_tokens! { + + "( abc def 123 456GB )" -> b::token_list(vec![b::parens(vec![ + b::ws(" "), b::bare("abc"), b::ws(" "), b::bare("def"), b::ws(" "), b::int(123), b::ws(" "), b::bare("456GB"), b::ws(" ") + ])]) + } } #[test] fn test_delimited_square() { - assert_eq!( - apply(node, "node", "[abc]"), - build_token(b::square(vec![b::bare("abc")])) - ); + equal_tokens! { + + "[abc]" -> b::token_list(vec![b::square(vec![b::bare("abc")])]) + } - assert_eq!( - apply(node, "node", "[ abc ]"), - build_token(b::square(vec![b::ws(" "), b::bare("abc"), b::ws(" ")])) - ); + equal_tokens! { + + "[ abc ]" -> b::token_list(vec![b::square(vec![b::ws(" "), b::bare("abc"), b::ws(" ")])]) + } - assert_eq!( - apply(node, "node", "[ abc def ]"), - build_token(b::square(vec![ - b::ws(" "), - b::bare("abc"), - b::sp(), - b::bare("def"), - b::sp() - ])) - ); + equal_tokens! { + + "[ abc def ]" -> b::token_list(vec![b::square(vec![b::ws(" "), b::bare("abc"), b::ws(" "), b::bare("def"), b::ws(" ")])]) + } - assert_eq!( - apply(node, "node", "[ abc def 123 456GB ]"), - build_token(b::square(vec![ - b::ws(" "), - b::bare("abc"), - b::sp(), - b::bare("def"), - b::sp(), - b::int(123), - b::sp(), - b::size(456, "GB"), - b::sp() - ])) - ); + equal_tokens! 
{ + + "[ abc def 123 456GB ]" -> b::token_list(vec![b::square(vec![ + b::ws(" "), b::bare("abc"), b::ws(" "), b::bare("def"), b::ws(" "), b::int(123), b::ws(" "), b::bare("456GB"), b::ws(" ") + ])]) + } } #[test] fn test_path() { let _ = pretty_env_logger::try_init(); - assert_eq!( - apply(node, "node", "$it.print"), - build_token(b::path(b::var("it"), vec![b::member("print")])) - ); - assert_eq!( - apply(node, "node", "$head.part1.part2"), - build_token(b::path( - b::var("head"), - vec![b::member("part1"), b::member("part2")] - )) - ); + equal_tokens! { + + "$it.print" -> b::token_list(vec![b::var("it"), b::op("."), b::bare("print")]) + } - assert_eq!( - apply(node, "node", "( hello ).world"), - build_token(b::path( - b::parens(vec![b::sp(), b::bare("hello"), b::sp()]), - vec![b::member("world")] - )) - ); + equal_tokens! { + + "$head.part1.part2" -> b::token_list(vec![b::var("head"), b::op("."), b::bare("part1"), b::op("."), b::bare("part2")]) + } - assert_eq!( - apply(node, "node", "( hello ).\"world\""), - build_token(b::path( - b::parens(vec![b::sp(), b::bare("hello"), b::sp()],), - vec![b::string("world")] - )) - ); + equal_tokens! { + + "( hello ).world" -> b::token_list(vec![b::parens(vec![b::sp(), b::bare("hello"), b::sp()]), b::op("."), b::bare("world")]) + } + + equal_tokens! { + + r#"( hello )."world""# -> b::token_list(vec![b::parens(vec![b::sp(), b::bare("hello"), b::sp()]), b::op("."), b::string("world")]) + } } #[test] fn test_nested_path() { - assert_eq!( - apply( - node, - "node", - "( $it.is.\"great news\".right yep $yep ).\"world\"" - ), - build_token(b::path( - b::parens(vec![ - b::sp(), - b::path( + equal_tokens! { + + r#"( $it.is."great news".right yep $yep )."world""# -> b::token_list( + vec![ + b::parens(vec![ + b::sp(), b::var("it"), - vec![b::member("is"), b::string("great news"), b::member("right")] - ), - b::sp(), - b::bare("yep"), - b::sp(), - b::var("yep"), - b::sp() - ]), - vec![b::string("world")] - )) - ) + b::op("."), + b::bare("is"), + b::op("."), + b::string("great news"), + b::op("."), + b::bare("right"), + b::sp(), + b::bare("yep"), + b::sp(), + b::var("yep"), + b::sp() + ]), + b::op("."), b::string("world")] + ) + } } #[test] fn test_smoke_single_command() { - assert_eq!( - apply(raw_call, "raw_call", "git add ."), - build(b::call( - b::bare("git"), - vec![b::sp(), b::bare("add"), b::sp(), b::bare(".")] - )) - ); + equal_tokens! { + + "git add ." -> b::token_list(vec![b::bare("git"), b::sp(), b::bare("add"), b::sp(), b::op(".")]) + } - assert_eq!( - apply(raw_call, "raw_call", "open Cargo.toml"), - build(b::call( - b::bare("open"), - vec![b::sp(), b::bare("Cargo.toml")] - )) - ); + equal_tokens! { + + "open Cargo.toml" -> b::token_list(vec![b::bare("open"), b::sp(), b::bare("Cargo"), b::op("."), b::bare("toml")]) + } - assert_eq!( - apply(raw_call, "raw_call", "select package.version"), - build(b::call( - b::bare("select"), - vec![b::sp(), b::bare("package.version")] - )) - ); + equal_tokens! { + + "select package.version" -> b::token_list(vec![b::bare("select"), b::sp(), b::bare("package"), b::op("."), b::bare("version")]) + } - assert_eq!( - apply(raw_call, "raw_call", "echo $it"), - build(b::call(b::bare("echo"), vec![b::sp(), b::var("it")])) - ); + equal_tokens! { + + "echo $it" -> b::token_list(vec![b::bare("echo"), b::sp(), b::var("it")]) + } - assert_eq!( - apply(raw_call, "raw_call", "open Cargo.toml --raw"), - build(b::call( - b::bare("open"), - vec![b::sp(), b::bare("Cargo.toml"), b::sp(), b::flag("raw")] - )) - ); + equal_tokens! 
{ + + "open Cargo.toml --raw" -> b::token_list(vec![b::bare("open"), b::sp(), b::bare("Cargo"), b::op("."), b::bare("toml"), b::sp(), b::flag("raw")]) + } - assert_eq!( - apply(raw_call, "raw_call", "open Cargo.toml -r"), - build(b::call( - b::bare("open"), - vec![b::sp(), b::bare("Cargo.toml"), b::sp(), b::shorthand("r")] - )) - ); + equal_tokens! { + + "open Cargo.toml -r" -> b::token_list(vec![b::bare("open"), b::sp(), b::bare("Cargo"), b::op("."), b::bare("toml"), b::sp(), b::shorthand("r")]) + } - assert_eq!( - apply(raw_call, "raw_call", "config --set tabs 2"), - build(b::call( - b::bare("config"), - vec![ - b::sp(), - b::flag("set"), - b::sp(), - b::bare("tabs"), - b::sp(), - b::int(2) - ] - )) - ); + equal_tokens! { + + "config --set tabs 2" -> b::token_list(vec![b::bare("config"), b::sp(), b::flag("set"), b::sp(), b::bare("tabs"), b::sp(), b::int(2)]) + } } #[test] @@ -1181,162 +1079,189 @@ mod tests { equal_tokens!( "cargo +nightly run" -> - b::pipeline(vec![( - None, - b::call( - b::bare("cargo"), - vec![ - b::sp(), - b::external_word("+nightly"), - b::sp(), - b::bare("run") - ] - ), - None - )]) + b::pipeline(vec![vec![ + b::bare("cargo"), + b::sp(), + b::external_word("+nightly"), + b::sp(), + b::bare("run") + ]]) ); equal_tokens!( "rm foo%bar" -> - b::pipeline(vec![( - None, - b::call(b::bare("rm"), vec![b::sp(), b::external_word("foo%bar"),]), - None - )]) + b::pipeline(vec![vec![ + b::bare("rm"), b::sp(), b::external_word("foo%bar") + ]]) ); equal_tokens!( "rm foo%bar" -> - b::pipeline(vec![( - None, - b::call(b::bare("rm"), vec![b::sp(), b::external_word("foo%bar"),]), - None - )]) + b::pipeline(vec![vec![ + b::bare("rm"), b::sp(), b::external_word("foo%bar"), + ]]) ); } #[test] - fn test_smoke_pipeline() { + fn test_pipeline() { let _ = pretty_env_logger::try_init(); - assert_eq!( - apply( - pipeline, - "pipeline", - r#"git branch --merged | split-row "`n" | where $it != "* master""# - ), - build_token(b::pipeline(vec![ - ( - None, - b::call( - b::bare("git"), - vec![b::sp(), b::bare("branch"), b::sp(), b::flag("merged")] - ), - Some(" ") - ), - ( - Some(" "), - b::call(b::bare("split-row"), vec![b::sp(), b::string("`n")]), - Some(" ") - ), - ( - Some(" "), - b::call( - b::bare("where"), - vec![ - b::sp(), - b::var("it"), - b::sp(), - b::op("!="), - b::sp(), - b::string("* master") - ] - ), - None - ) - ])) - ); - - assert_eq!( - apply(pipeline, "pipeline", "ls | where { $it.size > 100 }"), - build_token(b::pipeline(vec![ - (None, b::call(b::bare("ls"), vec![]), Some(" ")), - ( - Some(" "), - b::call( - b::bare("where"), - vec![ - b::sp(), - b::braced(vec![ - b::path(b::var("it"), vec![b::member("size")]), - b::sp(), - b::op(">"), - b::sp(), - b::int(100) - ]) - ] - ), - None - ) - ])) - ) - } - - fn apply( - f: impl Fn(NomSpan) -> Result<(NomSpan, T), nom::Err<(NomSpan, nom::error::ErrorKind)>>, - desc: &str, - string: &str, - ) -> T { - match f(NomSpan::new_extra(string, uuid::Uuid::nil())) { - Ok(v) => v.1, - Err(other) => { - println!("{:?}", other); - println!("for {} @ {}", string, desc); - panic!("No dice"); - } + equal_tokens! { + "sys | echo" -> b::pipeline(vec![ + vec![ + b::bare("sys"), b::sp() + ], + vec![ + b::sp(), b::bare("echo") + ] + ]) } } - fn tag(left: usize, right: usize) -> Tag { - Tag::from((left, right, uuid::Uuid::nil())) + #[test] + fn test_patterns() { + equal_tokens! { + + "cp ../formats/*" -> b::pipeline(vec![vec![b::bare("cp"), b::ws(" "), b::op("."), b::op("."), b::pattern("/formats/*")]]) + } + + equal_tokens! 
{ + + "cp * /dev/null" -> b::pipeline(vec![vec![b::bare("cp"), b::ws(" "), b::pattern("*"), b::ws(" "), b::bare("/dev/null")]]) + } + } + + // #[test] + // fn test_pseudo_paths() { + // let _ = pretty_env_logger::try_init(); + + // equal_tokens!( + // r#"sys | where cpu."max ghz" > 1"# -> + // b::pipeline(vec![ + // (None, b::call(b::bare("sys"), vec![]), Some(" ")), + // ( + // Some(" "), + // b::call( + // b::bare("where"), + // vec![ + // b::sp(), + // b::path(b::bare("cpu"), vec![b::string("max ghz")]), + // b::sp(), + // b::op(">"), + // b::sp(), + // b::int(1) + // ] + // ), + // None + // ) + // ]) + // ); + // } + + // #[test] + // fn test_smoke_pipeline() { + // let _ = pretty_env_logger::try_init(); + + // assert_eq!( + // apply( + // pipeline, + // "pipeline", + // r#"git branch --merged | split-row "`n" | where $it != "* master""# + // ), + // build_token(b::pipeline(vec![ + // ( + // None, + // b::call( + // b::bare("git"), + // vec![b::sp(), b::bare("branch"), b::sp(), b::flag("merged")] + // ), + // Some(" ") + // ), + // ( + // Some(" "), + // b::call(b::bare("split-row"), vec![b::sp(), b::string("`n")]), + // Some(" ") + // ), + // ( + // Some(" "), + // b::call( + // b::bare("where"), + // vec![ + // b::sp(), + // b::var("it"), + // b::sp(), + // b::op("!="), + // b::sp(), + // b::string("* master") + // ] + // ), + // None + // ) + // ])) + // ); + + // assert_eq!( + // apply(pipeline, "pipeline", "ls | where { $it.size > 100 }"), + // build_token(b::pipeline(vec![ + // (None, b::call(b::bare("ls"), vec![]), Some(" ")), + // ( + // Some(" "), + // b::call( + // b::bare("where"), + // vec![ + // b::sp(), + // b::braced(vec![ + // b::path(b::var("it"), vec![b::member("size")]), + // b::sp(), + // b::op(">"), + // b::sp(), + // b::int(100) + // ]) + // ] + // ), + // None + // ) + // ])) + // ) + // } + + fn apply( + f: impl Fn(NomSpan) -> Result<(NomSpan, TokenNode), nom::Err<(NomSpan, nom::error::ErrorKind)>>, + desc: &str, + string: &str, + ) -> TokenNode { + f(nom_input(string)).unwrap().1 + } + + fn span((left, right): (usize, usize)) -> Span { + Span::new(left, right) } fn delimited( - delimiter: Tagged, + delimiter: Spanned, children: Vec, left: usize, right: usize, ) -> TokenNode { - let node = DelimitedNode::new(*delimiter, children); - let spanned = node.tagged((left, right, delimiter.tag.anchor)); + let start = Span::for_char(left); + let end = Span::for_char(right); + + let node = DelimitedNode::new(delimiter.item, (start, end), children); + let spanned = node.spanned(Span::new(left, right)); TokenNode::Delimited(spanned) } - fn path(head: TokenNode, tail: Vec, left: usize, right: usize) -> TokenNode { - let tag = head.tag(); - - let node = PathNode::new( - Box::new(head), - tail.into_iter().map(TokenNode::Token).collect(), - ); - let spanned = node.tagged((left, right, tag.anchor)); - TokenNode::Path(spanned) - } - fn token(token: RawToken, left: usize, right: usize) -> TokenNode { - TokenNode::Token(token.tagged((left, right, uuid::Uuid::nil()))) + TokenNode::Token(token.spanned(Span::new(left, right))) } fn build(block: CurriedNode) -> T { - let mut builder = TokenTreeBuilder::new(uuid::Uuid::nil()); + let mut builder = TokenTreeBuilder::new(); block(&mut builder) } fn build_token(block: CurriedToken) -> TokenNode { - TokenTreeBuilder::build(uuid::Uuid::nil(), block).0 - } - - fn test_uuid() -> uuid::Uuid { - uuid::Uuid::nil() + TokenTreeBuilder::build(block).0 } } diff --git a/src/parser/parse/pipeline.rs b/src/parser/parse/pipeline.rs index 
42bbe23a1..73db73807 100644 --- a/src/parser/parse/pipeline.rs +++ b/src/parser/parse/pipeline.rs @@ -1,37 +1,30 @@ -use crate::parser::CallNode; +use crate::parser::TokenNode; use crate::traits::ToDebug; -use crate::{Tag, Tagged}; +use crate::{Span, Spanned}; use derive_new::new; use getset::Getters; use std::fmt; #[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, new)] pub struct Pipeline { - pub(crate) parts: Vec, - pub(crate) post_ws: Option, + pub(crate) parts: Vec>, + // pub(crate) post_ws: Option, } impl ToDebug for Pipeline { fn fmt_debug(&self, f: &mut fmt::Formatter, source: &str) -> fmt::Result { - for part in &self.parts { + for part in self.parts.iter() { write!(f, "{}", part.debug(source))?; } - if let Some(post_ws) = self.post_ws { - write!(f, "{}", post_ws.slice(source))? - } - Ok(()) } } #[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Getters, new)] pub struct PipelineElement { - pub pipe: Option, - pub pre_ws: Option, - #[get = "pub(crate)"] - call: Tagged, - pub post_ws: Option, + pub pipe: Option, + pub tokens: Spanned>, } impl ToDebug for PipelineElement { @@ -40,14 +33,8 @@ impl ToDebug for PipelineElement { write!(f, "{}", pipe.slice(source))?; } - if let Some(pre_ws) = self.pre_ws { - write!(f, "{}", pre_ws.slice(source))?; - } - - write!(f, "{}", self.call.debug(source))?; - - if let Some(post_ws) = self.post_ws { - write!(f, "{}", post_ws.slice(source))?; + for token in &self.tokens.item { + write!(f, "{}", token.debug(source))?; } Ok(()) diff --git a/src/parser/parse/token_tree.rs b/src/parser/parse/token_tree.rs index e0072360e..c3c1df652 100644 --- a/src/parser/parse/token_tree.rs +++ b/src/parser/parse/token_tree.rs @@ -1,9 +1,9 @@ use crate::errors::ShellError; use crate::parser::parse::{call_node::*, flag::*, operator::*, pipeline::*, tokens::*}; +use crate::prelude::*; use crate::traits::ToDebug; -use crate::{Tag, Tagged, Text}; +use crate::{Tagged, Text}; use derive_new::new; -use enum_utils::FromStr; use getset::Getters; use std::fmt; @@ -11,16 +11,14 @@ use std::fmt; pub enum TokenNode { Token(Token), - Call(Tagged), - Delimited(Tagged), - Pipeline(Tagged), - Operator(Tagged), - Flag(Tagged), - Member(Tag), - Whitespace(Tag), + Call(Spanned), + Nodes(Spanned>), + Delimited(Spanned), + Pipeline(Spanned), + Flag(Spanned), + Whitespace(Span), - Error(Tagged>), - Path(Tagged), + Error(Spanned), } impl ToDebug for TokenNode { @@ -78,48 +76,51 @@ impl fmt::Debug for DebugTokenNode<'_> { ) } TokenNode::Pipeline(pipeline) => write!(f, "{}", pipeline.debug(self.source)), - TokenNode::Error(s) => write!(f, " for {:?}", s.tag().slice(self.source)), - rest => write!(f, "{}", rest.tag().slice(self.source)), + TokenNode::Error(_) => write!(f, ""), + rest => write!(f, "{}", rest.span().slice(self.source)), } } } -impl From<&TokenNode> for Tag { - fn from(token: &TokenNode) -> Tag { - token.tag() +impl From<&TokenNode> for Span { + fn from(token: &TokenNode) -> Span { + token.span() } } impl TokenNode { - pub fn tag(&self) -> Tag { + pub fn span(&self) -> Span { match self { - TokenNode::Token(t) => t.tag(), - TokenNode::Call(s) => s.tag(), - TokenNode::Delimited(s) => s.tag(), - TokenNode::Pipeline(s) => s.tag(), - TokenNode::Operator(s) => s.tag(), - TokenNode::Flag(s) => s.tag(), - TokenNode::Member(s) => *s, + TokenNode::Token(t) => t.span, + TokenNode::Nodes(t) => t.span, + TokenNode::Call(s) => s.span, + TokenNode::Delimited(s) => s.span, + TokenNode::Pipeline(s) => s.span, + TokenNode::Flag(s) => s.span, TokenNode::Whitespace(s) => *s, - 
TokenNode::Error(s) => s.tag(), - TokenNode::Path(s) => s.tag(), + TokenNode::Error(s) => s.span, } } - pub fn type_name(&self) -> String { + pub fn type_name(&self) -> &'static str { match self { TokenNode::Token(t) => t.type_name(), + TokenNode::Nodes(_) => "nodes", TokenNode::Call(_) => "command", TokenNode::Delimited(d) => d.type_name(), TokenNode::Pipeline(_) => "pipeline", - TokenNode::Operator(_) => "operator", TokenNode::Flag(_) => "flag", - TokenNode::Member(_) => "member", TokenNode::Whitespace(_) => "whitespace", TokenNode::Error(_) => "error", - TokenNode::Path(_) => "path", } - .to_string() + } + + pub fn spanned_type_name(&self) -> Spanned<&'static str> { + self.type_name().spanned(self.span()) + } + + pub fn tagged_type_name(&self) -> Tagged<&'static str> { + self.type_name().tagged(self.span()) } pub fn old_debug<'a>(&'a self, source: &'a Text) -> DebugTokenNode<'a> { @@ -127,16 +128,26 @@ impl TokenNode { } pub fn as_external_arg(&self, source: &Text) -> String { - self.tag().slice(source).to_string() + self.span().slice(source).to_string() } pub fn source<'a>(&self, source: &'a Text) -> &'a str { - self.tag().slice(source) + self.span().slice(source) + } + + pub fn get_variable(&self) -> Result<(Span, Span), ShellError> { + match self { + TokenNode::Token(Spanned { + item: RawToken::Variable(inner_span), + span: outer_span, + }) => Ok((*outer_span, *inner_span)), + _ => Err(ShellError::type_error("variable", self.tagged_type_name())), + } } pub fn is_bare(&self) -> bool { match self { - TokenNode::Token(Tagged { + TokenNode::Token(Spanned { item: RawToken::Bare, .. }) => true, @@ -144,9 +155,44 @@ impl TokenNode { } } + pub fn is_pattern(&self) -> bool { + match self { + TokenNode::Token(Spanned { + item: RawToken::GlobPattern, + .. + }) => true, + _ => false, + } + } + + pub fn is_dot(&self) -> bool { + match self { + TokenNode::Token(Spanned { + item: RawToken::Operator(Operator::Dot), + .. + }) => true, + _ => false, + } + } + + pub fn as_block(&self) -> Option<(Spanned<&[TokenNode]>, (Span, Span))> { + match self { + TokenNode::Delimited(Spanned { + item: + DelimitedNode { + delimiter, + children, + spans, + }, + span, + }) if *delimiter == Delimiter::Brace => Some(((&children[..]).spanned(*span), *spans)), + _ => None, + } + } + pub fn is_external(&self) -> bool { match self { - TokenNode::Token(Tagged { + TokenNode::Token(Spanned { item: RawToken::ExternalCommand(..), .. }) => true, @@ -154,20 +200,20 @@ impl TokenNode { } } - pub fn expect_external(&self) -> Tag { + pub fn expect_external(&self) -> Span { match self { - TokenNode::Token(Tagged { - item: RawToken::ExternalCommand(tag), + TokenNode::Token(Spanned { + item: RawToken::ExternalCommand(span), .. - }) => *tag, + }) => *span, _ => panic!("Only call expect_external if you checked is_external first"), } } - pub(crate) fn as_flag(&self, value: &str, source: &Text) -> Option> { + pub(crate) fn as_flag(&self, value: &str, source: &Text) -> Option> { match self { TokenNode::Flag( - flag @ Tagged { + flag @ Spanned { item: Flag { .. }, .. }, ) if value == flag.name().slice(source) => Some(*flag), @@ -177,8 +223,58 @@ impl TokenNode { pub fn as_pipeline(&self) -> Result { match self { - TokenNode::Pipeline(Tagged { item, .. }) => Ok(item.clone()), - _ => Err(ShellError::string("unimplemented")), + TokenNode::Pipeline(Spanned { item, .. 
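
get_variable above (and the expect_string/expect_var helpers just below) hand back a pair of spans: the outer span of the whole token and the inner span of its payload, i.e. the variable name without the `$` or the string body without its quotes. A small stand-in illustration, with spans as plain (start, end) offsets rather than nu's types:

    type Span = (usize, usize);

    enum MiniToken {
        Variable(Span), // inner span: the name without the leading `$`
        Other,
    }

    struct MiniNode {
        item: MiniToken,
        span: Span, // outer span: the whole token, sigil included
    }

    // Mirrors the shape of `get_variable` above: (outer, inner) on success.
    fn get_variable(node: &MiniNode) -> Option<(Span, Span)> {
        match node.item {
            MiniToken::Variable(inner) => Some((node.span, inner)),
            MiniToken::Other => None,
        }
    }

    fn main() {
        let source = "$it";
        let node = MiniNode { item: MiniToken::Variable((1, 3)), span: (0, 3) };
        let (outer, inner) = get_variable(&node).unwrap();
        assert_eq!(&source[outer.0..outer.1], "$it");
        assert_eq!(&source[inner.0..inner.1], "it");
    }
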
}) => Ok(item.clone()), + _ => Err(ShellError::unimplemented("unimplemented")), + } + } + + pub fn is_whitespace(&self) -> bool { + match self { + TokenNode::Whitespace(_) => true, + _ => false, + } + } + + pub fn expect_string(&self) -> (Span, Span) { + match self { + TokenNode::Token(Spanned { + item: RawToken::String(inner_span), + span: outer_span, + }) => (*outer_span, *inner_span), + other => panic!("Expected string, found {:?}", other), + } + } +} + +#[cfg(test)] +impl TokenNode { + pub fn expect_list(&self) -> Tagged<&[TokenNode]> { + match self { + TokenNode::Nodes(Spanned { item, span }) => (&item[..]).tagged(Tag { + span: *span, + anchor: None, + }), + other => panic!("Expected list, found {:?}", other), + } + } + + pub fn expect_var(&self) -> (Span, Span) { + match self { + TokenNode::Token(Spanned { + item: RawToken::Variable(inner_span), + span: outer_span, + }) => (*outer_span, *inner_span), + other => panic!("Expected var, found {:?}", other), + } + } + + pub fn expect_bare(&self) -> Span { + match self { + TokenNode::Token(Spanned { + item: RawToken::Bare, + span, + }) => *span, + other => panic!("Expected var, found {:?}", other), } } } @@ -186,8 +282,9 @@ impl TokenNode { #[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Getters, new)] #[get = "pub(crate)"] pub struct DelimitedNode { - delimiter: Delimiter, - children: Vec, + pub(crate) delimiter: Delimiter, + pub(crate) spans: (Span, Span), + pub(crate) children: Vec, } impl DelimitedNode { @@ -200,13 +297,31 @@ impl DelimitedNode { } } -#[derive(Debug, Copy, Clone, Eq, PartialEq, Ord, PartialOrd, FromStr)] +#[derive(Debug, Copy, Clone, Eq, PartialEq, Ord, PartialOrd)] pub enum Delimiter { Paren, Brace, Square, } +impl Delimiter { + pub(crate) fn open(&self) -> &'static str { + match self { + Delimiter::Paren => "(", + Delimiter::Brace => "{", + Delimiter::Square => "[", + } + } + + pub(crate) fn close(&self) -> &'static str { + match self { + Delimiter::Paren => ")", + Delimiter::Brace => "}", + Delimiter::Square => "]", + } + } +} + #[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Getters, new)] #[get = "pub(crate)"] pub struct PathNode { diff --git a/src/parser/parse/token_tree_builder.rs b/src/parser/parse/token_tree_builder.rs index 9a2e6ab72..891e6b9e1 100644 --- a/src/parser/parse/token_tree_builder.rs +++ b/src/parser/parse/token_tree_builder.rs @@ -3,12 +3,10 @@ use crate::prelude::*; use crate::parser::parse::flag::{Flag, FlagKind}; use crate::parser::parse::operator::Operator; use crate::parser::parse::pipeline::{Pipeline, PipelineElement}; -use crate::parser::parse::token_tree::{DelimitedNode, Delimiter, PathNode, TokenNode}; +use crate::parser::parse::token_tree::{DelimitedNode, Delimiter, TokenNode}; use crate::parser::parse::tokens::{RawNumber, RawToken}; -use crate::parser::parse::unit::Unit; use crate::parser::CallNode; use derive_new::new; -use uuid::Uuid; #[derive(new)] pub struct TokenTreeBuilder { @@ -17,74 +15,86 @@ pub struct TokenTreeBuilder { #[new(default)] output: String, - - anchor: Uuid, } pub type CurriedToken = Box TokenNode + 'static>; pub type CurriedCall = Box Tagged + 'static>; impl TokenTreeBuilder { - pub fn build(anchor: Uuid, block: impl FnOnce(&mut Self) -> TokenNode) -> (TokenNode, String) { - let mut builder = TokenTreeBuilder::new(anchor); + pub fn build(block: impl FnOnce(&mut Self) -> TokenNode) -> (TokenNode, String) { + let mut builder = TokenTreeBuilder::new(); let node = block(&mut builder); (node, builder.output) } - pub fn pipeline(input: Vec<(Option<&str>, 
CurriedCall, Option<&str>)>) -> CurriedToken { - let input: Vec<(Option, CurriedCall, Option)> = input - .into_iter() - .map(|(pre, call, post)| { - ( - pre.map(|s| s.to_string()), - call, - post.map(|s| s.to_string()), - ) - }) - .collect(); + fn build_spanned( + &mut self, + callback: impl FnOnce(&mut TokenTreeBuilder) -> T, + ) -> Spanned { + let start = self.pos; + let ret = callback(self); + let end = self.pos; + ret.spanned(Span::new(start, end)) + } + + pub fn pipeline(input: Vec>) -> CurriedToken { Box::new(move |b| { let start = b.pos; - let mut out: Vec = vec![]; + let mut out: Vec> = vec![]; let mut input = input.into_iter().peekable(); - let (pre, call, post) = input + let head = input .next() .expect("A pipeline must contain at least one element"); let pipe = None; - let pre_tag = pre.map(|pre| b.consume_tag(&pre)); - let call = call(b); - let post_tag = post.map(|post| b.consume_tag(&post)); + let head = b.build_spanned(|b| head.into_iter().map(|node| node(b)).collect()); - out.push(PipelineElement::new(pipe, pre_tag, call, post_tag)); + let head_span: Span = head.span; + out.push(PipelineElement::new(pipe, head).spanned(head_span)); loop { match input.next() { None => break, - Some((pre, call, post)) => { - let pipe = Some(b.consume_tag("|")); - let pre_span = pre.map(|pre| b.consume_tag(&pre)); - let call = call(b); - let post_span = post.map(|post| b.consume_tag(&post)); + Some(node) => { + let start = b.pos; + let pipe = Some(b.consume_span("|")); + let node = + b.build_spanned(|b| node.into_iter().map(|node| node(b)).collect()); + let end = b.pos; - out.push(PipelineElement::new(pipe, pre_span, call, post_span)); + out.push(PipelineElement::new(pipe, node).spanned(Span::new(start, end))); } } } let end = b.pos; - TokenTreeBuilder::tagged_pipeline((out, None), (start, end, b.anchor)) + TokenTreeBuilder::spanned_pipeline(out, Span::new(start, end)) }) } - pub fn tagged_pipeline( - input: (Vec, Option), - tag: impl Into, + pub fn spanned_pipeline( + input: Vec>, + span: impl Into, ) -> TokenNode { - TokenNode::Pipeline(Pipeline::new(input.0, input.1.into()).tagged(tag.into())) + TokenNode::Pipeline(Pipeline::new(input).spanned(span)) + } + + pub fn token_list(input: Vec) -> CurriedToken { + Box::new(move |b| { + let start = b.pos; + let tokens = input.into_iter().map(|i| i(b)).collect(); + let end = b.pos; + + TokenTreeBuilder::tagged_token_list(tokens, (start, end, None)) + }) + } + + pub fn tagged_token_list(input: Vec, tag: impl Into) -> TokenNode { + TokenNode::Nodes(input.spanned(tag.into().span)) } pub fn op(input: impl Into) -> CurriedToken { @@ -95,12 +105,12 @@ impl TokenTreeBuilder { b.pos = end; - TokenTreeBuilder::tagged_op(input, (start, end, b.anchor)) + TokenTreeBuilder::spanned_op(input, Span::new(start, end)) }) } - pub fn tagged_op(input: impl Into, tag: impl Into) -> TokenNode { - TokenNode::Operator(input.into().tagged(tag.into())) + pub fn spanned_op(input: impl Into, span: impl Into) -> TokenNode { + TokenNode::Token(RawToken::Operator(input.into()).spanned(span.into())) } pub fn string(input: impl Into) -> CurriedToken { @@ -112,15 +122,15 @@ impl TokenTreeBuilder { let (_, end) = b.consume("\""); b.pos = end; - TokenTreeBuilder::tagged_string( - (inner_start, inner_end, b.anchor), - (start, end, b.anchor), + TokenTreeBuilder::spanned_string( + Span::new(inner_start, inner_end), + Span::new(start, end), ) }) } - pub fn tagged_string(input: impl Into, tag: impl Into) -> TokenNode { - TokenNode::Token(RawToken::String(input.into()).tagged(tag.into())) + 
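
With calls removed from the element type, the pipeline builder above treats a pipeline as groups of tokens separated by pipe spans; the stripped generics appear to be Vec<Vec<CurriedToken>> for the input and Vec<Spanned<PipelineElement>> for the parts, but treat those parameters as an inference from the surrounding calls. A rough stand-in of the resulting data shape, reassembling the source the way the ToDebug impls earlier do (Span and the token list are simplified):

    type Span = (usize, usize);

    #[derive(Debug)]
    struct MiniElement {
        pipe: Option<Span>, // span of the `|` that introduced this element, if any
        tokens: Vec<Span>,  // spans of the tokens in this segment, whitespace included
    }

    #[derive(Debug)]
    struct MiniPipeline {
        parts: Vec<MiniElement>,
    }

    // Reassemble the original source from the recorded spans.
    fn debug(pipeline: &MiniPipeline, source: &str) -> String {
        let mut out = String::new();
        for part in &pipeline.parts {
            if let Some((start, end)) = part.pipe {
                out.push_str(&source[start..end]);
            }
            for &(start, end) in &part.tokens {
                out.push_str(&source[start..end]);
            }
        }
        out
    }

    fn main() {
        // "sys | echo", split as in the test_pipeline case earlier:
        // ["sys", " "] then pipe "|" followed by [" ", "echo"].
        let source = "sys | echo";
        let pipeline = MiniPipeline {
            parts: vec![
                MiniElement { pipe: None, tokens: vec![(0, 3), (3, 4)] },
                MiniElement { pipe: Some((4, 5)), tokens: vec![(5, 6), (6, 10)] },
            ],
        };
        assert_eq!(debug(&pipeline, source), source);
    }
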
pub fn spanned_string(input: impl Into, span: impl Into) -> TokenNode { + TokenNode::Token(RawToken::String(input.into()).spanned(span.into())) } pub fn bare(input: impl Into) -> CurriedToken { @@ -130,12 +140,12 @@ impl TokenTreeBuilder { let (start, end) = b.consume(&input); b.pos = end; - TokenTreeBuilder::tagged_bare((start, end, b.anchor)) + TokenTreeBuilder::spanned_bare(Span::new(start, end)) }) } - pub fn tagged_bare(tag: impl Into) -> TokenNode { - TokenNode::Token(RawToken::Bare.tagged(tag.into())) + pub fn spanned_bare(span: impl Into) -> TokenNode { + TokenNode::Token(RawToken::Bare.spanned(span)) } pub fn pattern(input: impl Into) -> CurriedToken { @@ -145,12 +155,12 @@ impl TokenTreeBuilder { let (start, end) = b.consume(&input); b.pos = end; - TokenTreeBuilder::tagged_pattern((start, end, b.anchor)) + TokenTreeBuilder::spanned_pattern(Span::new(start, end)) }) } - pub fn tagged_pattern(input: impl Into) -> TokenNode { - TokenNode::Token(RawToken::GlobPattern.tagged(input.into())) + pub fn spanned_pattern(input: impl Into) -> TokenNode { + TokenNode::Token(RawToken::GlobPattern.spanned(input.into())) } pub fn external_word(input: impl Into) -> CurriedToken { @@ -160,16 +170,31 @@ impl TokenTreeBuilder { let (start, end) = b.consume(&input); b.pos = end; - TokenTreeBuilder::tagged_external_word((start, end, b.anchor)) + TokenTreeBuilder::spanned_external_word(Span::new(start, end)) }) } - pub fn tagged_external_word(input: impl Into) -> TokenNode { - TokenNode::Token(RawToken::ExternalWord.tagged(input.into())) + pub fn spanned_external_word(input: impl Into) -> TokenNode { + TokenNode::Token(RawToken::ExternalWord.spanned(input.into())) } - pub fn tagged_external(input: impl Into, tag: impl Into) -> TokenNode { - TokenNode::Token(RawToken::ExternalCommand(input.into()).tagged(tag.into())) + pub fn external_command(input: impl Into) -> CurriedToken { + let input = input.into(); + + Box::new(move |b| { + let (outer_start, _) = b.consume("^"); + let (inner_start, end) = b.consume(&input); + b.pos = end; + + TokenTreeBuilder::spanned_external_command( + Span::new(inner_start, end), + Span::new(outer_start, end), + ) + }) + } + + pub fn spanned_external_command(inner: impl Into, outer: impl Into) -> TokenNode { + TokenNode::Token(RawToken::ExternalCommand(inner.into()).spanned(outer.into())) } pub fn int(input: impl Into) -> CurriedToken { @@ -179,9 +204,9 @@ impl TokenTreeBuilder { let (start, end) = b.consume(&int.to_string()); b.pos = end; - TokenTreeBuilder::tagged_number( - RawNumber::Int((start, end, b.anchor).into()), - (start, end, b.anchor), + TokenTreeBuilder::spanned_number( + RawNumber::Int(Span::new(start, end)), + Span::new(start, end), ) }) } @@ -193,63 +218,15 @@ impl TokenTreeBuilder { let (start, end) = b.consume(&decimal.to_string()); b.pos = end; - TokenTreeBuilder::tagged_number( - RawNumber::Decimal((start, end, b.anchor).into()), - (start, end, b.anchor), + TokenTreeBuilder::spanned_number( + RawNumber::Decimal(Span::new(start, end)), + Span::new(start, end), ) }) } - pub fn tagged_number(input: impl Into, tag: impl Into) -> TokenNode { - TokenNode::Token(RawToken::Number(input.into()).tagged(tag.into())) - } - - pub fn size(int: impl Into, unit: impl Into) -> CurriedToken { - let int = int.into(); - let unit = unit.into(); - - Box::new(move |b| { - let (start_int, end_int) = b.consume(&int.to_string()); - let (_, end_unit) = b.consume(unit.as_str()); - b.pos = end_unit; - - TokenTreeBuilder::tagged_size( - (RawNumber::Int((start_int, end_int, 
b.anchor).into()), unit), - (start_int, end_unit, b.anchor), - ) - }) - } - - pub fn tagged_size( - input: (impl Into, impl Into), - tag: impl Into, - ) -> TokenNode { - let (int, unit) = (input.0.into(), input.1.into()); - - TokenNode::Token(RawToken::Size(int, unit).tagged(tag.into())) - } - - pub fn path(head: CurriedToken, tail: Vec) -> CurriedToken { - Box::new(move |b| { - let start = b.pos; - let head = head(b); - - let mut output = vec![]; - - for item in tail { - b.consume("."); - - output.push(item(b)); - } - - let end = b.pos; - - TokenTreeBuilder::tagged_path((head, output), (start, end, b.anchor)) - }) - } - - pub fn tagged_path(input: (TokenNode, Vec), tag: impl Into) -> TokenNode { - TokenNode::Path(PathNode::new(Box::new(input.0), input.1).tagged(tag.into())) + pub fn spanned_number(input: impl Into, span: impl Into) -> TokenNode { + TokenNode::Token(RawToken::Number(input.into()).spanned(span.into())) } pub fn var(input: impl Into) -> CurriedToken { @@ -259,12 +236,12 @@ impl TokenTreeBuilder { let (start, _) = b.consume("$"); let (inner_start, end) = b.consume(&input); - TokenTreeBuilder::tagged_var((inner_start, end, b.anchor), (start, end, b.anchor)) + TokenTreeBuilder::spanned_var(Span::new(inner_start, end), Span::new(start, end)) }) } - pub fn tagged_var(input: impl Into, tag: impl Into) -> TokenNode { - TokenNode::Token(RawToken::Variable(input.into()).tagged(tag.into())) + pub fn spanned_var(input: impl Into, span: impl Into) -> TokenNode { + TokenNode::Token(RawToken::Variable(input.into()).spanned(span.into())) } pub fn flag(input: impl Into) -> CurriedToken { @@ -274,12 +251,12 @@ impl TokenTreeBuilder { let (start, _) = b.consume("--"); let (inner_start, end) = b.consume(&input); - TokenTreeBuilder::tagged_flag((inner_start, end, b.anchor), (start, end, b.anchor)) + TokenTreeBuilder::spanned_flag(Span::new(inner_start, end), Span::new(start, end)) }) } - pub fn tagged_flag(input: impl Into, tag: impl Into) -> TokenNode { - TokenNode::Flag(Flag::new(FlagKind::Longhand, input.into()).tagged(tag.into())) + pub fn spanned_flag(input: impl Into, span: impl Into) -> TokenNode { + TokenNode::Flag(Flag::new(FlagKind::Longhand, input.into()).spanned(span.into())) } pub fn shorthand(input: impl Into) -> CurriedToken { @@ -289,25 +266,12 @@ impl TokenTreeBuilder { let (start, _) = b.consume("-"); let (inner_start, end) = b.consume(&input); - TokenTreeBuilder::tagged_shorthand((inner_start, end, b.anchor), (start, end, b.anchor)) + TokenTreeBuilder::spanned_shorthand((inner_start, end), (start, end)) }) } - pub fn tagged_shorthand(input: impl Into, tag: impl Into) -> TokenNode { - TokenNode::Flag(Flag::new(FlagKind::Shorthand, input.into()).tagged(tag.into())) - } - - pub fn member(input: impl Into) -> CurriedToken { - let input = input.into(); - - Box::new(move |b| { - let (start, end) = b.consume(&input); - TokenTreeBuilder::tagged_member((start, end, b.anchor)) - }) - } - - pub fn tagged_member(tag: impl Into) -> TokenNode { - TokenNode::Member(tag.into()) + pub fn spanned_shorthand(input: impl Into, span: impl Into) -> TokenNode { + TokenNode::Flag(Flag::new(FlagKind::Shorthand, input.into()).spanned(span.into())) } pub fn call(head: CurriedToken, input: Vec) -> CurriedCall { @@ -323,7 +287,7 @@ impl TokenTreeBuilder { let end = b.pos; - TokenTreeBuilder::tagged_call(nodes, (start, end, b.anchor)) + TokenTreeBuilder::tagged_call(nodes, (start, end, None)) }) } @@ -340,64 +304,85 @@ impl TokenTreeBuilder { CallNode::new(Box::new(head), tail).tagged(tag.into()) } + fn 
consume_delimiter( + &mut self, + input: Vec, + _open: &str, + _close: &str, + ) -> (Span, Span, Span, Vec) { + let (start_open_paren, end_open_paren) = self.consume("("); + let mut output = vec![]; + for item in input { + output.push(item(self)); + } + + let (start_close_paren, end_close_paren) = self.consume(")"); + + let open = Span::new(start_open_paren, end_open_paren); + let close = Span::new(start_close_paren, end_close_paren); + let whole = Span::new(start_open_paren, end_close_paren); + + (open, close, whole, output) + } + pub fn parens(input: Vec) -> CurriedToken { Box::new(move |b| { - let (start, _) = b.consume("("); - let mut output = vec![]; - for item in input { - output.push(item(b)); - } + let (open, close, whole, output) = b.consume_delimiter(input, "(", ")"); - let (_, end) = b.consume(")"); - - TokenTreeBuilder::tagged_parens(output, (start, end, b.anchor)) + TokenTreeBuilder::spanned_parens(output, (open, close), whole) }) } - pub fn tagged_parens(input: impl Into>, tag: impl Into) -> TokenNode { - TokenNode::Delimited(DelimitedNode::new(Delimiter::Paren, input.into()).tagged(tag.into())) + pub fn spanned_parens( + input: impl Into>, + spans: (Span, Span), + span: impl Into, + ) -> TokenNode { + TokenNode::Delimited( + DelimitedNode::new(Delimiter::Paren, spans, input.into()).spanned(span.into()), + ) } pub fn square(input: Vec) -> CurriedToken { Box::new(move |b| { - let (start, _) = b.consume("["); - let mut output = vec![]; - for item in input { - output.push(item(b)); - } + let (open, close, whole, tokens) = b.consume_delimiter(input, "[", "]"); - let (_, end) = b.consume("]"); - - TokenTreeBuilder::tagged_square(output, (start, end, b.anchor)) + TokenTreeBuilder::spanned_square(tokens, (open, close), whole) }) } - pub fn tagged_square(input: impl Into>, tag: impl Into) -> TokenNode { - TokenNode::Delimited(DelimitedNode::new(Delimiter::Square, input.into()).tagged(tag.into())) + pub fn spanned_square( + input: impl Into>, + spans: (Span, Span), + span: impl Into, + ) -> TokenNode { + TokenNode::Delimited( + DelimitedNode::new(Delimiter::Square, spans, input.into()).spanned(span.into()), + ) } pub fn braced(input: Vec) -> CurriedToken { Box::new(move |b| { - let (start, _) = b.consume("{ "); - let mut output = vec![]; - for item in input { - output.push(item(b)); - } + let (open, close, whole, tokens) = b.consume_delimiter(input, "{", "}"); - let (_, end) = b.consume(" }"); - - TokenTreeBuilder::tagged_brace(output, (start, end, b.anchor)) + TokenTreeBuilder::spanned_brace(tokens, (open, close), whole) }) } - pub fn tagged_brace(input: impl Into>, tag: impl Into) -> TokenNode { - TokenNode::Delimited(DelimitedNode::new(Delimiter::Brace, input.into()).tagged(tag.into())) + pub fn spanned_brace( + input: impl Into>, + spans: (Span, Span), + span: impl Into, + ) -> TokenNode { + TokenNode::Delimited( + DelimitedNode::new(Delimiter::Brace, spans, input.into()).spanned(span.into()), + ) } pub fn sp() -> CurriedToken { Box::new(|b| { let (start, end) = b.consume(" "); - TokenNode::Whitespace(Tag::from((start, end, b.anchor))) + TokenNode::Whitespace(Span::new(start, end)) }) } @@ -406,12 +391,12 @@ impl TokenTreeBuilder { Box::new(move |b| { let (start, end) = b.consume(&input); - TokenTreeBuilder::tagged_ws((start, end, b.anchor)) + TokenTreeBuilder::spanned_ws(Span::new(start, end)) }) } - pub fn tagged_ws(tag: impl Into) -> TokenNode { - TokenNode::Whitespace(tag.into()) + pub fn spanned_ws(span: impl Into) -> TokenNode { + TokenNode::Whitespace(span.into()) } fn 
consume(&mut self, input: &str) -> (usize, usize) { @@ -421,10 +406,10 @@ impl TokenTreeBuilder { (start, self.pos) } - fn consume_tag(&mut self, input: &str) -> Tag { + fn consume_span(&mut self, input: &str) -> Span { let start = self.pos; self.pos += input.len(); self.output.push_str(input); - (start, self.pos, self.anchor).into() + Span::new(start, self.pos) } } diff --git a/src/parser/parse/tokens.rs b/src/parser/parse/tokens.rs index d796a8fcb..94955d84d 100644 --- a/src/parser/parse/tokens.rs +++ b/src/parser/parse/tokens.rs @@ -1,38 +1,53 @@ -use crate::parser::parse::unit::*; +use crate::parser::Operator; use crate::prelude::*; -use crate::{Tagged, Text}; +use crate::Text; use std::fmt; use std::str::FromStr; #[derive(Debug, Clone, Copy, Eq, PartialEq, Ord, PartialOrd, Hash)] pub enum RawToken { Number(RawNumber), - Size(RawNumber, Unit), - String(Tag), - Variable(Tag), - ExternalCommand(Tag), + Operator(Operator), + String(Span), + Variable(Span), + ExternalCommand(Span), ExternalWord, GlobPattern, Bare, } +impl RawToken { + pub fn type_name(&self) -> &'static str { + match self { + RawToken::Number(_) => "Number", + RawToken::Operator(..) => "operator", + RawToken::String(_) => "String", + RawToken::Variable(_) => "variable", + RawToken::ExternalCommand(_) => "external command", + RawToken::ExternalWord => "external word", + RawToken::GlobPattern => "glob pattern", + RawToken::Bare => "String", + } + } +} + #[derive(Debug, Clone, Copy, Eq, PartialEq, Ord, PartialOrd, Hash)] pub enum RawNumber { - Int(Tag), - Decimal(Tag), + Int(Span), + Decimal(Span), } impl RawNumber { - pub fn int(tag: impl Into) -> Tagged { - let tag = tag.into(); + pub fn int(span: impl Into) -> Spanned { + let span = span.into(); - RawNumber::Int(tag).tagged(tag) + RawNumber::Int(span).spanned(span) } - pub fn decimal(tag: impl Into) -> Tagged { - let tag = tag.into(); + pub fn decimal(span: impl Into) -> Spanned { + let span = span.into(); - RawNumber::Decimal(tag).tagged(tag) + RawNumber::Decimal(span).spanned(span) } pub(crate) fn to_number(self, source: &Text) -> Number { @@ -45,22 +60,7 @@ impl RawNumber { } } -impl RawToken { - pub fn type_name(&self) -> &'static str { - match self { - RawToken::Number(_) => "Number", - RawToken::Size(..) 
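
The builder's whole position bookkeeping is the consume/consume_span pair above: every helper appends its literal text to the output buffer and records the byte range it occupied, so the builders produce both the token tree and the exact source string the spans point into. A trimmed stand-alone version of that cursor, shown on the `--raw` flag case:

    // Stand-in for TokenTreeBuilder's cursor; only the bookkeeping, none of the
    // token construction.
    struct MiniBuilder {
        pos: usize,
        output: String,
    }

    impl MiniBuilder {
        fn consume(&mut self, input: &str) -> (usize, usize) {
            let start = self.pos;
            self.pos += input.len();
            self.output.push_str(input);
            (start, self.pos)
        }
    }

    fn main() {
        let mut b = MiniBuilder { pos: 0, output: String::new() };
        let (flag_start, _) = b.consume("--");      // like b.consume("--") in flag()
        let (name_start, name_end) = b.consume("raw");
        assert_eq!(b.output, "--raw");
        assert_eq!((flag_start, name_end), (0, 5)); // outer span of the flag
        assert_eq!((name_start, name_end), (2, 5)); // inner span: the flag name
    }
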
=> "Size", - RawToken::String(_) => "String", - RawToken::Variable(_) => "Variable", - RawToken::ExternalCommand(_) => "ExternalCommand", - RawToken::ExternalWord => "ExternalWord", - RawToken::GlobPattern => "GlobPattern", - RawToken::Bare => "String", - } - } -} - -pub type Token = Tagged; +pub type Token = Spanned; impl Token { pub fn debug<'a>(&self, source: &'a Text) -> DebugToken<'a> { @@ -69,6 +69,76 @@ impl Token { source, } } + + pub fn extract_number(&self) -> Option> { + match self.item { + RawToken::Number(number) => Some((number).spanned(self.span)), + _ => None, + } + } + + pub fn extract_int(&self) -> Option<(Span, Span)> { + match self.item { + RawToken::Number(RawNumber::Int(int)) => Some((int, self.span)), + _ => None, + } + } + + pub fn extract_decimal(&self) -> Option<(Span, Span)> { + match self.item { + RawToken::Number(RawNumber::Decimal(decimal)) => Some((decimal, self.span)), + _ => None, + } + } + + pub fn extract_operator(&self) -> Option> { + match self.item { + RawToken::Operator(operator) => Some(operator.spanned(self.span)), + _ => None, + } + } + + pub fn extract_string(&self) -> Option<(Span, Span)> { + match self.item { + RawToken::String(span) => Some((span, self.span)), + _ => None, + } + } + + pub fn extract_variable(&self) -> Option<(Span, Span)> { + match self.item { + RawToken::Variable(span) => Some((span, self.span)), + _ => None, + } + } + + pub fn extract_external_command(&self) -> Option<(Span, Span)> { + match self.item { + RawToken::ExternalCommand(span) => Some((span, self.span)), + _ => None, + } + } + + pub fn extract_external_word(&self) -> Option { + match self.item { + RawToken::ExternalWord => Some(self.span), + _ => None, + } + } + + pub fn extract_glob_pattern(&self) -> Option { + match self.item { + RawToken::GlobPattern => Some(self.span), + _ => None, + } + } + + pub fn extract_bare(&self) -> Option { + match self.item { + RawToken::Bare => Some(self.span), + _ => None, + } + } } pub struct DebugToken<'a> { @@ -78,6 +148,6 @@ pub struct DebugToken<'a> { impl fmt::Debug for DebugToken<'_> { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - write!(f, "{}", self.node.tag().slice(self.source)) + write!(f, "{}", self.node.span.slice(self.source)) } } diff --git a/src/parser/parse_command.rs b/src/parser/parse_command.rs index 36ba82f8e..935794f3c 100644 --- a/src/parser/parse_command.rs +++ b/src/parser/parse_command.rs @@ -1,92 +1,38 @@ -use crate::context::Context; use crate::errors::{ArgumentError, ShellError}; +use crate::parser::hir::syntax_shape::{ + color_fallible_syntax, color_syntax, expand_expr, flat_shape::FlatShape, spaced, + BackoffColoringMode, ColorSyntax, MaybeSpaceShape, +}; use crate::parser::registry::{NamedType, PositionalType, Signature}; -use crate::parser::{baseline_parse_tokens, CallNode}; +use crate::parser::TokensIterator; use crate::parser::{ - hir::{self, NamedArguments}, - Flag, RawToken, TokenNode, + hir::{self, ExpandContext, NamedArguments}, + Flag, }; use crate::traits::ToDebug; -use crate::{Tag, Tagged, TaggedItem, Text}; +use crate::{Span, Spanned, Tag, Text}; use log::trace; -pub fn parse_command( +pub fn parse_command_tail( config: &Signature, - context: &Context, - call: &Tagged, - source: &Text, -) -> Result { - let Tagged { item: raw_call, .. 
} = call; - - trace!("Processing {:?}", config); - - let head = parse_command_head(call.head())?; - - let children: Option> = raw_call.children().as_ref().map(|nodes| { - nodes - .iter() - .cloned() - .filter(|node| match node { - TokenNode::Whitespace(_) => false, - _ => true, - }) - .collect() - }); - - match parse_command_tail(&config, context, children, source, call.tag())? { - None => Ok(hir::Call::new(Box::new(head), None, None)), - Some((positional, named)) => Ok(hir::Call::new(Box::new(head), positional, named)), - } -} - -fn parse_command_head(head: &TokenNode) -> Result { - match head { - TokenNode::Token( - spanned @ Tagged { - item: RawToken::Bare, - .. - }, - ) => Ok(spanned.map(|_| hir::RawExpression::Literal(hir::Literal::Bare))), - - TokenNode::Token(Tagged { - item: RawToken::String(inner_tag), - tag, - }) => Ok(hir::RawExpression::Literal(hir::Literal::String(*inner_tag)).tagged(*tag)), - - other => Err(ShellError::unexpected(&format!( - "command head -> {:?}", - other - ))), - } -} - -fn parse_command_tail( - config: &Signature, - context: &Context, - tail: Option>, - source: &Text, - command_tag: Tag, + context: &ExpandContext, + tail: &mut TokensIterator, + command_span: Span, ) -> Result>, Option)>, ShellError> { - let tail = &mut match &tail { - None => hir::TokensIterator::new(&[]), - Some(tail) => hir::TokensIterator::new(tail), - }; - let mut named = NamedArguments::new(); - - trace_remaining("nodes", tail.clone(), source); + trace_remaining("nodes", tail.clone(), context.source()); for (name, kind) in &config.named { trace!(target: "nu::parse", "looking for {} : {:?}", name, kind); match kind { NamedType::Switch => { - let flag = extract_switch(name, tail, source); + let flag = extract_switch(name, tail, context.source()); named.insert_switch(name, flag); } NamedType::Mandatory(syntax_type) => { - match extract_mandatory(config, name, tail, source, command_tag) { + match extract_mandatory(config, name, tail, context.source(), command_span) { Err(err) => return Err(err), // produce a correct diagnostic Ok((pos, flag)) => { tail.move_to(pos); @@ -95,46 +41,51 @@ fn parse_command_tail( return Err(ShellError::argument_error( config.name.clone(), ArgumentError::MissingValueForName(name.to_string()), - flag.tag(), + flag.span, )); } - let expr = - hir::baseline_parse_next_expr(tail, context, source, *syntax_type)?; + let expr = expand_expr(&spaced(*syntax_type), tail, context)?; tail.restart(); named.insert_mandatory(name, expr); } } } - NamedType::Optional(syntax_type) => match extract_optional(name, tail, source) { - Err(err) => return Err(err), // produce a correct diagnostic - Ok(Some((pos, flag))) => { - tail.move_to(pos); + NamedType::Optional(syntax_type) => { + match extract_optional(name, tail, context.source()) { + Err(err) => return Err(err), // produce a correct diagnostic + Ok(Some((pos, flag))) => { + tail.move_to(pos); - if tail.at_end() { - return Err(ShellError::argument_error( - config.name.clone(), - ArgumentError::MissingValueForName(name.to_string()), - flag.tag(), - )); + if tail.at_end() { + return Err(ShellError::argument_error( + config.name.clone(), + ArgumentError::MissingValueForName(name.to_string()), + flag.span, + )); + } + + let expr = expand_expr(&spaced(*syntax_type), tail, context); + + match expr { + Err(_) => named.insert_optional(name, None), + Ok(expr) => named.insert_optional(name, Some(expr)), + } + + tail.restart(); } - let expr = hir::baseline_parse_next_expr(tail, context, source, *syntax_type)?; - - tail.restart(); - 
named.insert_optional(name, Some(expr)); + Ok(None) => { + tail.restart(); + named.insert_optional(name, None); + } } - - Ok(None) => { - tail.restart(); - named.insert_optional(name, None); - } - }, + } }; } - trace_remaining("after named", tail.clone(), source); + trace_remaining("after named", tail.clone(), context.source()); let mut positional = vec![]; @@ -143,35 +94,49 @@ fn parse_command_tail( match arg { PositionalType::Mandatory(..) => { - if tail.len() == 0 { + if tail.at_end() { return Err(ShellError::argument_error( config.name.clone(), ArgumentError::MissingMandatoryPositional(arg.name().to_string()), - command_tag, + Tag { + span: command_span, + anchor: None, + }, )); } } PositionalType::Optional(..) => { - if tail.len() == 0 { + if tail.at_end() { break; } } } - let result = hir::baseline_parse_next_expr(tail, context, source, arg.syntax_type())?; + let result = expand_expr(&spaced(arg.syntax_type()), tail, context)?; positional.push(result); } - trace_remaining("after positional", tail.clone(), source); + trace_remaining("after positional", tail.clone(), context.source()); if let Some(syntax_type) = config.rest_positional { - let remainder = baseline_parse_tokens(tail, context, source, syntax_type)?; - positional.extend(remainder); + let mut out = vec![]; + + loop { + if tail.at_end_possible_ws() { + break; + } + + let next = expand_expr(&spaced(syntax_type), tail, context)?; + + out.push(next); + } + + positional.extend(out); } - trace_remaining("after rest", tail.clone(), source); + trace_remaining("after rest", tail.clone(), context.source()); trace!("Constructed positional={:?} named={:?}", positional, named); @@ -194,6 +159,232 @@ fn parse_command_tail( Ok(Some((positional, named))) } +#[derive(Debug)] +struct ColoringArgs { + vec: Vec>>>, +} + +impl ColoringArgs { + fn new(len: usize) -> ColoringArgs { + let vec = vec![None; len]; + ColoringArgs { vec } + } + + fn insert(&mut self, pos: usize, shapes: Vec>) { + self.vec[pos] = Some(shapes); + } + + fn spread_shapes(self, shapes: &mut Vec>) { + for item in self.vec { + match item { + None => {} + Some(vec) => { + shapes.extend(vec); + } + } + } + } +} + +#[derive(Debug, Copy, Clone)] +pub struct CommandTailShape; + +impl ColorSyntax for CommandTailShape { + type Info = (); + type Input = Signature; + + fn color_syntax<'a, 'b>( + &self, + signature: &Signature, + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + shapes: &mut Vec>, + ) -> Self::Info { + let mut args = ColoringArgs::new(token_nodes.len()); + trace_remaining("nodes", token_nodes.clone(), context.source()); + + for (name, kind) in &signature.named { + trace!(target: "nu::color_syntax", "looking for {} : {:?}", name, kind); + + match kind { + NamedType::Switch => { + match token_nodes.extract(|t| t.as_flag(name, context.source())) { + Some((pos, flag)) => args.insert(pos, vec![flag.color()]), + None => {} + } + } + NamedType::Mandatory(syntax_type) => { + match extract_mandatory( + signature, + name, + token_nodes, + context.source(), + Span::unknown(), + ) { + Err(_) => { + // The mandatory flag didn't exist at all, so there's nothing to color + } + Ok((pos, flag)) => { + let mut shapes = vec![flag.color()]; + token_nodes.move_to(pos); + + if token_nodes.at_end() { + args.insert(pos, shapes); + token_nodes.restart(); + continue; + } + + // We can live with unmatched syntax after a mandatory flag + let _ = token_nodes.atomic(|token_nodes| { + color_syntax(&MaybeSpaceShape, token_nodes, context, &mut shapes); + + // If the part after a 
mandatory flag isn't present, that's ok, but we + // should roll back any whitespace we chomped + color_fallible_syntax( + syntax_type, + token_nodes, + context, + &mut shapes, + ) + }); + + args.insert(pos, shapes); + token_nodes.restart(); + } + } + } + NamedType::Optional(syntax_type) => { + match extract_optional(name, token_nodes, context.source()) { + Err(_) => { + // The optional flag didn't exist at all, so there's nothing to color + } + Ok(Some((pos, flag))) => { + let mut shapes = vec![flag.color()]; + token_nodes.move_to(pos); + + if token_nodes.at_end() { + args.insert(pos, shapes); + token_nodes.restart(); + continue; + } + + // We can live with unmatched syntax after an optional flag + let _ = token_nodes.atomic(|token_nodes| { + color_syntax(&MaybeSpaceShape, token_nodes, context, &mut shapes); + + // If the part after a mandatory flag isn't present, that's ok, but we + // should roll back any whitespace we chomped + color_fallible_syntax( + syntax_type, + token_nodes, + context, + &mut shapes, + ) + }); + + args.insert(pos, shapes); + token_nodes.restart(); + } + + Ok(None) => { + token_nodes.restart(); + } + } + } + }; + } + + trace_remaining("after named", token_nodes.clone(), context.source()); + + for arg in &signature.positional { + trace!("Processing positional {:?}", arg); + + match arg { + PositionalType::Mandatory(..) => { + if token_nodes.at_end() { + break; + } + } + + PositionalType::Optional(..) => { + if token_nodes.at_end() { + break; + } + } + } + + let mut shapes = vec![]; + let pos = token_nodes.pos(false); + + match pos { + None => break, + Some(pos) => { + // We can live with an unmatched positional argument. Hopefully it will be + // matched by a future token + let _ = token_nodes.atomic(|token_nodes| { + color_syntax(&MaybeSpaceShape, token_nodes, context, &mut shapes); + + // If no match, we should roll back any whitespace we chomped + color_fallible_syntax( + &arg.syntax_type(), + token_nodes, + context, + &mut shapes, + )?; + + args.insert(pos, shapes); + + Ok(()) + }); + } + } + } + + trace_remaining("after positional", token_nodes.clone(), context.source()); + + if let Some(syntax_type) = signature.rest_positional { + loop { + if token_nodes.at_end_possible_ws() { + break; + } + + let pos = token_nodes.pos(false); + + match pos { + None => break, + Some(pos) => { + let mut shapes = vec![]; + + // If any arguments don't match, we'll fall back to backoff coloring mode + let result = token_nodes.atomic(|token_nodes| { + color_syntax(&MaybeSpaceShape, token_nodes, context, &mut shapes); + + // If no match, we should roll back any whitespace we chomped + color_fallible_syntax(&syntax_type, token_nodes, context, &mut shapes)?; + + args.insert(pos, shapes); + + Ok(()) + }); + + match result { + Err(_) => break, + Ok(_) => continue, + } + } + } + } + } + + args.spread_shapes(shapes); + + // Consume any remaining tokens with backoff coloring mode + color_syntax(&BackoffColoringMode, token_nodes, context, shapes); + + shapes.sort_by(|a, b| a.span.start().cmp(&b.span.start())); + } +} + fn extract_switch(name: &str, tokens: &mut hir::TokensIterator<'_>, source: &Text) -> Option { tokens .extract(|t| t.as_flag(name, source)) @@ -205,15 +396,15 @@ fn extract_mandatory( name: &str, tokens: &mut hir::TokensIterator<'_>, source: &Text, - tag: Tag, -) -> Result<(usize, Tagged), ShellError> { + span: Span, +) -> Result<(usize, Spanned), ShellError> { let flag = tokens.extract(|t| t.as_flag(name, source)); match flag { None => Err(ShellError::argument_error( 
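
CommandTailShape above discovers flags and their arguments in lookup order rather than source order, so the shapes are parked per token position in ColoringArgs and only flattened back at the end; a BackoffColoringMode pass then picks up anything unmatched and the result is sorted by span start. A simplified stand-in of that two-phase idea, with a placeholder shape enum rather than nu's FlatShape:

    #[derive(Debug, Clone, Copy, PartialEq)]
    enum MiniShape {
        Flag,
        Int,
        Word,
    }

    struct MiniColoringArgs {
        vec: Vec<Option<Vec<MiniShape>>>,
    }

    impl MiniColoringArgs {
        fn new(len: usize) -> Self {
            MiniColoringArgs { vec: vec![None; len] }
        }

        // Record the shapes produced while sitting at token position `pos`.
        fn insert(&mut self, pos: usize, shapes: Vec<MiniShape>) {
            self.vec[pos] = Some(shapes);
        }

        // Flatten back into position order, skipping positions nothing claimed.
        fn spread_shapes(self, shapes: &mut Vec<MiniShape>) {
            for slot in self.vec.into_iter().flatten() {
                shapes.extend(slot);
            }
        }
    }

    fn main() {
        // Shapes can be inserted out of order (named flags are processed before
        // positionals); spreading restores token order.
        let mut args = MiniColoringArgs::new(5);
        args.insert(3, vec![MiniShape::Word]);
        args.insert(1, vec![MiniShape::Flag, MiniShape::Int]);

        let mut out = vec![];
        args.spread_shapes(&mut out);
        assert_eq!(out, vec![MiniShape::Flag, MiniShape::Int, MiniShape::Word]);
    }
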
config.name.clone(), ArgumentError::MissingMandatoryFlag(name.to_string()), - tag, + span, )), Some((pos, flag)) => { @@ -227,7 +418,7 @@ fn extract_optional( name: &str, tokens: &mut hir::TokensIterator<'_>, source: &Text, -) -> Result<(Option<(usize, Tagged)>), ShellError> { +) -> Result<(Option<(usize, Spanned)>), ShellError> { let flag = tokens.extract(|t| t.as_flag(name, source)); match flag { @@ -241,6 +432,7 @@ fn extract_optional( pub fn trace_remaining(desc: &'static str, tail: hir::TokensIterator<'_>, source: &Text) { trace!( + target: "nu::expand_args", "{} = {:?}", desc, itertools::join( diff --git a/src/parser/registry.rs b/src/parser/registry.rs index 955a1a04c..790925e80 100644 --- a/src/parser/registry.rs +++ b/src/parser/registry.rs @@ -1,11 +1,11 @@ // TODO: Temporary redirect pub(crate) use crate::context::CommandRegistry; use crate::evaluate::{evaluate_baseline_expr, Scope}; -use crate::parser::{hir, hir::SyntaxShape, parse_command, CallNode}; +use crate::parser::{hir, hir::SyntaxShape}; use crate::prelude::*; use derive_new::new; use indexmap::IndexMap; -use log::trace; + use serde::{Deserialize, Serialize}; use std::fmt; @@ -271,21 +271,6 @@ impl<'a> Iterator for PositionalIter<'a> { } } -impl Signature { - pub(crate) fn parse_args( - &self, - call: &Tagged, - context: &Context, - source: &Text, - ) -> Result { - let args = parse_command(self, context, call, source)?; - - trace!("parsed args: {:?}", args); - - Ok(args) - } -} - pub(crate) fn evaluate_args( call: &hir::Call, registry: &CommandRegistry, @@ -313,7 +298,7 @@ pub(crate) fn evaluate_args( for (name, value) in n.named.iter() { match value { hir::named::NamedValue::PresentSwitch(tag) => { - results.insert(name.clone(), Value::boolean(true).tagged(*tag)); + results.insert(name.clone(), Value::boolean(true).tagged(tag)); } hir::named::NamedValue::Value(expr) => { results.insert( diff --git a/src/plugin.rs b/src/plugin.rs index afd987110..004e937fe 100644 --- a/src/plugin.rs +++ b/src/plugin.rs @@ -32,7 +32,7 @@ pub fn serve_plugin(plugin: &mut dyn Plugin) { let input = match input { Some(arg) => std::fs::read_to_string(arg), None => { - send_response(ShellError::string(format!("No input given."))); + send_response(ShellError::untagged_runtime_error("No input given.")); return; } }; @@ -64,7 +64,7 @@ pub fn serve_plugin(plugin: &mut dyn Plugin) { return; } e => { - send_response(ShellError::string(format!( + send_response(ShellError::untagged_runtime_error(format!( "Could not handle plugin message: {} {:?}", input, e ))); @@ -102,7 +102,7 @@ pub fn serve_plugin(plugin: &mut dyn Plugin) { break; } e => { - send_response(ShellError::string(format!( + send_response(ShellError::untagged_runtime_error(format!( "Could not handle plugin message: {} {:?}", input, e ))); @@ -111,7 +111,7 @@ pub fn serve_plugin(plugin: &mut dyn Plugin) { } } e => { - send_response(ShellError::string(format!( + send_response(ShellError::untagged_runtime_error(format!( "Could not handle plugin message: {:?}", e, ))); diff --git a/src/plugins/add.rs b/src/plugins/add.rs index 03e1d4282..98cf3819b 100644 --- a/src/plugins/add.rs +++ b/src/plugins/add.rs @@ -1,10 +1,13 @@ +use itertools::Itertools; use nu::{ - serve_plugin, CallInfo, Plugin, Primitive, ReturnSuccess, ReturnValue, ShellError, Signature, - SyntaxShape, Tagged, Value, + serve_plugin, CallInfo, Plugin, ReturnSuccess, ReturnValue, ShellError, Signature, SyntaxShape, + Tagged, TaggedItem, Value, }; +pub type ColumnPath = Vec>; + struct Add { - field: Option, + field: Option, value: 
Option, } impl Add { @@ -19,23 +22,30 @@ impl Add { let value_tag = value.tag(); match (value.item, self.value.clone()) { (obj @ Value::Row(_), Some(v)) => match &self.field { - Some(f) => match obj.insert_data_at_path(value_tag, &f, v) { + Some(f) => match obj.insert_data_at_column_path(value_tag.clone(), &f, v) { Some(v) => return Ok(v), None => { - return Err(ShellError::string(format!( - "add could not find place to insert field {:?} {}", - obj, f - ))) + return Err(ShellError::labeled_error( + format!( + "add could not find place to insert field {:?} {}", + obj, + f.iter().map(|i| &i.item).join(".") + ), + "column name", + &value_tag, + )) } }, - None => Err(ShellError::string( + None => Err(ShellError::labeled_error( "add needs a column name when adding a value to a table", + "column name", + value_tag, )), }, - x => Err(ShellError::string(format!( - "Unrecognized type in stream: {:?}", - x - ))), + (value, _) => Err(ShellError::type_error( + "row", + value.type_name().tagged(value_tag), + )), } } } @@ -44,7 +54,7 @@ impl Plugin for Add { fn config(&mut self) -> Result { Ok(Signature::build("add") .desc("Add a new field to the table.") - .required("Field", SyntaxShape::String) + .required("Field", SyntaxShape::ColumnPath) .required("Value", SyntaxShape::String) .rest(SyntaxShape::String) .filter()) @@ -53,18 +63,14 @@ impl Plugin for Add { fn begin_filter(&mut self, call_info: CallInfo) -> Result, ShellError> { if let Some(args) = call_info.args.positional { match &args[0] { - Tagged { - item: Value::Primitive(Primitive::String(s)), + table @ Tagged { + item: Value::Table(_), .. } => { - self.field = Some(s.clone()); - } - _ => { - return Err(ShellError::string(format!( - "Unrecognized type in params: {:?}", - args[0] - ))) + self.field = Some(table.as_column_path()?.item); } + + value => return Err(ShellError::type_error("table", value.tagged_type_name())), } match &args[1] { Tagged { item: v, .. 
} => { diff --git a/src/plugins/binaryview.rs b/src/plugins/binaryview.rs index d5488d324..b834f440e 100644 --- a/src/plugins/binaryview.rs +++ b/src/plugins/binaryview.rs @@ -24,8 +24,7 @@ impl Plugin for BinaryView { let value_anchor = v.anchor(); match v.item { Value::Primitive(Primitive::Binary(b)) => { - let source = call_info.source_map.get(&value_anchor); - let _ = view_binary(&b, source, call_info.args.has("lores")); + let _ = view_binary(&b, value_anchor.as_ref(), call_info.args.has("lores")); } _ => {} } diff --git a/src/plugins/edit.rs b/src/plugins/edit.rs index db116fedf..34653bd66 100644 --- a/src/plugins/edit.rs +++ b/src/plugins/edit.rs @@ -1,10 +1,12 @@ use nu::{ - serve_plugin, CallInfo, Plugin, Primitive, ReturnSuccess, ReturnValue, ShellError, Signature, - SyntaxShape, Tagged, Value, + serve_plugin, CallInfo, Plugin, ReturnSuccess, ReturnValue, ShellError, Signature, SyntaxShape, + Tagged, Value, }; +pub type ColumnPath = Tagged>>; + struct Edit { - field: Option, + field: Option, value: Option, } impl Edit { @@ -19,22 +21,25 @@ impl Edit { let value_tag = value.tag(); match (value.item, self.value.clone()) { (obj @ Value::Row(_), Some(v)) => match &self.field { - Some(f) => match obj.replace_data_at_path(value_tag, &f, v) { + Some(f) => match obj.replace_data_at_column_path(value_tag, &f, v) { Some(v) => return Ok(v), None => { - return Err(ShellError::string( + return Err(ShellError::labeled_error( "edit could not find place to insert column", + "column name", + &f.tag, )) } }, - None => Err(ShellError::string( + None => Err(ShellError::untagged_runtime_error( "edit needs a column when changing a value in a table", )), }, - x => Err(ShellError::string(format!( - "Unrecognized type in stream: {:?}", - x - ))), + _ => Err(ShellError::labeled_error( + "Unrecognized type in stream", + "original value", + value_tag, + )), } } } @@ -43,7 +48,7 @@ impl Plugin for Edit { fn config(&mut self) -> Result { Ok(Signature::build("edit") .desc("Edit an existing column to have a new value.") - .required("Field", SyntaxShape::String) + .required("Field", SyntaxShape::ColumnPath) .required("Value", SyntaxShape::String) .filter()) } @@ -51,18 +56,13 @@ impl Plugin for Edit { fn begin_filter(&mut self, call_info: CallInfo) -> Result, ShellError> { if let Some(args) = call_info.args.positional { match &args[0] { - Tagged { - item: Value::Primitive(Primitive::String(s)), + table @ Tagged { + item: Value::Table(_), .. } => { - self.field = Some(s.clone()); - } - _ => { - return Err(ShellError::string(format!( - "Unrecognized type in params: {:?}", - args[0] - ))) + self.field = Some(table.as_column_path()?); } + value => return Err(ShellError::type_error("table", value.tagged_type_name())), } match &args[1] { Tagged { item: v, .. 
} => { diff --git a/src/plugins/embed.rs b/src/plugins/embed.rs index 646db8091..97dd6a271 100644 --- a/src/plugins/embed.rs +++ b/src/plugins/embed.rs @@ -25,8 +25,10 @@ impl Embed { }); Ok(()) } - None => Err(ShellError::string( + None => Err(ShellError::labeled_error( "embed needs a field when embedding a value", + "original value", + &tag, )), }, } @@ -52,12 +54,7 @@ impl Plugin for Embed { self.field = Some(s.clone()); self.values = Vec::new(); } - _ => { - return Err(ShellError::string(format!( - "Unrecognized type in params: {:?}", - args[0] - ))) - } + value => return Err(ShellError::type_error("string", value.tagged_type_name())), } } diff --git a/src/plugins/inc.rs b/src/plugins/inc.rs index ecab03dc9..38788014a 100644 --- a/src/plugins/inc.rs +++ b/src/plugins/inc.rs @@ -14,8 +14,10 @@ pub enum SemVerAction { Patch, } +pub type ColumnPath = Tagged>>; + struct Inc { - field: Option, + field: Option, error: Option, action: Option, } @@ -80,35 +82,42 @@ impl Inc { Value::Primitive(Primitive::Bytes(b)) => { Ok(Value::bytes(b + 1 as u64).tagged(value.tag())) } - Value::Primitive(Primitive::String(ref s)) => { - Ok(Tagged::from_item(self.apply(&s)?, value.tag())) - } + Value::Primitive(Primitive::String(ref s)) => Ok(self.apply(&s)?.tagged(value.tag())), Value::Row(_) => match self.field { Some(ref f) => { - let replacement = match value.item.get_data_by_path(value.tag(), f) { + let replacement = match value.item.get_data_by_column_path(value.tag(), f) { Some(result) => self.inc(result.map(|x| x.clone()))?, None => { - return Err(ShellError::string("inc could not find field to replace")) + return Err(ShellError::labeled_error( + "inc could not find field to replace", + "column name", + &f.tag, + )) } }; - match value - .item - .replace_data_at_path(value.tag(), f, replacement.item.clone()) - { + match value.item.replace_data_at_column_path( + value.tag(), + f, + replacement.item.clone(), + ) { Some(v) => return Ok(v), None => { - return Err(ShellError::string("inc could not find field to replace")) + return Err(ShellError::labeled_error( + "inc could not find field to replace", + "column name", + &f.tag, + )) } } } - None => Err(ShellError::string( + None => Err(ShellError::untagged_runtime_error( "inc needs a field when incrementing a column in a table", )), }, - x => Err(ShellError::string(format!( - "Unrecognized type in stream: {:?}", - x - ))), + _ => Err(ShellError::type_error( + "incrementable value", + value.tagged_type_name(), + )), } } } @@ -120,7 +129,7 @@ impl Plugin for Inc { .switch("major") .switch("minor") .switch("patch") - .rest(SyntaxShape::String) + .rest(SyntaxShape::ColumnPath) .filter()) } @@ -138,18 +147,13 @@ impl Plugin for Inc { if let Some(args) = call_info.args.positional { for arg in args { match arg { - Tagged { - item: Value::Primitive(Primitive::String(s)), + table @ Tagged { + item: Value::Table(_), .. 
} => { - self.field = Some(s); - } - _ => { - return Err(ShellError::string(format!( - "Unrecognized type in params: {:?}", - arg - ))) + self.field = Some(table.as_column_path()?); } + value => return Err(ShellError::type_error("table", value.tagged_type_name())), } } } @@ -160,7 +164,11 @@ impl Plugin for Inc { match &self.error { Some(reason) => { - return Err(ShellError::string(format!("{}: {}", reason, Inc::usage()))) + return Err(ShellError::untagged_runtime_error(format!( + "{}: {}", + reason, + Inc::usage() + ))) } None => Ok(vec![]), } @@ -181,20 +189,18 @@ mod tests { use super::{Inc, SemVerAction}; use indexmap::IndexMap; use nu::{ - CallInfo, EvaluatedArgs, Plugin, ReturnSuccess, SourceMap, Tag, Tagged, TaggedDictBuilder, - TaggedItem, Value, + CallInfo, EvaluatedArgs, Plugin, ReturnSuccess, Tag, Tagged, TaggedDictBuilder, TaggedItem, + Value, }; struct CallStub { - anchor: uuid::Uuid, positionals: Vec>, flags: IndexMap>, } impl CallStub { - fn new(anchor: uuid::Uuid) -> CallStub { + fn new() -> CallStub { CallStub { - anchor, positionals: vec![], flags: indexmap::IndexMap::new(), } @@ -209,16 +215,20 @@ mod tests { } fn with_parameter(&mut self, name: &str) -> &mut Self { + let fields: Vec> = name + .split(".") + .map(|s| Value::string(s.to_string()).tagged(Tag::unknown())) + .collect(); + self.positionals - .push(Value::string(name.to_string()).tagged(Tag::unknown_span(self.anchor))); + .push(Value::Table(fields).tagged(Tag::unknown())); self } fn create(&self) -> CallInfo { CallInfo { args: EvaluatedArgs::new(Some(self.positionals.clone()), Some(self.flags.clone())), - source_map: SourceMap::new(), - name_tag: Tag::unknown_span(self.anchor), + name_tag: Tag::unknown(), } } } @@ -245,7 +255,7 @@ mod tests { let mut plugin = Inc::new(); assert!(plugin - .begin_filter(CallStub::new(test_uuid()).with_long_flag("major").create()) + .begin_filter(CallStub::new().with_long_flag("major").create()) .is_ok()); assert!(plugin.action.is_some()); } @@ -255,7 +265,7 @@ mod tests { let mut plugin = Inc::new(); assert!(plugin - .begin_filter(CallStub::new(test_uuid()).with_long_flag("minor").create()) + .begin_filter(CallStub::new().with_long_flag("minor").create()) .is_ok()); assert!(plugin.action.is_some()); } @@ -265,7 +275,7 @@ mod tests { let mut plugin = Inc::new(); assert!(plugin - .begin_filter(CallStub::new(test_uuid()).with_long_flag("patch").create()) + .begin_filter(CallStub::new().with_long_flag("patch").create()) .is_ok()); assert!(plugin.action.is_some()); } @@ -276,7 +286,7 @@ mod tests { assert!(plugin .begin_filter( - CallStub::new(test_uuid()) + CallStub::new() .with_long_flag("major") .with_long_flag("minor") .create(), @@ -290,14 +300,15 @@ mod tests { let mut plugin = Inc::new(); assert!(plugin - .begin_filter( - CallStub::new(test_uuid()) - .with_parameter("package.version") - .create() - ) + .begin_filter(CallStub::new().with_parameter("package.version").create()) .is_ok()); - assert_eq!(plugin.field, Some("package.version".to_string())); + assert_eq!( + plugin + .field + .map(|f| f.iter().map(|f| f.item.clone()).collect()), + Some(vec!["package".to_string(), "version".to_string()]) + ); } #[test] @@ -327,7 +338,7 @@ mod tests { assert!(plugin .begin_filter( - CallStub::new(test_uuid()) + CallStub::new() .with_long_flag("major") .with_parameter("version") .create() @@ -355,7 +366,7 @@ mod tests { assert!(plugin .begin_filter( - CallStub::new(test_uuid()) + CallStub::new() .with_long_flag("minor") .with_parameter("version") .create() @@ -384,7 +395,7 @@ mod tests { 
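
The add, edit and inc plugins above (and str further down) now declare their column argument as SyntaxShape::ColumnPath and keep it as a vector of tagged segments instead of one dotted string; the reworked with_parameter helper in these tests shows the shape being fed in. A stand-in of that conversion, with a simplified tag wrapper rather than nu's Tagged:

    #[derive(Debug, Clone, PartialEq)]
    struct MiniTagged<T> {
        item: T,
    }

    type MiniColumnPath = Vec<MiniTagged<String>>;

    // Split a dotted column name into per-segment values, the way the updated
    // with_parameter helper does.
    fn to_column_path(name: &str) -> MiniColumnPath {
        name.split('.')
            .map(|segment| MiniTagged { item: segment.to_string() })
            .collect()
    }

    fn main() {
        let path = to_column_path("package.version");
        let segments: Vec<&str> = path.iter().map(|t| t.item.as_str()).collect();
        assert_eq!(segments, vec!["package", "version"]);
    }
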
assert!(plugin .begin_filter( - CallStub::new(test_uuid()) + CallStub::new() .with_long_flag("patch") .with_parameter(&field) .create() @@ -405,8 +416,4 @@ mod tests { _ => {} } } - - fn test_uuid() -> uuid::Uuid { - uuid::Uuid::nil() - } } diff --git a/src/plugins/match.rs b/src/plugins/match.rs index 1f2aad83f..713352405 100644 --- a/src/plugins/match.rs +++ b/src/plugins/match.rs @@ -35,11 +35,12 @@ impl Plugin for Match { } => { self.column = s.clone(); } - _ => { - return Err(ShellError::string(format!( - "Unrecognized type in params: {:?}", - args[0] - ))); + Tagged { tag, .. } => { + return Err(ShellError::labeled_error( + "Unrecognized type in params", + "value", + tag, + )); } } match &args[1] { @@ -49,11 +50,12 @@ impl Plugin for Match { } => { self.regex = Regex::new(s).unwrap(); } - _ => { - return Err(ShellError::string(format!( - "Unrecognized type in params: {:?}", - args[1] - ))); + Tagged { tag, .. } => { + return Err(ShellError::labeled_error( + "Unrecognized type in params", + "value", + tag, + )); } } } @@ -65,7 +67,7 @@ impl Plugin for Match { match &input { Tagged { item: Value::Row(dict), - .. + tag, } => { if let Some(val) = dict.entries.get(&self.column) { match val { @@ -75,22 +77,20 @@ impl Plugin for Match { } => { flag = self.regex.is_match(s); } - _ => { - return Err(ShellError::string(format!( - "value is not a string! {:?}", - &val - ))); + Tagged { tag, .. } => { + return Err(ShellError::labeled_error("expected string", "value", tag)); } } } else { - return Err(ShellError::string(format!( - "column not in row! {:?} {:?}", - &self.column, dict - ))); + return Err(ShellError::labeled_error( + format!("column not in row! {:?} {:?}", &self.column, dict), + "row", + tag, + )); } } - _ => { - return Err(ShellError::string(format!("Not a row! {:?}", &input))); + Tagged { tag, .. 
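For the match plugin above: each row is checked by looking up the configured column and testing its string value, with a labeled error when the column is missing. A simplified, std-only sketch of that flow; a plain substring test stands in for the regex crate and a HashMap stands in for nu's Row dictionary.

    use std::collections::HashMap;

    fn row_matches(
        row: &HashMap<String, String>,
        column: &str,
        needle: &str,
    ) -> Result<bool, String> {
        match row.get(column) {
            Some(value) => Ok(value.contains(needle)),
            None => Err(format!("column '{}' not in row", column)),
        }
    }

    fn main() {
        let mut row = HashMap::new();
        row.insert("name".to_string(), "Cargo.toml".to_string());

        assert_eq!(row_matches(&row, "name", "toml"), Ok(true));
        assert!(row_matches(&row, "size", "toml").is_err());
    }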
} => { + return Err(ShellError::labeled_error("Expected row", "value", tag)); } } if flag { diff --git a/src/plugins/ps.rs b/src/plugins/ps.rs index 1ae9938d3..2db73d395 100644 --- a/src/plugins/ps.rs +++ b/src/plugins/ps.rs @@ -40,7 +40,7 @@ async fn ps(tag: Tag) -> Vec> { let mut output = vec![]; while let Some(res) = processes.next().await { if let Ok((process, usage)) = res { - let mut dict = TaggedDictBuilder::new(tag); + let mut dict = TaggedDictBuilder::new(&tag); dict.insert("pid", Value::int(process.pid())); if let Ok(name) = process.name().await { dict.insert("name", Value::string(name)); diff --git a/src/plugins/str.rs b/src/plugins/str.rs index 4b74914f0..60625e7f1 100644 --- a/src/plugins/str.rs +++ b/src/plugins/str.rs @@ -1,6 +1,6 @@ use nu::{ serve_plugin, CallInfo, Plugin, Primitive, ReturnSuccess, ReturnValue, ShellError, Signature, - SyntaxShape, Tagged, Value, + SyntaxShape, Tagged, TaggedItem, Value, }; #[derive(Debug, Eq, PartialEq)] @@ -10,8 +10,10 @@ enum Action { ToInteger, } +pub type ColumnPath = Vec>; + struct Str { - field: Option, + field: Option, params: Option>, error: Option, action: Option, @@ -43,8 +45,8 @@ impl Str { Ok(applied) } - fn for_field(&mut self, field: &str) { - self.field = Some(String::from(field)); + fn for_field(&mut self, column_path: ColumnPath) { + self.field = Some(column_path); } fn permit(&mut self) -> bool { @@ -87,35 +89,38 @@ impl Str { impl Str { fn strutils(&self, value: Tagged) -> Result, ShellError> { match value.item { - Value::Primitive(Primitive::String(ref s)) => { - Ok(Tagged::from_item(self.apply(&s)?, value.tag())) - } + Value::Primitive(Primitive::String(ref s)) => Ok(self.apply(&s)?.tagged(value.tag())), Value::Row(_) => match self.field { Some(ref f) => { - let replacement = match value.item.get_data_by_path(value.tag(), f) { + let replacement = match value.item.get_data_by_column_path(value.tag(), f) { Some(result) => self.strutils(result.map(|x| x.clone()))?, - None => return Ok(Tagged::from_item(Value::nothing(), value.tag)), + None => return Ok(Value::nothing().tagged(value.tag)), }; - match value - .item - .replace_data_at_path(value.tag(), f, replacement.item.clone()) - { + match value.item.replace_data_at_column_path( + value.tag(), + f, + replacement.item.clone(), + ) { Some(v) => return Ok(v), None => { - return Err(ShellError::string("str could not find field to replace")) + return Err(ShellError::type_error( + "column name", + value.tagged_type_name(), + )) } } } - None => Err(ShellError::string(format!( + None => Err(ShellError::untagged_runtime_error(format!( "{}: {}", "str needs a column when applied to a value in a row", Str::usage() ))), }, - x => Err(ShellError::string(format!( - "Unrecognized type in stream: {:?}", - x - ))), + _ => Err(ShellError::labeled_error( + "Unrecognized type in stream", + value.type_name(), + value.tag, + )), } } } @@ -127,7 +132,7 @@ impl Plugin for Str { .switch("downcase") .switch("upcase") .switch("to-int") - .rest(SyntaxShape::Member) + .rest(SyntaxShape::ColumnPath) .filter()) } @@ -148,20 +153,27 @@ impl Plugin for Str { match possible_field { Tagged { item: Value::Primitive(Primitive::String(s)), - .. + tag, } => match self.action { Some(Action::Downcase) | Some(Action::Upcase) | Some(Action::ToInteger) | None => { - self.for_field(&s); + self.for_field(vec![s.clone().tagged(tag)]); } }, + table @ Tagged { + item: Value::Table(_), + .. 
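Several call sites above switch from handing the Tag to the builder by value to passing a reference (TaggedDictBuilder::new(&tag)), so one tag can seed many dictionaries in a loop. A small sketch of that idea, under the assumption that the constructor accepts anything convertible into a Tag and clones internally; all names here are stand-ins.

    #[derive(Clone, Debug)]
    struct Tag {
        span: (usize, usize),
    }

    impl From<&Tag> for Tag {
        fn from(tag: &Tag) -> Tag {
            tag.clone()
        }
    }

    struct DictBuilder {
        tag: Tag,
        entries: Vec<(String, String)>,
    }

    impl DictBuilder {
        // `impl Into<Tag>` lets callers hand over either `tag` or `&tag`.
        fn new(tag: impl Into<Tag>) -> Self {
            DictBuilder { tag: tag.into(), entries: Vec::new() }
        }

        fn insert(&mut self, key: &str, value: &str) {
            self.entries.push((key.to_string(), value.to_string()));
        }
    }

    fn main() {
        let tag = Tag { span: (0, 4) };
        for name in ["pid", "name"] {
            // The same tag is reused each iteration because only a reference moves.
            let mut dict = DictBuilder::new(&tag);
            dict.insert("column", name);
            println!("{:?} -> {:?}", dict.tag.span, dict.entries);
        }
    }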
+ } => { + self.field = Some(table.as_column_path()?.item); + } _ => { - return Err(ShellError::string(format!( - "Unrecognized type in params: {:?}", - possible_field - ))) + return Err(ShellError::labeled_error( + "Unrecognized type in params", + possible_field.type_name(), + &possible_field.tag, + )) } } } @@ -178,7 +190,11 @@ impl Plugin for Str { match &self.error { Some(reason) => { - return Err(ShellError::string(format!("{}: {}", reason, Str::usage()))) + return Err(ShellError::untagged_runtime_error(format!( + "{}: {}", + reason, + Str::usage() + ))) } None => Ok(vec![]), } @@ -198,13 +214,12 @@ mod tests { use super::{Action, Str}; use indexmap::IndexMap; use nu::{ - CallInfo, EvaluatedArgs, Plugin, Primitive, ReturnSuccess, SourceMap, Tag, Tagged, - TaggedDictBuilder, TaggedItem, Value, + CallInfo, EvaluatedArgs, Plugin, Primitive, ReturnSuccess, Tag, Tagged, TaggedDictBuilder, + TaggedItem, Value, }; use num_bigint::BigInt; struct CallStub { - anchor: uuid::Uuid, positionals: Vec>, flags: IndexMap>, } @@ -212,7 +227,6 @@ mod tests { impl CallStub { fn new() -> CallStub { CallStub { - anchor: uuid::Uuid::nil(), positionals: vec![], flags: indexmap::IndexMap::new(), } @@ -227,16 +241,20 @@ mod tests { } fn with_parameter(&mut self, name: &str) -> &mut Self { + let fields: Vec> = name + .split(".") + .map(|s| Value::string(s.to_string()).tagged(Tag::unknown())) + .collect(); + self.positionals - .push(Value::string(name.to_string()).tagged(Tag::unknown())); + .push(Value::Table(fields).tagged(Tag::unknown())); self } fn create(&self) -> CallInfo { CallInfo { args: EvaluatedArgs::new(Some(self.positionals.clone()), Some(self.flags.clone())), - source_map: SourceMap::new(), - name_tag: Tag::unknown_span(self.anchor), + name_tag: Tag::unknown(), } } } @@ -248,7 +266,7 @@ mod tests { } fn unstructured_sample_record(value: &str) -> Tagged { - Tagged::from_item(Value::string(value), Tag::unknown()) + Value::string(value).tagged(Tag::unknown()) } #[test] @@ -303,7 +321,12 @@ mod tests { ) .is_ok()); - assert_eq!(plugin.field, Some("package.description".to_string())); + assert_eq!( + plugin + .field + .map(|f| f.into_iter().map(|f| f.item).collect()), + Some(vec!["package".to_string(), "description".to_string()]) + ) } #[test] diff --git a/src/plugins/sum.rs b/src/plugins/sum.rs index ffb39cb90..d08d45713 100644 --- a/src/plugins/sum.rs +++ b/src/plugins/sum.rs @@ -21,20 +21,22 @@ impl Sum { tag, }) => { //TODO: handle overflow - self.total = Some(Value::int(i + j).tagged(*tag)); + self.total = Some(Value::int(i + j).tagged(tag)); Ok(()) } None => { self.total = Some(value.clone()); Ok(()) } - _ => Err(ShellError::string(format!( - "Could not sum non-integer or unrelated types" - ))), + _ => Err(ShellError::labeled_error( + "Could not sum non-integer or unrelated types", + "source", + value.tag, + )), } } Value::Primitive(Primitive::Bytes(b)) => { - match self.total { + match &self.total { Some(Tagged { item: Value::Primitive(Primitive::Bytes(j)), tag, @@ -47,15 +49,18 @@ impl Sum { self.total = Some(value); Ok(()) } - _ => Err(ShellError::string(format!( - "Could not sum non-integer or unrelated types" - ))), + _ => Err(ShellError::labeled_error( + "Could not sum non-integer or unrelated types", + "source", + value.tag, + )), } } - x => Err(ShellError::string(format!( - "Unrecognized type in stream: {:?}", - x - ))), + x => Err(ShellError::labeled_error( + format!("Unrecognized type in stream: {:?}", x), + "source", + value.tag, + )), } } } diff --git a/src/plugins/sys.rs 
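The sum plugin above folds incoming primitives into a running total and now reports a labeled error, pointing at the offending value's tag, when it meets something it cannot add. A condensed std-only sketch of that fold with stand-in types:

    #[derive(Debug, PartialEq)]
    enum Primitive {
        Int(i64),
        Bytes(u64),
        Nothing,
    }

    struct Sum {
        total: Option<Primitive>,
    }

    impl Sum {
        fn action(&mut self, value: Primitive) -> Result<(), String> {
            let new_total = match (self.total.take(), value) {
                (None, v) => v,
                (Some(Primitive::Int(i)), Primitive::Int(j)) => Primitive::Int(i + j),
                (Some(Primitive::Bytes(b)), Primitive::Bytes(c)) => Primitive::Bytes(b + c),
                (prev, other) => {
                    // Restore the previous total before reporting the error.
                    self.total = prev;
                    return Err(format!(
                        "Could not sum non-integer or unrelated types: {:?}",
                        other
                    ));
                }
            };
            self.total = Some(new_total);
            Ok(())
        }
    }

    fn main() {
        let mut sum = Sum { total: None };
        sum.action(Primitive::Int(1)).unwrap();
        sum.action(Primitive::Int(2)).unwrap();
        assert_eq!(sum.total, Some(Primitive::Int(3)));
        assert!(sum.action(Primitive::Nothing).is_err());
    }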
b/src/plugins/sys.rs index 1f86b51d7..55bf5028b 100644 --- a/src/plugins/sys.rs +++ b/src/plugins/sys.rs @@ -80,7 +80,7 @@ async fn mem(tag: Tag) -> Tagged { } async fn host(tag: Tag) -> Tagged { - let mut dict = TaggedDictBuilder::with_capacity(tag, 6); + let mut dict = TaggedDictBuilder::with_capacity(&tag, 6); let (platform_result, uptime_result) = futures::future::join(host::platform(), host::uptime()).await; @@ -95,7 +95,7 @@ async fn host(tag: Tag) -> Tagged { // Uptime if let Ok(uptime) = uptime_result { - let mut uptime_dict = TaggedDictBuilder::with_capacity(tag, 4); + let mut uptime_dict = TaggedDictBuilder::with_capacity(&tag, 4); let uptime = uptime.get::().round() as i64; let days = uptime / (60 * 60 * 24); @@ -116,7 +116,10 @@ async fn host(tag: Tag) -> Tagged { let mut user_vec = vec![]; while let Some(user) = users.next().await { if let Ok(user) = user { - user_vec.push(Tagged::from_item(Value::string(user.username()), tag)); + user_vec.push(Tagged { + item: Value::string(user.username()), + tag: tag.clone(), + }); } } let user_list = Value::Table(user_vec); @@ -130,7 +133,7 @@ async fn disks(tag: Tag) -> Option { let mut partitions = disk::partitions_physical(); while let Some(part) = partitions.next().await { if let Ok(part) = part { - let mut dict = TaggedDictBuilder::with_capacity(tag, 6); + let mut dict = TaggedDictBuilder::with_capacity(&tag, 6); dict.insert( "device", Value::string( @@ -176,7 +179,7 @@ async fn battery(tag: Tag) -> Option { if let Ok(batteries) = manager.batteries() { for battery in batteries { if let Ok(battery) = battery { - let mut dict = TaggedDictBuilder::new(tag); + let mut dict = TaggedDictBuilder::new(&tag); if let Some(vendor) = battery.vendor() { dict.insert("vendor", Value::string(vendor)); } @@ -217,7 +220,7 @@ async fn temp(tag: Tag) -> Option { let mut sensors = sensors::temperatures(); while let Some(sensor) = sensors.next().await { if let Ok(sensor) = sensor { - let mut dict = TaggedDictBuilder::new(tag); + let mut dict = TaggedDictBuilder::new(&tag); dict.insert("unit", Value::string(sensor.unit())); if let Some(label) = sensor.label() { dict.insert("label", Value::string(label)); @@ -259,7 +262,7 @@ async fn net(tag: Tag) -> Option { let mut io_counters = net::io_counters(); while let Some(nic) = io_counters.next().await { if let Ok(nic) = nic { - let mut network_idx = TaggedDictBuilder::with_capacity(tag, 3); + let mut network_idx = TaggedDictBuilder::with_capacity(&tag, 3); network_idx.insert("name", Value::string(nic.interface())); network_idx.insert( "sent", @@ -280,11 +283,17 @@ async fn net(tag: Tag) -> Option { } async fn sysinfo(tag: Tag) -> Vec> { - let mut sysinfo = TaggedDictBuilder::with_capacity(tag, 7); + let mut sysinfo = TaggedDictBuilder::with_capacity(&tag, 7); - let (host, cpu, disks, memory, temp) = - futures::future::join5(host(tag), cpu(tag), disks(tag), mem(tag), temp(tag)).await; - let (net, battery) = futures::future::join(net(tag), battery(tag)).await; + let (host, cpu, disks, memory, temp) = futures::future::join5( + host(tag.clone()), + cpu(tag.clone()), + disks(tag.clone()), + mem(tag.clone()), + temp(tag.clone()), + ) + .await; + let (net, battery) = futures::future::join(net(tag.clone()), battery(tag.clone())).await; sysinfo.insert_tagged("host", host); if let Some(cpu) = cpu { diff --git a/src/plugins/textview.rs b/src/plugins/textview.rs index cce8bd708..88507183e 100644 --- a/src/plugins/textview.rs +++ b/src/plugins/textview.rs @@ -1,8 +1,7 @@ use crossterm::{cursor, terminal, RawScreen}; use 
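Because each sys helper now takes the Tag by value, the caller clones it once per future before joining them. A small sketch of that pattern, assuming the futures crate (already a dependency of this code base); the Tag struct and helpers are simplified stand-ins.

    use futures::executor::block_on;

    #[derive(Clone, Debug)]
    struct Tag {
        span: (usize, usize),
    }

    async fn host(tag: Tag) -> String {
        format!("host tagged {:?}", tag.span)
    }

    async fn cpu(tag: Tag) -> String {
        format!("cpu tagged {:?}", tag.span)
    }

    fn main() {
        let tag = Tag { span: (0, 3) };
        // Clone per future so the original tag stays available for later calls.
        let (h, c) = block_on(futures::future::join(host(tag.clone()), cpu(tag.clone())));
        println!("{h}\n{c}");
    }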
crossterm::{InputEvent, KeyEvent}; use nu::{ - serve_plugin, AnchorLocation, CallInfo, Plugin, Primitive, ShellError, Signature, SourceMap, - Tagged, Value, + serve_plugin, AnchorLocation, CallInfo, Plugin, Primitive, ShellError, Signature, Tagged, Value, }; use syntect::easy::HighlightLines; @@ -29,8 +28,8 @@ impl Plugin for TextView { Ok(Signature::build("textview").desc("Autoview of text data.")) } - fn sink(&mut self, call_info: CallInfo, input: Vec>) { - view_text_value(&input[0], &call_info.source_map); + fn sink(&mut self, _call_info: CallInfo, input: Vec>) { + view_text_value(&input[0]); } } @@ -215,20 +214,18 @@ fn scroll_view(s: &str) { scroll_view_lines_if_needed(v, false); } -fn view_text_value(value: &Tagged, source_map: &SourceMap) { +fn view_text_value(value: &Tagged) { let value_anchor = value.anchor(); match value.item { Value::Primitive(Primitive::String(ref s)) => { - let source = source_map.get(&value_anchor); - - if let Some(source) = source { + if let Some(source) = value_anchor { let extension: Option = match source { AnchorLocation::File(file) => { - let path = Path::new(file); + let path = Path::new(&file); path.extension().map(|x| x.to_string_lossy().to_string()) } AnchorLocation::Url(url) => { - let url = url::Url::parse(url); + let url = url::Url::parse(&url); if let Ok(url) = url { let url = url.clone(); if let Some(mut segments) = url.path_segments() { diff --git a/src/prelude.rs b/src/prelude.rs index eabd77871..4b12a07bd 100644 --- a/src/prelude.rs +++ b/src/prelude.rs @@ -1,3 +1,13 @@ +#[macro_export] +macro_rules! return_err { + ($expr:expr) => { + match $expr { + Err(_) => return, + Ok(expr) => expr, + }; + }; +} + #[macro_export] macro_rules! stream { ($($expr:expr),*) => {{ @@ -56,7 +66,7 @@ pub(crate) use crate::commands::RawCommandArgs; pub(crate) use crate::context::CommandRegistry; pub(crate) use crate::context::{AnchorLocation, Context}; pub(crate) use crate::data::base as value; -pub(crate) use crate::data::meta::{Tag, Tagged, TaggedItem}; +pub(crate) use crate::data::meta::{Span, Spanned, SpannedItem, Tag, Tagged, TaggedItem}; pub(crate) use crate::data::types::ExtractType; pub(crate) use crate::data::{Primitive, Value}; pub(crate) use crate::env::host::handle_unexpected; @@ -99,6 +109,22 @@ where } } +pub trait ToInputStream { + fn to_input_stream(self) -> InputStream; +} + +impl ToInputStream for T +where + T: Stream + Send + 'static, + U: Into, ShellError>>, +{ + fn to_input_stream(self) -> InputStream { + InputStream { + values: self.map(|item| item.into().unwrap()).boxed(), + } + } +} + pub trait ToOutputStream { fn to_output_stream(self) -> OutputStream; } diff --git a/src/shell/filesystem_shell.rs b/src/shell/filesystem_shell.rs index 3c1ae79ea..f0adeebeb 100644 --- a/src/shell/filesystem_shell.rs +++ b/src/shell/filesystem_shell.rs @@ -3,7 +3,6 @@ use crate::commands::cp::CopyArgs; use crate::commands::mkdir::MkdirArgs; use crate::commands::mv::MoveArgs; use crate::commands::rm::RemoveArgs; -use crate::context::SourceMap; use crate::data::dir_entry_dict; use crate::prelude::*; use crate::shell::completer::NuCompleter; @@ -12,6 +11,7 @@ use crate::utils::FileStructure; use rustyline::completion::FilenameCompleter; use rustyline::hint::{Hinter, HistoryHinter}; use std::path::{Path, PathBuf}; +use std::sync::atomic::Ordering; pub struct FilesystemShell { pub(crate) path: String, @@ -73,7 +73,7 @@ impl FilesystemShell { } impl Shell for FilesystemShell { - fn name(&self, _source_map: &SourceMap) -> String { + fn name(&self) -> String { 
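The new return_err! macro in prelude.rs bails out of a function returning () whenever an expression is Err. The macro body below is copied from the diff; the surrounding function and main are only an illustrative usage sketch.

    macro_rules! return_err {
        ($expr:expr) => {
            match $expr {
                Err(_) => return,
                Ok(expr) => expr,
            };
        };
    }

    // Illustrative caller: give up silently when the file cannot be inspected.
    fn report_exists(path: &str) {
        return_err!(std::fs::metadata(path));
        println!("{} exists", path);
    }

    fn main() {
        report_exists("Cargo.toml");
        report_exists("definitely/not/here");
    }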
"filesystem".to_string() } @@ -84,7 +84,7 @@ impl Shell for FilesystemShell { fn ls( &self, pattern: Option>, - command_tag: Tag, + context: &RunnableContext, ) -> Result { let cwd = self.path(); let mut full_path = PathBuf::from(self.path()); @@ -94,7 +94,8 @@ impl Shell for FilesystemShell { _ => {} } - let mut shell_entries = VecDeque::new(); + let ctrl_c = context.ctrl_c.clone(); + let name_tag = context.name.clone(); //If it's not a glob, try to display the contents of the entry if it's a directory let lossy_path = full_path.to_string_lossy(); @@ -114,24 +115,33 @@ impl Shell for FilesystemShell { return Err(ShellError::labeled_error( e.to_string(), e.to_string(), - command_tag, + name_tag, )); } } Ok(o) => o, }; - for entry in entries { - let entry = entry?; - let filepath = entry.path(); - let filename = if let Ok(fname) = filepath.strip_prefix(&cwd) { - fname - } else { - Path::new(&filepath) - }; - let value = dir_entry_dict(filename, &entry.metadata()?, command_tag)?; - shell_entries.push_back(ReturnSuccess::value(value)) - } - return Ok(shell_entries.to_output_stream()); + let stream = async_stream! { + for entry in entries { + if ctrl_c.load(Ordering::SeqCst) { + break; + } + if let Ok(entry) = entry { + let filepath = entry.path(); + if let Ok(metadata) = std::fs::symlink_metadata(&filepath) { + let filename = if let Ok(fname) = filepath.strip_prefix(&cwd) { + fname + } else { + Path::new(&filepath) + }; + + let value = dir_entry_dict(filename, &metadata, &name_tag)?; + yield ReturnSuccess::value(value); + } + } + } + }; + return Ok(stream.to_output_stream()); } } @@ -145,26 +155,33 @@ impl Shell for FilesystemShell { source.tag(), )); } else { - return Err(ShellError::string("Invalid pattern.")); + return Err(ShellError::untagged_runtime_error("Invalid pattern.")); } } }; // Enumerate the entries from the glob and add each - for entry in entries { - if let Ok(entry) = entry { - let filename = if let Ok(fname) = entry.strip_prefix(&cwd) { - fname - } else { - Path::new(&entry) - }; - let metadata = std::fs::metadata(&entry)?; - let value = dir_entry_dict(filename, &metadata, command_tag)?; - shell_entries.push_back(ReturnSuccess::value(value)) - } - } + let stream = async_stream! 
{ + for entry in entries { + if ctrl_c.load(Ordering::SeqCst) { + break; + } + if let Ok(entry) = entry { + if let Ok(metadata) = std::fs::symlink_metadata(&entry) { + let filename = if let Ok(fname) = entry.strip_prefix(&cwd) { + fname + } else { + Path::new(&entry) + }; - Ok(shell_entries.to_output_stream()) + if let Ok(value) = dir_entry_dict(filename, &metadata, &name_tag) { + yield ReturnSuccess::value(value); + } + } + } + } + }; + Ok(stream.to_output_stream()) } fn cd(&self, args: EvaluatedWholeStreamCommandArgs) -> Result { @@ -175,7 +192,7 @@ impl Shell for FilesystemShell { return Err(ShellError::labeled_error( "Can not change to home directory", "can not go to home", - args.call_info.name_tag, + &args.call_info.name_tag, )) } }, @@ -957,7 +974,7 @@ impl Shell for FilesystemShell { return Err(ShellError::labeled_error( "unable to show current directory", "pwd command failed", - args.call_info.name_tag, + &args.call_info.name_tag, )); } }; @@ -965,7 +982,7 @@ impl Shell for FilesystemShell { let mut stream = VecDeque::new(); stream.push_back(ReturnSuccess::value( Value::Primitive(Primitive::String(p.to_string_lossy().to_string())) - .tagged(args.call_info.name_tag), + .tagged(&args.call_info.name_tag), )); Ok(stream.into()) diff --git a/src/shell/help_shell.rs b/src/shell/help_shell.rs index 0fedd9ad7..7c0e74bde 100644 --- a/src/shell/help_shell.rs +++ b/src/shell/help_shell.rs @@ -3,7 +3,6 @@ use crate::commands::cp::CopyArgs; use crate::commands::mkdir::MkdirArgs; use crate::commands::mv::MoveArgs; use crate::commands::rm::RemoveArgs; -use crate::context::SourceMap; use crate::data::{command_dict, TaggedDictBuilder}; use crate::prelude::*; use crate::shell::shell::Shell; @@ -98,8 +97,8 @@ impl HelpShell { } impl Shell for HelpShell { - fn name(&self, source_map: &SourceMap) -> String { - let anchor_name = self.value.anchor_name(source_map); + fn name(&self) -> String { + let anchor_name = self.value.anchor_name(); format!( "{}", match anchor_name { @@ -129,7 +128,7 @@ impl Shell for HelpShell { fn ls( &self, _pattern: Option>, - _command_tag: Tag, + _context: &RunnableContext, ) -> Result { Ok(self .commands() diff --git a/src/shell/helper.rs b/src/shell/helper.rs index 6fb454435..dc3ab96dc 100644 --- a/src/shell/helper.rs +++ b/src/shell/helper.rs @@ -1,10 +1,11 @@ +use crate::context::Context; +use crate::parser::hir::syntax_shape::{color_fallible_syntax, FlatShape, PipelineShape}; +use crate::parser::hir::TokensIterator; use crate::parser::nom_input; use crate::parser::parse::token_tree::TokenNode; -use crate::parser::parse::tokens::RawToken; -use crate::parser::{Pipeline, PipelineElement}; -use crate::shell::shell_manager::ShellManager; -use crate::Tagged; +use crate::{Span, Spanned, SpannedItem, Tag, Tagged, Text}; use ansi_term::Color; +use log::trace; use rustyline::completion::Completer; use rustyline::error::ReadlineError; use rustyline::highlight::Highlighter; @@ -12,12 +13,12 @@ use rustyline::hint::Hinter; use std::borrow::Cow::{self, Owned}; pub(crate) struct Helper { - helper: ShellManager, + context: Context, } impl Helper { - pub(crate) fn new(helper: ShellManager) -> Helper { - Helper { helper } + pub(crate) fn new(context: Context) -> Helper { + Helper { context } } } @@ -29,7 +30,7 @@ impl Completer for Helper { pos: usize, ctx: &rustyline::Context<'_>, ) -> Result<(usize, Vec), ReadlineError> { - self.helper.complete(line, pos, ctx) + self.context.shell_manager.complete(line, pos, ctx) } } @@ -52,7 +53,7 @@ impl Completer for Helper { impl Hinter for Helper { 
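The readline Helper now owns a full Context rather than only a ShellManager, and completion and hinting are forwarded through context.shell_manager. A rough sketch of that delegation shape with heavily simplified stand-in types:

    #[derive(Clone)]
    struct ShellManager;

    impl ShellManager {
        fn complete(&self, line: &str, pos: usize) -> (usize, Vec<String>) {
            // Pretend completion: offer the rest of a known command.
            if "ls".starts_with(&line[..pos]) {
                (0, vec!["ls".to_string()])
            } else {
                (pos, vec![])
            }
        }
    }

    #[derive(Clone)]
    struct Context {
        shell_manager: ShellManager,
    }

    struct Helper {
        context: Context,
    }

    impl Helper {
        fn new(context: Context) -> Helper {
            Helper { context }
        }

        // Forward to the shell manager, as the Completer/Hinter impls now do.
        fn complete(&self, line: &str, pos: usize) -> (usize, Vec<String>) {
            self.context.shell_manager.complete(line, pos)
        }
    }

    fn main() {
        let helper = Helper::new(Context { shell_manager: ShellManager });
        let (start, candidates) = helper.complete("l", 1);
        println!("replace from {}: {:?}", start, candidates);
    }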
fn hint(&self, line: &str, pos: usize, ctx: &rustyline::Context<'_>) -> Option { - self.helper.hint(line, pos, ctx) + self.context.shell_manager.hint(line, pos, ctx) } } @@ -66,7 +67,7 @@ impl Highlighter for Helper { } fn highlight<'l>(&self, line: &'l str, _pos: usize) -> Cow<'l, str> { - let tokens = crate::parser::pipeline(nom_input(line, uuid::Uuid::nil())); + let tokens = crate::parser::pipeline(nom_input(line)); match tokens { Err(_) => Cow::Borrowed(line), @@ -77,24 +78,42 @@ impl Highlighter for Helper { Ok(v) => v, }; - let Pipeline { parts, post_ws } = pipeline; - let mut iter = parts.into_iter(); + let tokens = vec![TokenNode::Pipeline(pipeline.clone().spanned(v.span()))]; + let mut tokens = TokensIterator::all(&tokens[..], v.span()); - loop { - match iter.next() { - None => { - if let Some(ws) = post_ws { - out.push_str(ws.slice(line)); - } + let text = Text::from(line); + let expand_context = self + .context + .expand_context(&text, Span::new(0, line.len() - 1)); + let mut shapes = vec![]; - return Cow::Owned(out); - } - Some(token) => { - let styled = paint_pipeline_element(&token, line); - out.push_str(&styled.to_string()); - } - } + // We just constructed a token list that only contains a pipeline, so it can't fail + color_fallible_syntax(&PipelineShape, &mut tokens, &expand_context, &mut shapes) + .unwrap(); + + trace!(target: "nu::shapes", + "SHAPES :: {:?}", + shapes.iter().map(|shape| shape.item).collect::>() + ); + + for shape in shapes { + let styled = paint_flat_shape(shape, line); + out.push_str(&styled); } + + Cow::Owned(out) + + // loop { + // match iter.next() { + // None => { + // return Cow::Owned(out); + // } + // Some(token) => { + // let styled = paint_pipeline_element(&token, line); + // out.push_str(&styled.to_string()); + // } + // } + // } } } } @@ -104,83 +123,55 @@ impl Highlighter for Helper { } } -fn paint_token_node(token_node: &TokenNode, line: &str) -> String { - let styled = match token_node { - TokenNode::Call(..) => Color::Cyan.bold().paint(token_node.tag().slice(line)), - TokenNode::Whitespace(..) => Color::White.normal().paint(token_node.tag().slice(line)), - TokenNode::Flag(..) => Color::Black.bold().paint(token_node.tag().slice(line)), - TokenNode::Member(..) => Color::Yellow.bold().paint(token_node.tag().slice(line)), - TokenNode::Path(..) => Color::Green.bold().paint(token_node.tag().slice(line)), - TokenNode::Error(..) => Color::Red.bold().paint(token_node.tag().slice(line)), - TokenNode::Delimited(..) => Color::White.paint(token_node.tag().slice(line)), - TokenNode::Operator(..) => Color::White.normal().paint(token_node.tag().slice(line)), - TokenNode::Pipeline(..) => Color::Blue.normal().paint(token_node.tag().slice(line)), - TokenNode::Token(Tagged { - item: RawToken::Number(..), - .. - }) => Color::Purple.bold().paint(token_node.tag().slice(line)), - TokenNode::Token(Tagged { - item: RawToken::Size(..), - .. - }) => Color::Purple.bold().paint(token_node.tag().slice(line)), - TokenNode::Token(Tagged { - item: RawToken::GlobPattern, - .. - }) => Color::Cyan.normal().paint(token_node.tag().slice(line)), - TokenNode::Token(Tagged { - item: RawToken::String(..), - .. - }) => Color::Green.normal().paint(token_node.tag().slice(line)), - TokenNode::Token(Tagged { - item: RawToken::Variable(..), - .. - }) => Color::Yellow.bold().paint(token_node.tag().slice(line)), - TokenNode::Token(Tagged { - item: RawToken::Bare, - .. 
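Highlighting now walks flattened shapes and paints each span with a color chosen per shape kind. A trimmed sketch of that mapping, assuming the ansi_term crate (already imported in this file) and a made-up two-or-three-variant shape enum rather than the full FlatShape set:

    use ansi_term::Color;

    // Hypothetical, reduced shape set; the real FlatShape has many more variants.
    enum FlatShape {
        InternalCommand,
        String,
        Whitespace,
    }

    fn paint_flat_shape(shape: &FlatShape, text: &str) -> String {
        let style = match shape {
            FlatShape::InternalCommand => Color::Cyan.bold(),
            FlatShape::String => Color::Green.normal(),
            FlatShape::Whitespace => Color::White.normal(),
        };
        style.paint(text).to_string()
    }

    fn main() {
        let mut out = String::new();
        out.push_str(&paint_flat_shape(&FlatShape::InternalCommand, "ls"));
        out.push_str(&paint_flat_shape(&FlatShape::Whitespace, " "));
        out.push_str(&paint_flat_shape(&FlatShape::String, "*.rs"));
        println!("{}", out);
    }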
- }) => Color::Green.normal().paint(token_node.tag().slice(line)), - TokenNode::Token(Tagged { - item: RawToken::ExternalCommand(..), - .. - }) => Color::Cyan.bold().paint(token_node.tag().slice(line)), - TokenNode::Token(Tagged { - item: RawToken::ExternalWord, - .. - }) => Color::Black.bold().paint(token_node.tag().slice(line)), - }; +#[allow(unused)] +fn vec_tag(input: Vec>) -> Option { + let mut iter = input.iter(); + let first = iter.next()?.tag.clone(); + let last = iter.last(); - styled.to_string() + Some(match last { + None => first, + Some(last) => first.until(&last.tag), + }) } -fn paint_pipeline_element(pipeline_element: &PipelineElement, line: &str) -> String { - let mut styled = String::new(); - - if let Some(_) = pipeline_element.pipe { - styled.push_str(&Color::Purple.paint("|")); - } - - if let Some(ws) = pipeline_element.pre_ws { - styled.push_str(&Color::White.normal().paint(ws.slice(line))); - } - - styled.push_str( - &Color::Cyan - .bold() - .paint(pipeline_element.call().head().tag().slice(line)) - .to_string(), - ); - - if let Some(children) = pipeline_element.call().children() { - for child in children { - styled.push_str(&paint_token_node(child, line)); +fn paint_flat_shape(flat_shape: Spanned, line: &str) -> String { + let style = match &flat_shape.item { + FlatShape::OpenDelimiter(_) => Color::White.normal(), + FlatShape::CloseDelimiter(_) => Color::White.normal(), + FlatShape::ItVariable => Color::Purple.bold(), + FlatShape::Variable => Color::Purple.normal(), + FlatShape::Operator => Color::Yellow.normal(), + FlatShape::Dot => Color::White.normal(), + FlatShape::InternalCommand => Color::Cyan.bold(), + FlatShape::ExternalCommand => Color::Cyan.normal(), + FlatShape::ExternalWord => Color::Black.bold(), + FlatShape::BareMember => Color::Yellow.bold(), + FlatShape::StringMember => Color::Yellow.bold(), + FlatShape::String => Color::Green.normal(), + FlatShape::Path => Color::Cyan.normal(), + FlatShape::GlobPattern => Color::Cyan.bold(), + FlatShape::Word => Color::Green.normal(), + FlatShape::Pipe => Color::Purple.bold(), + FlatShape::Flag => Color::Black.bold(), + FlatShape::ShorthandFlag => Color::Black.bold(), + FlatShape::Int => Color::Purple.bold(), + FlatShape::Decimal => Color::Purple.bold(), + FlatShape::Whitespace => Color::White.normal(), + FlatShape::Error => Color::Red.bold(), + FlatShape::Size { number, unit } => { + let number = number.slice(line); + let unit = unit.slice(line); + return format!( + "{}{}", + Color::Purple.bold().paint(number), + Color::Cyan.bold().paint(unit) + ); } - } + }; - if let Some(ws) = pipeline_element.post_ws { - styled.push_str(&Color::White.normal().paint(ws.slice(line))); - } - - styled.to_string() + let body = flat_shape.span.slice(line); + style.paint(body).to_string() } impl rustyline::Helper for Helper {} diff --git a/src/shell/shell.rs b/src/shell/shell.rs index c567e474a..507fc0517 100644 --- a/src/shell/shell.rs +++ b/src/shell/shell.rs @@ -3,20 +3,19 @@ use crate::commands::cp::CopyArgs; use crate::commands::mkdir::MkdirArgs; use crate::commands::mv::MoveArgs; use crate::commands::rm::RemoveArgs; -use crate::context::SourceMap; use crate::errors::ShellError; use crate::prelude::*; use crate::stream::OutputStream; use std::path::PathBuf; pub trait Shell: std::fmt::Debug { - fn name(&self, source_map: &SourceMap) -> String; + fn name(&self) -> String; fn homedir(&self) -> Option; fn ls( &self, pattern: Option>, - command_tag: Tag, + context: &RunnableContext, ) -> Result; fn cd(&self, args: 
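The vec_tag helper above derives one tag covering a whole argument list by stretching the first element's tag until the last. A small std-only sketch of that "span until" idea with a simplified Tag that records only a byte range:

    #[derive(Clone, Debug, PartialEq)]
    struct Tag {
        start: usize,
        end: usize,
    }

    impl Tag {
        fn until(&self, other: &Tag) -> Tag {
            Tag { start: self.start, end: other.end }
        }
    }

    fn vec_tag(tags: &[Tag]) -> Option<Tag> {
        let first = tags.first()?;
        match tags.last() {
            Some(last) if tags.len() > 1 => Some(first.until(last)),
            _ => Some(first.clone()),
        }
    }

    fn main() {
        let tags = vec![Tag { start: 0, end: 3 }, Tag { start: 4, end: 9 }];
        assert_eq!(vec_tag(&tags), Some(Tag { start: 0, end: 9 }));
        assert_eq!(vec_tag(&[]), None);
    }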
EvaluatedWholeStreamCommandArgs) -> Result; fn cp(&self, args: CopyArgs, name: Tag, path: &str) -> Result; diff --git a/src/shell/shell_manager.rs b/src/shell/shell_manager.rs index c4c42367e..149fdd58d 100644 --- a/src/shell/shell_manager.rs +++ b/src/shell/shell_manager.rs @@ -10,18 +10,19 @@ use crate::shell::shell::Shell; use crate::stream::OutputStream; use std::error::Error; use std::path::PathBuf; +use std::sync::atomic::{AtomicUsize, Ordering}; use std::sync::{Arc, Mutex}; #[derive(Clone, Debug)] pub struct ShellManager { - pub(crate) current_shell: usize, + pub(crate) current_shell: Arc, pub(crate) shells: Arc>>>, } impl ShellManager { pub fn basic(commands: CommandRegistry) -> Result> { Ok(ShellManager { - current_shell: 0, + current_shell: Arc::new(AtomicUsize::new(0)), shells: Arc::new(Mutex::new(vec![Box::new(FilesystemShell::basic( commands, )?)])), @@ -30,24 +31,29 @@ impl ShellManager { pub fn insert_at_current(&mut self, shell: Box) { self.shells.lock().unwrap().push(shell); - self.current_shell = self.shells.lock().unwrap().len() - 1; + self.current_shell + .store(self.shells.lock().unwrap().len() - 1, Ordering::SeqCst); self.set_path(self.path()); } + pub fn current_shell(&self) -> usize { + self.current_shell.load(Ordering::SeqCst) + } + pub fn remove_at_current(&mut self) { { let mut shells = self.shells.lock().unwrap(); if shells.len() > 0 { - if self.current_shell == shells.len() - 1 { + if self.current_shell() == shells.len() - 1 { shells.pop(); let new_len = shells.len(); if new_len > 0 { - self.current_shell = new_len - 1; + self.current_shell.store(new_len - 1, Ordering::SeqCst); } else { return; } } else { - shells.remove(self.current_shell); + shells.remove(self.current_shell()); } } } @@ -59,17 +65,17 @@ impl ShellManager { } pub fn path(&self) -> String { - self.shells.lock().unwrap()[self.current_shell].path() + self.shells.lock().unwrap()[self.current_shell()].path() } pub fn pwd(&self, args: EvaluatedWholeStreamCommandArgs) -> Result { let env = self.shells.lock().unwrap(); - env[self.current_shell].pwd(args) + env[self.current_shell()].pwd(args) } pub fn set_path(&mut self, path: String) { - self.shells.lock().unwrap()[self.current_shell].set_path(path) + self.shells.lock().unwrap()[self.current_shell()].set_path(path) } pub fn complete( @@ -78,20 +84,21 @@ impl ShellManager { pos: usize, ctx: &rustyline::Context<'_>, ) -> Result<(usize, Vec), rustyline::error::ReadlineError> { - self.shells.lock().unwrap()[self.current_shell].complete(line, pos, ctx) + self.shells.lock().unwrap()[self.current_shell()].complete(line, pos, ctx) } pub fn hint(&self, line: &str, pos: usize, ctx: &rustyline::Context<'_>) -> Option { - self.shells.lock().unwrap()[self.current_shell].hint(line, pos, ctx) + self.shells.lock().unwrap()[self.current_shell()].hint(line, pos, ctx) } pub fn next(&mut self) { { let shell_len = self.shells.lock().unwrap().len(); - if self.current_shell == (shell_len - 1) { - self.current_shell = 0; + if self.current_shell() == (shell_len - 1) { + self.current_shell.store(0, Ordering::SeqCst); } else { - self.current_shell += 1; + self.current_shell + .store(self.current_shell() + 1, Ordering::SeqCst); } } self.set_path(self.path()); @@ -100,10 +107,11 @@ impl ShellManager { pub fn prev(&mut self) { { let shell_len = self.shells.lock().unwrap().len(); - if self.current_shell == 0 { - self.current_shell = shell_len - 1; + if self.current_shell() == 0 { + self.current_shell.store(shell_len - 1, Ordering::SeqCst); } else { - self.current_shell -= 1; + 
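current_shell changes from a plain usize to an Arc<AtomicUsize> so every clone of the ShellManager observes the same active-shell index. A minimal sketch of why that matters; next() here wraps with a modulo for brevity, unlike the real comparison against the shell count.

    use std::sync::atomic::{AtomicUsize, Ordering};
    use std::sync::Arc;

    #[derive(Clone)]
    struct ShellManager {
        current_shell: Arc<AtomicUsize>,
    }

    impl ShellManager {
        fn current_shell(&self) -> usize {
            self.current_shell.load(Ordering::SeqCst)
        }

        fn next(&self, shell_count: usize) {
            let next = (self.current_shell() + 1) % shell_count;
            self.current_shell.store(next, Ordering::SeqCst);
        }
    }

    fn main() {
        let manager = ShellManager { current_shell: Arc::new(AtomicUsize::new(0)) };
        let clone = manager.clone();

        manager.next(3);
        // With a bare usize the clone would still report 0; the Arc'd atomic is shared.
        assert_eq!(clone.current_shell(), 1);
    }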
self.current_shell + .store(self.current_shell() - 1, Ordering::SeqCst); } } self.set_path(self.path()); @@ -112,23 +120,23 @@ impl ShellManager { pub fn homedir(&self) -> Option { let env = self.shells.lock().unwrap(); - env[self.current_shell].homedir() + env[self.current_shell()].homedir() } pub fn ls( &self, path: Option>, - command_tag: Tag, + context: &RunnableContext, ) -> Result { let env = self.shells.lock().unwrap(); - env[self.current_shell].ls(path, command_tag) + env[self.current_shell()].ls(path, context) } pub fn cd(&self, args: EvaluatedWholeStreamCommandArgs) -> Result { let env = self.shells.lock().unwrap(); - env[self.current_shell].cd(args) + env[self.current_shell()].cd(args) } pub fn cp( @@ -140,13 +148,13 @@ impl ShellManager { match env { Ok(x) => { - let path = x[self.current_shell].path(); - x[self.current_shell].cp(args, context.name, &path) + let path = x[self.current_shell()].path(); + x[self.current_shell()].cp(args, context.name.clone(), &path) } Err(e) => Err(ShellError::labeled_error( format!("Internal error: could not lock {}", e), "Internal error: could not lock", - context.name, + &context.name, )), } } @@ -160,13 +168,13 @@ impl ShellManager { match env { Ok(x) => { - let path = x[self.current_shell].path(); - x[self.current_shell].rm(args, context.name, &path) + let path = x[self.current_shell()].path(); + x[self.current_shell()].rm(args, context.name.clone(), &path) } Err(e) => Err(ShellError::labeled_error( format!("Internal error: could not lock {}", e), "Internal error: could not lock", - context.name, + &context.name, )), } } @@ -180,13 +188,13 @@ impl ShellManager { match env { Ok(x) => { - let path = x[self.current_shell].path(); - x[self.current_shell].mkdir(args, context.name, &path) + let path = x[self.current_shell()].path(); + x[self.current_shell()].mkdir(args, context.name.clone(), &path) } Err(e) => Err(ShellError::labeled_error( format!("Internal error: could not lock {}", e), "Internal error: could not lock", - context.name, + &context.name, )), } } @@ -200,13 +208,13 @@ impl ShellManager { match env { Ok(x) => { - let path = x[self.current_shell].path(); - x[self.current_shell].mv(args, context.name, &path) + let path = x[self.current_shell()].path(); + x[self.current_shell()].mv(args, context.name.clone(), &path) } Err(e) => Err(ShellError::labeled_error( format!("Internal error: could not lock {}", e), "Internal error: could not lock", - context.name, + &context.name, )), } } diff --git a/src/shell/value_shell.rs b/src/shell/value_shell.rs index d95d07cb9..0aa9e341b 100644 --- a/src/shell/value_shell.rs +++ b/src/shell/value_shell.rs @@ -3,7 +3,6 @@ use crate::commands::cp::CopyArgs; use crate::commands::mkdir::MkdirArgs; use crate::commands::mv::MoveArgs; use crate::commands::rm::RemoveArgs; -use crate::context::SourceMap; use crate::prelude::*; use crate::shell::shell::Shell; use crate::utils::ValueStructure; @@ -72,8 +71,8 @@ impl ValueShell { } impl Shell for ValueShell { - fn name(&self, source_map: &SourceMap) -> String { - let anchor_name = self.value.anchor_name(source_map); + fn name(&self) -> String { + let anchor_name = self.value.anchor_name(); format!( "{}", match anchor_name { @@ -90,9 +89,10 @@ impl Shell for ValueShell { fn ls( &self, target: Option>, - command_name: Tag, + context: &RunnableContext, ) -> Result { let mut full_path = PathBuf::from(self.path()); + let name_tag = context.name.clone(); match &target { Some(value) => full_path.push(value.as_ref()), @@ -114,7 +114,7 @@ impl Shell for ValueShell { return 
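The file-system operations above lock the shared shell list and turn a failed lock into a labeled error carrying the calling command's name tag. A rough std-only sketch of converting a poisoned Mutex into a domain error; ShellError here is a stand-in struct, not nu's real type.

    use std::sync::{Arc, Mutex};

    #[derive(Debug)]
    struct ShellError {
        message: String,
        label: String,
    }

    fn with_shells(
        shells: &Arc<Mutex<Vec<String>>>,
        current: usize,
    ) -> Result<String, ShellError> {
        match shells.lock() {
            Ok(shells) => Ok(shells[current].clone()),
            Err(e) => Err(ShellError {
                message: format!("Internal error: could not lock {}", e),
                label: "Internal error: could not lock".to_string(),
            }),
        }
    }

    fn main() {
        let shells = Arc::new(Mutex::new(vec!["filesystem".to_string()]));
        match with_shells(&shells, 0) {
            Ok(name) => println!("active shell: {}", name),
            Err(err) => println!("{}: {}", err.label, err.message),
        }
    }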
Err(ShellError::labeled_error( "Can not list entries inside", "No such path exists", - command_name, + name_tag, )); } @@ -166,7 +166,7 @@ impl Shell for ValueShell { return Err(ShellError::labeled_error( "Can not change to path inside", "No such path exists", - args.call_info.name_tag, + &args.call_info.name_tag, )); } @@ -213,10 +213,9 @@ impl Shell for ValueShell { fn pwd(&self, args: EvaluatedWholeStreamCommandArgs) -> Result { let mut stream = VecDeque::new(); - stream.push_back(ReturnSuccess::value(Tagged::from_item( - Value::string(self.path()), - args.call_info.name_tag, - ))); + stream.push_back(ReturnSuccess::value( + Value::string(self.path()).tagged(&args.call_info.name_tag), + )); Ok(stream.into()) } diff --git a/src/stream.rs b/src/stream.rs index 066acb74a..f6f2d5e2e 100644 --- a/src/stream.rs +++ b/src/stream.rs @@ -23,6 +23,17 @@ impl InputStream { } } +impl Stream for InputStream { + type Item = Tagged; + + fn poll_next( + mut self: std::pin::Pin<&mut Self>, + cx: &mut std::task::Context<'_>, + ) -> core::task::Poll> { + Stream::poll_next(std::pin::Pin::new(&mut self.values), cx) + } +} + impl From>> for InputStream { fn from(input: BoxStream<'static, Tagged>) -> InputStream { InputStream { values: input } diff --git a/tests/command_config_test.rs b/tests/command_config_test.rs index dd0f4e0eb..8a45be47c 100644 --- a/tests/command_config_test.rs +++ b/tests/command_config_test.rs @@ -86,30 +86,30 @@ fn sets_configuration_value() { h::delete_file_at(nu::config_path().unwrap().join("test_4.toml")); } -#[test] -fn removes_configuration_value() { - Playground::setup("config_test_5", |dirs, sandbox| { - sandbox.with_files(vec![FileWithContent( - "test_5.toml", - r#" - caballeros = [1, 1, 1] - podershell = [1, 1, 1] - "#, - )]); +// #[test] +// fn removes_configuration_value() { +// Playground::setup("config_test_5", |dirs, sandbox| { +// sandbox.with_files(vec![FileWithContent( +// "test_5.toml", +// r#" +// caballeros = [1, 1, 1] +// podershell = [1, 1, 1] +// "#, +// )]); - nu!( - cwd: dirs.test(), - "config --load test_5.toml --remove podershell" - ); +// nu!( +// cwd: dirs.test(), +// "config --load test_5.toml --remove podershell" +// ); - let actual = nu_error!( - cwd: dirs.root(), - r#"open "{}/test_5.toml" | get podershell | echo $it"#, - dirs.config_path() - ); +// let actual = nu_error!( +// cwd: dirs.root(), +// r#"open "{}/test_5.toml" | get podershell | echo $it"#, +// dirs.config_path() +// ); - assert!(actual.contains("Unknown column")); - }); +// assert!(actual.contains("Unknown column")); +// }); - h::delete_file_at(nu::config_path().unwrap().join("test_5.toml")); -} +// h::delete_file_at(nu::config_path().unwrap().join("test_5.toml")); +// } diff --git a/tests/command_open_tests.rs b/tests/command_open_tests.rs index 54dc7ad54..53e393eef 100644 --- a/tests/command_open_tests.rs +++ b/tests/command_open_tests.rs @@ -212,7 +212,7 @@ fn open_can_parse_ini() { fn open_can_parse_utf16_ini() { let actual = nu!( cwd: "tests/fixtures/formats", - "open utf16.ini | get .ShellClassInfo | get IconIndex | echo $it" + "open utf16.ini | get '.ShellClassInfo' | get IconIndex | echo $it" ); assert_eq!(actual, "-236") @@ -222,7 +222,7 @@ fn open_can_parse_utf16_ini() { fn errors_if_file_not_found() { let actual = nu_error!( cwd: "tests/fixtures/formats", - "open i_dont_exist.txt | echo $it" + "open i_dont_exist.txt" ); assert!(actual.contains("File could not be opened")); diff --git a/tests/helpers/mod.rs b/tests/helpers/mod.rs index 04fd88992..199038b53 100644 --- 
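InputStream now implements futures' Stream directly by forwarding poll_next to its inner boxed stream. A standalone sketch of the same delegation, assuming the futures crate; the wrapper holds plain strings instead of Tagged<Value>.

    use futures::stream::{self, BoxStream, Stream, StreamExt};
    use std::pin::Pin;
    use std::task::{Context, Poll};

    struct Lines {
        values: BoxStream<'static, String>,
    }

    impl Stream for Lines {
        type Item = String;

        fn poll_next(
            mut self: Pin<&mut Self>,
            cx: &mut Context<'_>,
        ) -> Poll<Option<Self::Item>> {
            // BoxStream is Unpin, so the inner stream can simply be re-pinned and polled.
            Stream::poll_next(Pin::new(&mut self.values), cx)
        }
    }

    fn main() {
        let lines = Lines {
            values: stream::iter(vec!["a".to_string(), "b".to_string()]).boxed(),
        };
        let collected: Vec<String> = futures::executor::block_on(lines.collect());
        assert_eq!(collected, vec!["a".to_string(), "b".to_string()]);
    }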
a/tests/helpers/mod.rs +++ b/tests/helpers/mod.rs @@ -93,6 +93,7 @@ macro_rules! nu { .write_all(commands.as_bytes()) .expect("couldn't write to stdin"); + let output = process .wait_with_output() .expect("couldn't read from stdout");
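For context on the test helper touched above: the nu! macro drives the built binary by writing the command text to the child's stdin and then collecting everything via wait_with_output. A bare-bones std-only sketch of that pattern; `cat` stands in for the nu binary and simply echoes stdin back (assumes a Unix-like environment).

    use std::io::Write;
    use std::process::{Command, Stdio};

    fn main() -> std::io::Result<()> {
        let mut child = Command::new("cat")
            .stdin(Stdio::piped())
            .stdout(Stdio::piped())
            .spawn()?;

        child
            .stdin
            .as_mut()
            .expect("couldn't open stdin")
            .write_all(b"ls | get name\n")?;

        // wait_with_output closes stdin, lets the child finish, and collects its output.
        let output = child.wait_with_output()?;
        println!("{}", String::from_utf8_lossy(&output.stdout));
        Ok(())
    }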