diff --git a/.azure/azure-pipelines.yml b/.azure/azure-pipelines.yml index 50ebcf1e24..2ab7e05c46 100644 --- a/.azure/azure-pipelines.yml +++ b/.azure/azure-pipelines.yml @@ -5,10 +5,25 @@ strategy: matrix: linux-nightly: image: ubuntu-16.04 + style: 'unflagged' macos-nightly: image: macos-10.14 + style: 'unflagged' windows-nightly: image: vs2017-win2016 + style: 'unflagged' + linux-nightly-canary: + image: ubuntu-16.04 + style: 'canary' + macos-nightly-canary: + image: macos-10.14 + style: 'canary' + windows-nightly-canary: + image: vs2017-win2016 + style: 'canary' + fmt: + image: ubuntu-16.04 + style: 'fmt' pool: vmImage: $(image) @@ -16,10 +31,22 @@ pool: steps: - bash: | set -e + if [ -e /etc/debian_version ] + then + sudo apt-get -y install libxcb-composite0-dev libx11-dev + fi curl https://sh.rustup.rs -sSf | sh -s -- -y --no-modify-path --default-toolchain `cat rust-toolchain` export PATH=$HOME/.cargo/bin:$PATH rustc -Vv echo "##vso[task.prependpath]$HOME/.cargo/bin" + rustup component add rustfmt --toolchain `cat rust-toolchain` displayName: Install Rust - - bash: RUSTFLAGS="-D warnings" cargo test + - bash: RUSTFLAGS="-D warnings" cargo test --all-features + condition: eq(variables['style'], 'unflagged') displayName: Run tests + - bash: NUSHELL_ENABLE_ALL_FLAGS=1 RUSTFLAGS="-D warnings" cargo test --all-features + condition: eq(variables['style'], 'canary') + displayName: Run tests + - bash: cargo fmt --all -- --check + condition: eq(variables['style'], 'fmt') + displayName: Lint diff --git a/.circleci/config.yml b/.circleci/config.yml index 80dd80017d..1595472977 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -9,21 +9,16 @@ version: 2.1 commands: - check_token: - description: Check that QUAY_TOKEN is provided in environment - steps: - - run: - if [[ -z "${QUAY_TOKEN}" ]]; then - echo "QUAY_TOKEN is undefined. Add to CircleCI environment to continue." - exit 1; - fi - pull_cache: - description: Pulls Quay.io docker images usable for our cache + description: Pulls Quay.io docker images (latest) for our cache + parameters: + tag: + type: string + default: "devel" steps: - - run: docker pull quay.io/nushell/nu:latest - - run: docker pull quay.io/nushell/nu-base:latest - + - run: echo "Tag is << parameters.tag >>" + - run: docker pull quay.io/nushell/nu:<< parameters.tag >> + - run: docker pull quay.io/nushell/nu-base:<< parameters.tag >> orbs: # https://circleci.com/orbs/registry/orb/circleci/docker @@ -40,13 +35,12 @@ workflows: image: nushell/nu-base tag: latest dockerfile: docker/Dockerfile.nu-base - extra_build_args: --cache-from=quay.io/nushell/nu-base:latest,quay.io/nushell/nu:latest + extra_build_args: --cache-from=quay.io/nushell/nu-base:devel filters: branches: ignore: - master before_build: - - check_token - pull_cache after_build: - run: @@ -58,31 +52,41 @@ workflows: command: | DOCKER_TAG=$(docker run quay.io/nushell/nu --version | cut -d' ' -f2) echo "Version that would be used for Docker tag is v${DOCKER_TAG}" + - run: + name: Test Executable + command: | + docker run --rm quay.io/nushell/nu-base --help + docker run --rm quay.io/nushell/nu --help # workflow publishes to Docker Hub, with each job having different triggers build_with_deploy: jobs: - # Deploy versioned and latest images on tags (releases) only. + # Deploy versioned and latest images on tags (releases) only - builds --release. 
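Not part of the diff, but as context for the azure-pipelines.yml hunk above: the new `style` matrix value gates which command each leg runs via `condition: eq(variables['style'], ...)`. A minimal sketch of reproducing the three legs locally, using only commands that already appear in the pipeline:

```bash
# style: 'unflagged' - plain test run, warnings promoted to errors
RUSTFLAGS="-D warnings" cargo test --all-features

# style: 'canary' - same tests with the experimental flags enabled
NUSHELL_ENABLE_ALL_FLAGS=1 RUSTFLAGS="-D warnings" cargo test --all-features

# style: 'fmt' - formatting lint only (needs the rustfmt component installed in the Install Rust step)
rustup component add rustfmt --toolchain "$(cat rust-toolchain)"
cargo fmt --all -- --check
```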
- docker/publish: image: nushell/nu-base registry: quay.io tag: latest dockerfile: docker/Dockerfile.nu-base - extra_build_args: --cache-from=quay.io/nushell/nu-base:latest,quay.io/nushell/nu:latest + extra_build_args: --cache-from=quay.io/nushell/nu-base:latest,quay.io/nushell/nu:latest --build-arg RELEASE=true filters: branches: ignore: /.*/ tags: only: /^v.*/ before_build: - - check_token - - pull_cache + - run: docker pull quay.io/nushell/nu:latest + - run: docker pull quay.io/nushell/nu-base:latest after_build: - run: name: Build Multistage (smaller) container command: | docker build -f docker/Dockerfile -t quay.io/nushell/nu . + - run: + name: Test Executable + command: | + docker run --rm quay.io/nushell/nu --help + docker run --rm quay.io/nushell/nu-base --help - run: name: Publish Docker Tag with Nushell Version command: | @@ -90,12 +94,11 @@ workflows: echo "Version for Docker tag is ${DOCKER_TAG}" docker tag quay.io/nushell/nu-base:latest quay.io/nushell/nu-base:${DOCKER_TAG} docker tag quay.io/nushell/nu:latest quay.io/nushell/nu:${DOCKER_TAG} - docker login -u="nushell+circleci" -p="${QUAY_TOKEN}" quay.io docker push quay.io/nushell/nu-base docker push quay.io/nushell/nu - # publish devel to Docker Hub on merge to master + # publish devel to Docker Hub on merge to master (doesn't build --release) build_with_deploy_devel: jobs: @@ -105,9 +108,8 @@ workflows: registry: quay.io tag: devel dockerfile: docker/Dockerfile.nu-base - extra_build_args: --cache-from=quay.io/nushell/nu-base:latest,quay.io/nushell/nu:latest + extra_build_args: --cache-from=quay.io/nushell/nu-base:devel before_build: - - check_token - pull_cache filters: branches: @@ -117,9 +119,47 @@ workflows: name: Build Multistage (smaller) container command: | docker build --build-arg FROMTAG=devel -f docker/Dockerfile -t quay.io/nushell/nu:devel . + - run: + name: Test Executable + command: | + docker run --rm quay.io/nushell/nu:devel --help + docker run --rm quay.io/nushell/nu-base:devel --help - run: name: Publish Development Docker Tags command: | - docker login -u="nushell+circleci" -p="${QUAY_TOKEN}" quay.io docker push quay.io/nushell/nu-base:devel docker push quay.io/nushell/nu:devel + + nightly: + triggers: + - schedule: + cron: "0 0 * * *" + filters: + branches: + only: + - master + jobs: + - docker/publish: + image: nushell/nu-base + registry: quay.io + tag: nightly + dockerfile: docker/Dockerfile.nu-base + extra_build_args: --cache-from=quay.io/nushell/nu-base:nightly --build-arg RELEASE=true + before_build: + - run: docker pull quay.io/nushell/nu:nightly + - run: docker pull quay.io/nushell/nu-base:nightly + after_build: + - run: + name: Build Multistage (smaller) container + command: | + docker build -f docker/Dockerfile -t quay.io/nushell/nu:nightly . 
+ - run: + name: Test Executable + command: | + docker run --rm quay.io/nushell/nu:nightly --help + docker run --rm quay.io/nushell/nu-base:nightly --help + - run: + name: Publish Nightly Nushell Containers + command: | + docker push quay.io/nushell/nu-base:nightly + docker push quay.io/nushell/nu:nightly diff --git a/.editorconfig b/.editorconfig index f6fb9f98d9..c5d100a733 100644 --- a/.editorconfig +++ b/.editorconfig @@ -6,4 +6,9 @@ indent_size = 4 charset = utf-8 trim_trailing_whitespace = true insert_final_newline = false -end_of_line = lf \ No newline at end of file +end_of_line = lf + +[*.{yml,yaml}] +indent_size = 2 +charset = utf-8 +insert_final_newline = true \ No newline at end of file diff --git a/.github/workflows/docker-publish.yml b/.github/workflows/docker-publish.yml new file mode 100644 index 0000000000..e84cefd3ab --- /dev/null +++ b/.github/workflows/docker-publish.yml @@ -0,0 +1,98 @@ +name: Publish consumable Docker images + +on: + push: + tags: ['*.*.*'] + +jobs: + compile: + runs-on: ubuntu-latest + strategy: + matrix: + arch: + - x86_64-unknown-linux-musl + - x86_64-unknown-linux-gnu + steps: + - uses: actions/checkout@v1 + - run: cargo install cross + - name: compile for specific target + env: { arch: '${{ matrix.arch }}' } + run: | + cross build --target ${{ matrix.arch }} --release + # leave only the executable file + rm -rd target/${{ matrix.arch }}/release/{*/*,*.d,*.rlib,.fingerprint} + find . -empty -delete + - uses: actions/upload-artifact@master + with: + name: ${{ matrix.arch }} + path: target/${{ matrix.arch }}/release + + docker: + name: Build and publish docker images + needs: compile + runs-on: ubuntu-latest + strategy: + matrix: + tag: + - alpine + - slim + - debian + - glibc-busybox + - musl-busybox + - musl-distroless + - glibc-distroless + - glibc + - musl + include: + - { tag: alpine, base-image: alpine, arch: x86_64-unknown-linux-musl, plugin: true } + - { tag: slim, base-image: 'debian:stable-slim', arch: x86_64-unknown-linux-gnu, plugin: true } + - { tag: debian, base-image: debian, arch: x86_64-unknown-linux-gnu, plugin: true } + - { tag: glibc-busybox, base-image: 'busybox:glibc', arch: x86_64-unknown-linux-gnu, use-patch: true } + - { tag: musl-busybox, base-image: 'busybox:musl', arch: x86_64-unknown-linux-musl, } + - { tag: musl-distroless, base-image: 'gcr.io/distroless/static', arch: x86_64-unknown-linux-musl, } + - { tag: glibc-distroless, base-image: 'gcr.io/distroless/cc', arch: x86_64-unknown-linux-gnu, use-patch: true } + - { tag: glibc, base-image: scratch, arch: x86_64-unknown-linux-gnu, } + - { tag: musl, base-image: scratch, arch: x86_64-unknown-linux-musl, } + steps: + - uses: actions/checkout@v1 + - uses: actions/download-artifact@master + with: { name: '${{ matrix.arch }}', path: target/release } + - name: Build and publish exact version + run: | + REGISTRY=${REGISTRY,,}; export TAG=${GITHUB_REF##*/}-${{ matrix.tag }}; + export NU_BINS=target/release/$( [ ${{ matrix.plugin }} = true ] && echo nu* || echo nu ) + export PATCH=$([ ${{ matrix.use-patch }} = true ] && echo .${{ matrix.tag }} || echo '') + chmod +x $NU_BINS + + echo ${{ secrets.DOCKER_REGISTRY }} | docker login docker.pkg.github.com -u ${{ github.actor }} --password-stdin + docker-compose --file docker/docker-compose.package.yml build + docker-compose --file docker/docker-compose.package.yml push # exact version + env: + BASE_IMAGE: ${{ matrix.base-image }} + REGISTRY: docker.pkg.github.com/${{ github.repository }} + + #region semantics tagging + - name: Retag 
and push without suffixing version + run: | + VERSION=${GITHUB_REF##*/} + docker tag ${REGISTRY,,}/nu:${VERSION}-${{ matrix.tag }} ${REGISTRY,,}/nu:${{ matrix.tag }} + docker tag ${REGISTRY,,}/nu:${VERSION}-${{ matrix.tag }} ${REGISTRY,,}/nu:${VERSION%%.*}-${{ matrix.tag }} + docker tag ${REGISTRY,,}/nu:${VERSION}-${{ matrix.tag }} ${REGISTRY,,}/nu:${VERSION%.*}-${{ matrix.tag }} + docker push ${REGISTRY,,}/nu:${VERSION%.*}-${{ matrix.tag }} # latest patch + docker push ${REGISTRY,,}/nu:${VERSION%%.*}-${{ matrix.tag }} # latest features + docker push ${REGISTRY,,}/nu:${{ matrix.tag }} # latest version + env: { REGISTRY: 'docker.pkg.github.com/${{ github.repository }}' } + - name: Retag and push debian as latest + if: matrix.tag == 'debian' + run: | + VERSION=${GITHUB_REF##*/} + docker tag ${REGISTRY,,}/nu:${{ matrix.tag }} ${REGISTRY,,}/nu:latest + docker tag ${REGISTRY,,}/nu:${VERSION}-${{ matrix.tag }} ${REGISTRY,,}/nu:${VERSION%.*} + docker tag ${REGISTRY,,}/nu:${VERSION}-${{ matrix.tag }} ${REGISTRY,,}/nu:${VERSION%%.*} + docker tag ${REGISTRY,,}/nu:${VERSION}-${{ matrix.tag }} ${REGISTRY,,}/nu:${VERSION} + docker push ${REGISTRY,,}/nu:${VERSION} # exact version + docker push ${REGISTRY,,}/nu:${VERSION%%.*} # latest features + docker push ${REGISTRY,,}/nu:${VERSION%.*} # latest patch + docker push ${REGISTRY,,}/nu:latest # latest version + env: { REGISTRY: 'docker.pkg.github.com/${{ github.repository }}' } + #endregion semantics tagging diff --git a/.gitignore b/.gitignore index 2026921b61..d8aedd6cec 100644 --- a/.gitignore +++ b/.gitignore @@ -3,3 +3,10 @@ **/*.rs.bk history.txt tests/fixtures/nuplayground + +# Debian/Ubuntu +debian/.debhelper/ +debian/debhelper-build-stamp +debian/files +debian/nu.substvars +debian/nu/ diff --git a/.gitpod.Dockerfile b/.gitpod.Dockerfile new file mode 100644 index 0000000000..b832017f62 --- /dev/null +++ b/.gitpod.Dockerfile @@ -0,0 +1,7 @@ +FROM gitpod/workspace-full +USER root +RUN apt-get update && apt-get install -y libssl-dev \ + libxcb-composite0-dev \ + pkg-config \ + curl \ + rustc diff --git a/.gitpod.yml b/.gitpod.yml new file mode 100644 index 0000000000..adb894f2d3 --- /dev/null +++ b/.gitpod.yml @@ -0,0 +1,21 @@ +image: + file: .gitpod.Dockerfile +tasks: + - init: cargo build + command: cargo run +github: + prebuilds: + # enable for the master/default branch (defaults to true) + master: true + # enable for all branches in this repo (defaults to false) + branches: true + # enable for pull requests coming from this repo (defaults to true) + pullRequests: true + # enable for pull requests coming from forks (defaults to false) + pullRequestsFromForks: true + # add a "Review in Gitpod" button as a comment to pull requests (defaults to true) + addComment: true + # add a "Review in Gitpod" button to pull requests (defaults to false) + addBadge: false + # add a label once the prebuild is ready to pull requests (defaults to false) + addLabel: prebuilt-in-gitpod diff --git a/Cargo.lock b/Cargo.lock index b8ec4ee79c..9f8ebfe787 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2,7 +2,7 @@ # It is not intended for manual editing. 
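Not part of the workflow itself, but the "Retag and push" steps above lean on bash parameter expansion to fan a single release tag out into semver aliases. A minimal sketch with a hypothetical `0.4.1` tag and the `debian` variant (the values below are illustrative, not taken from the repo):

```bash
#!/usr/bin/env bash
# Hypothetical inputs mirroring the workflow environment (illustration only).
GITHUB_REF=refs/tags/0.4.1
REGISTRY=docker.pkg.github.com/NuShell/nushell   # mixed case on purpose

VERSION=${GITHUB_REF##*/}   # strip up to the last '/'      -> 0.4.1
echo "${REGISTRY,,}"        # ',,' lowercases the registry  -> docker.pkg.github.com/nushell/nushell
echo "${VERSION%.*}"        # drop the shortest '.*' suffix -> 0.4  (latest patch)
echo "${VERSION%%.*}"       # drop the longest '.*' suffix  -> 0    (latest features)

# The debian variant therefore gets pushed as:
#   nu:0.4.1-debian  nu:0.4-debian  nu:0-debian  nu:debian
# and, since debian is also retagged as latest:
#   nu:0.4.1  nu:0.4  nu:0  nu:latest
```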
[[package]] name = "adler32" -version = "1.0.3" +version = "1.0.4" source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] @@ -23,7 +23,7 @@ dependencies = [ [[package]] name = "ansi_term" -version = "0.12.0" +version = "0.12.1" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", @@ -47,10 +47,29 @@ source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] name = "arrayvec" -version = "0.4.11" +version = "0.4.12" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "nodrop 0.1.13 (registry+https://github.com/rust-lang/crates.io-index)", + "nodrop 0.1.14 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "async-stream" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "async-stream-impl 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", + "futures-core-preview 0.3.0-alpha.18 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "async-stream-impl" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "proc-macro2 1.0.5 (registry+https://github.com/rust-lang/crates.io-index)", + "quote 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)", + "syn 1.0.5 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -58,24 +77,24 @@ name = "atty" version = "0.2.13" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", "winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "autocfg" -version = "0.1.5" +version = "0.1.6" source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] name = "backtrace" -version = "0.3.34" +version = "0.3.38" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "backtrace-sys 0.1.31 (registry+https://github.com/rust-lang/crates.io-index)", - "cfg-if 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", - "rustc-demangle 0.1.15 (registry+https://github.com/rust-lang/crates.io-index)", + "cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", + "rustc-demangle 0.1.16 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -83,8 +102,8 @@ name = "backtrace-sys" version = "0.1.31" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "cc 1.0.38 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", + "cc 1.0.45 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -100,10 +119,10 @@ name = "battery" version = "0.7.4" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "cfg-if 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)", + "cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", "core-foundation 0.6.4 (registry+https://github.com/rust-lang/crates.io-index)", "lazycell 1.2.1 
(registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", "mach 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)", "nix 0.14.1 (registry+https://github.com/rust-lang/crates.io-index)", "num-traits 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)", @@ -116,35 +135,35 @@ name = "bigdecimal" version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "num-bigint 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)", + "num-bigint 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)", "num-integer 0.1.41 (registry+https://github.com/rust-lang/crates.io-index)", "num-traits 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)", - "serde 1.0.99 (registry+https://github.com/rust-lang/crates.io-index)", + "serde 1.0.101 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "bincode" -version = "1.1.4" +version = "1.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "autocfg 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)", + "autocfg 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)", "byteorder 1.3.2 (registry+https://github.com/rust-lang/crates.io-index)", - "serde 1.0.99 (registry+https://github.com/rust-lang/crates.io-index)", + "serde 1.0.101 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "bitflags" -version = "1.1.0" +version = "1.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] name = "blake2b_simd" -version = "0.5.6" +version = "0.5.8" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "arrayref 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)", - "arrayvec 0.4.11 (registry+https://github.com/rust-lang/crates.io-index)", - "constant_time_eq 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)", + "arrayvec 0.4.12 (registry+https://github.com/rust-lang/crates.io-index)", + "constant_time_eq 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -158,37 +177,37 @@ version = "0.14.0" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "byteorder 1.3.2 (registry+https://github.com/rust-lang/crates.io-index)", - "chrono 0.4.7 (registry+https://github.com/rust-lang/crates.io-index)", + "chrono 0.4.9 (registry+https://github.com/rust-lang/crates.io-index)", "decimal 2.0.4 (registry+https://github.com/rust-lang/crates.io-index)", "hex 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", "linked-hash-map 0.5.2 (registry+https://github.com/rust-lang/crates.io-index)", "md5 0.6.1 (registry+https://github.com/rust-lang/crates.io-index)", - "rand 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)", - "serde 1.0.99 (registry+https://github.com/rust-lang/crates.io-index)", - "serde_json 1.0.40 (registry+https://github.com/rust-lang/crates.io-index)", + "rand 0.7.2 (registry+https://github.com/rust-lang/crates.io-index)", + "serde 1.0.101 (registry+https://github.com/rust-lang/crates.io-index)", + "serde_json 1.0.41 (registry+https://github.com/rust-lang/crates.io-index)", "time 0.1.42 (registry+https://github.com/rust-lang/crates.io-index)", ] 
[[package]] name = "bstr" -version = "0.2.6" +version = "0.2.8" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "lazy_static 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)", + "lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)", "memchr 2.2.1 (registry+https://github.com/rust-lang/crates.io-index)", "regex-automata 0.1.8 (registry+https://github.com/rust-lang/crates.io-index)", - "serde 1.0.99 (registry+https://github.com/rust-lang/crates.io-index)", + "serde 1.0.101 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "bumpalo" -version = "2.5.0" +version = "2.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] name = "byte-unit" -version = "3.0.1" +version = "3.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] @@ -207,7 +226,7 @@ version = "0.4.12" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "byteorder 1.3.2 (registry+https://github.com/rust-lang/crates.io-index)", - "iovec 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)", + "iovec 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -215,29 +234,33 @@ name = "c2-chacha" version = "0.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "lazy_static 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)", + "lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)", "ppv-lite86 0.2.5 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "cc" -version = "1.0.38" +version = "1.0.45" source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "jobserver 0.1.17 (registry+https://github.com/rust-lang/crates.io-index)", + "num_cpus 1.10.1 (registry+https://github.com/rust-lang/crates.io-index)", +] [[package]] name = "cfg-if" -version = "0.1.9" +version = "0.1.10" source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] name = "chrono" -version = "0.4.7" +version = "0.4.9" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", "num-integer 0.1.41 (registry+https://github.com/rust-lang/crates.io-index)", "num-traits 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)", - "serde 1.0.99 (registry+https://github.com/rust-lang/crates.io-index)", + "serde 1.0.101 (registry+https://github.com/rust-lang/crates.io-index)", "time 0.1.42 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -246,7 +269,7 @@ name = "chrono-humanize" version = "0.0.11" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "chrono 0.4.7 (registry+https://github.com/rust-lang/crates.io-index)", + "chrono 0.4.9 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -256,7 +279,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "ansi_term 0.11.0 (registry+https://github.com/rust-lang/crates.io-index)", "atty 0.2.13 (registry+https://github.com/rust-lang/crates.io-index)", - "bitflags 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)", + "bitflags 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)", "strsim 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)", "textwrap 0.11.0 
(registry+https://github.com/rust-lang/crates.io-index)", "unicode-width 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)", @@ -288,7 +311,7 @@ name = "cloudabi" version = "0.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "bitflags 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)", + "bitflags 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -296,19 +319,19 @@ name = "config" version = "0.9.3" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "lazy_static 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)", + "lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)", "nom 4.2.3 (registry+https://github.com/rust-lang/crates.io-index)", "rust-ini 0.13.0 (registry+https://github.com/rust-lang/crates.io-index)", - "serde 1.0.99 (registry+https://github.com/rust-lang/crates.io-index)", + "serde 1.0.101 (registry+https://github.com/rust-lang/crates.io-index)", "serde-hjson 0.8.2 (registry+https://github.com/rust-lang/crates.io-index)", - "serde_json 1.0.40 (registry+https://github.com/rust-lang/crates.io-index)", + "serde_json 1.0.41 (registry+https://github.com/rust-lang/crates.io-index)", "toml 0.4.10 (registry+https://github.com/rust-lang/crates.io-index)", "yaml-rust 0.4.3 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "constant_time_eq" -version = "0.1.3" +version = "0.1.4" source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] @@ -317,7 +340,7 @@ version = "0.6.4" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "core-foundation-sys 0.6.2 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -330,22 +353,7 @@ name = "crc32fast" version = "1.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "cfg-if 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "crossbeam" -version = "0.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "cfg-if 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)", - "crossbeam-channel 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)", - "crossbeam-deque 0.6.3 (registry+https://github.com/rust-lang/crates.io-index)", - "crossbeam-epoch 0.6.1 (registry+https://github.com/rust-lang/crates.io-index)", - "crossbeam-utils 0.6.6 (registry+https://github.com/rust-lang/crates.io-index)", - "lazy_static 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)", - "num_cpus 1.10.1 (registry+https://github.com/rust-lang/crates.io-index)", - "parking_lot 0.6.4 (registry+https://github.com/rust-lang/crates.io-index)", + "cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -356,41 +364,6 @@ dependencies = [ "crossbeam-utils 0.6.6 (registry+https://github.com/rust-lang/crates.io-index)", ] -[[package]] -name = "crossbeam-deque" -version = "0.6.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "crossbeam-epoch 0.7.2 (registry+https://github.com/rust-lang/crates.io-index)", - "crossbeam-utils 0.6.6 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "crossbeam-epoch" -version = "0.6.1" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "arrayvec 0.4.11 (registry+https://github.com/rust-lang/crates.io-index)", - "cfg-if 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)", - "crossbeam-utils 0.6.6 (registry+https://github.com/rust-lang/crates.io-index)", - "lazy_static 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)", - "memoffset 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)", - "scopeguard 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "crossbeam-epoch" -version = "0.7.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "arrayvec 0.4.11 (registry+https://github.com/rust-lang/crates.io-index)", - "cfg-if 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)", - "crossbeam-utils 0.6.6 (registry+https://github.com/rust-lang/crates.io-index)", - "lazy_static 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)", - "memoffset 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)", - "scopeguard 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)", -] - [[package]] name = "crossbeam-utils" version = "0.5.0" @@ -401,8 +374,8 @@ name = "crossbeam-utils" version = "0.6.6" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "cfg-if 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)", - "lazy_static 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)", + "cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", + "lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -436,7 +409,7 @@ dependencies = [ "crossterm_screen 0.2.5 (registry+https://github.com/rust-lang/crates.io-index)", "crossterm_utils 0.2.4 (registry+https://github.com/rust-lang/crates.io-index)", "crossterm_winapi 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", "winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -468,7 +441,7 @@ dependencies = [ "crossterm_cursor 0.2.6 (registry+https://github.com/rust-lang/crates.io-index)", "crossterm_utils 0.2.4 (registry+https://github.com/rust-lang/crates.io-index)", "crossterm_winapi 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -477,7 +450,7 @@ version = "0.2.4" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "crossterm_winapi 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", "winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -494,11 +467,11 @@ name = "csv" version = "1.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "bstr 0.2.6 (registry+https://github.com/rust-lang/crates.io-index)", + "bstr 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)", "csv-core 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)", "itoa 0.4.4 (registry+https://github.com/rust-lang/crates.io-index)", - "ryu 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)", - "serde 
1.0.99 (registry+https://github.com/rust-lang/crates.io-index)", + "ryu 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)", + "serde 1.0.101 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -511,11 +484,11 @@ dependencies = [ [[package]] name = "ctor" -version = "0.1.9" +version = "0.1.12" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "quote 0.6.13 (registry+https://github.com/rust-lang/crates.io-index)", - "syn 0.15.43 (registry+https://github.com/rust-lang/crates.io-index)", + "quote 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)", + "syn 1.0.5 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -529,30 +502,29 @@ dependencies = [ [[package]] name = "curl" -version = "0.4.22" +version = "0.4.25" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "curl-sys 0.4.20 (registry+https://github.com/rust-lang/crates.io-index)", - "kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", + "curl-sys 0.4.23 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", "openssl-probe 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)", - "openssl-sys 0.9.49 (registry+https://github.com/rust-lang/crates.io-index)", - "schannel 0.1.15 (registry+https://github.com/rust-lang/crates.io-index)", + "openssl-sys 0.9.51 (registry+https://github.com/rust-lang/crates.io-index)", + "schannel 0.1.16 (registry+https://github.com/rust-lang/crates.io-index)", "socket2 0.3.11 (registry+https://github.com/rust-lang/crates.io-index)", - "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)", + "winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "curl-sys" -version = "0.4.20" +version = "0.4.23" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "cc 1.0.38 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", + "cc 1.0.45 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", "libnghttp2-sys 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)", "libz-sys 1.0.25 (registry+https://github.com/rust-lang/crates.io-index)", - "openssl-sys 0.9.49 (registry+https://github.com/rust-lang/crates.io-index)", - "pkg-config 0.3.15 (registry+https://github.com/rust-lang/crates.io-index)", + "openssl-sys 0.9.51 (registry+https://github.com/rust-lang/crates.io-index)", + "pkg-config 0.3.16 (registry+https://github.com/rust-lang/crates.io-index)", "vcpkg 0.2.7 (registry+https://github.com/rust-lang/crates.io-index)", "winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -563,7 +535,7 @@ version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "darwin-libproc-sys 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", "memchr 2.2.1 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -572,7 +544,7 @@ name = "darwin-libproc-sys" version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "libc 0.2.60 
(registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -580,12 +552,12 @@ name = "decimal" version = "2.0.4" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "bitflags 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)", - "cc 1.0.38 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", + "bitflags 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)", + "cc 1.0.45 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", "ord_subset 3.1.1 (registry+https://github.com/rust-lang/crates.io-index)", "rustc-serialize 0.3.24 (registry+https://github.com/rust-lang/crates.io-index)", - "serde 1.0.99 (registry+https://github.com/rust-lang/crates.io-index)", + "serde 1.0.101 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -593,7 +565,7 @@ name = "deflate" version = "0.7.20" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "adler32 1.0.3 (registry+https://github.com/rust-lang/crates.io-index)", + "adler32 1.0.4 (registry+https://github.com/rust-lang/crates.io-index)", "byteorder 1.3.2 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -602,9 +574,9 @@ name = "derive-new" version = "0.5.8" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "proc-macro2 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)", + "proc-macro2 1.0.5 (registry+https://github.com/rust-lang/crates.io-index)", "quote 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)", - "syn 1.0.4 (registry+https://github.com/rust-lang/crates.io-index)", + "syn 1.0.5 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -617,7 +589,7 @@ name = "directories" version = "1.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", "winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -626,7 +598,7 @@ name = "dirs" version = "1.0.5" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", "redox_users 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", "winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -636,7 +608,7 @@ name = "dirs" version = "2.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "cfg-if 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)", + "cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", "dirs-sys 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -645,8 +617,8 @@ name = "dirs-sys" version = "0.3.4" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "cfg-if 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", + "cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", "redox_users 0.3.1 
(registry+https://github.com/rust-lang/crates.io-index)", "winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -663,66 +635,44 @@ source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] name = "either" -version = "1.5.2" +version = "1.5.3" source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] name = "encode_unicode" -version = "0.3.5" +version = "0.3.6" source = "registry+https://github.com/rust-lang/crates.io-index" -[[package]] -name = "enum-utils" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "enum-utils-from-str 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", - "failure 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)", - "proc-macro2 0.4.30 (registry+https://github.com/rust-lang/crates.io-index)", - "quote 0.6.13 (registry+https://github.com/rust-lang/crates.io-index)", - "serde_derive_internals 0.24.1 (registry+https://github.com/rust-lang/crates.io-index)", - "syn 0.15.43 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "enum-utils-from-str" -version = "0.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "proc-macro2 0.4.30 (registry+https://github.com/rust-lang/crates.io-index)", - "quote 0.6.13 (registry+https://github.com/rust-lang/crates.io-index)", -] - [[package]] name = "env_logger" version = "0.6.2" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "atty 0.2.13 (registry+https://github.com/rust-lang/crates.io-index)", - "humantime 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)", + "humantime 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)", - "regex 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)", + "regex 1.3.1 (registry+https://github.com/rust-lang/crates.io-index)", "termcolor 1.0.5 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "failure" -version = "0.1.5" +version = "0.1.6" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "backtrace 0.3.34 (registry+https://github.com/rust-lang/crates.io-index)", - "failure_derive 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)", + "backtrace 0.3.38 (registry+https://github.com/rust-lang/crates.io-index)", + "failure_derive 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "failure_derive" -version = "0.1.5" +version = "0.1.6" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "proc-macro2 0.4.30 (registry+https://github.com/rust-lang/crates.io-index)", - "quote 0.6.13 (registry+https://github.com/rust-lang/crates.io-index)", - "syn 0.15.43 (registry+https://github.com/rust-lang/crates.io-index)", - "synstructure 0.10.2 (registry+https://github.com/rust-lang/crates.io-index)", + "proc-macro2 1.0.5 (registry+https://github.com/rust-lang/crates.io-index)", + "quote 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)", + "syn 1.0.5 (registry+https://github.com/rust-lang/crates.io-index)", + "synstructure 0.12.1 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -742,13 +692,13 @@ source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] name = "flate2" -version = "1.0.9" +version = "1.0.12" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ 
+ "cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", "crc32fast 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", - "miniz-sys 0.1.12 (registry+https://github.com/rust-lang/crates.io-index)", - "miniz_oxide_c_api 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", + "miniz_oxide 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -763,29 +713,9 @@ source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] name = "futures" -version = "0.1.28" +version = "0.1.29" source = "registry+https://github.com/rust-lang/crates.io-index" -[[package]] -name = "futures-async-stream" -version = "0.1.0-alpha.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "futures-async-stream-macro 0.1.0-alpha.5 (registry+https://github.com/rust-lang/crates.io-index)", - "futures-core-preview 0.3.0-alpha.18 (registry+https://github.com/rust-lang/crates.io-index)", - "pin-project 0.4.0-alpha.5 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "futures-async-stream-macro" -version = "0.1.0-alpha.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "proc-macro2 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)", - "quote 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)", - "syn 1.0.4 (registry+https://github.com/rust-lang/crates.io-index)", -] - [[package]] name = "futures-channel-preview" version = "0.3.0-alpha.18" @@ -838,10 +768,11 @@ dependencies = [ [[package]] name = "futures-timer" -version = "0.3.0" +version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "futures-preview 0.3.0-alpha.18 (registry+https://github.com/rust-lang/crates.io-index)", + "futures-core-preview 0.3.0-alpha.18 (registry+https://github.com/rust-lang/crates.io-index)", + "futures-util-preview 0.3.0-alpha.18 (registry+https://github.com/rust-lang/crates.io-index)", "pin-utils 0.1.0-alpha.4 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -850,7 +781,7 @@ name = "futures-util-preview" version = "0.3.0-alpha.18" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "futures 0.1.28 (registry+https://github.com/rust-lang/crates.io-index)", + "futures 0.1.29 (registry+https://github.com/rust-lang/crates.io-index)", "futures-channel-preview 0.3.0-alpha.18 (registry+https://github.com/rust-lang/crates.io-index)", "futures-core-preview 0.3.0-alpha.18 (registry+https://github.com/rust-lang/crates.io-index)", "futures-io-preview 0.3.0-alpha.18 (registry+https://github.com/rust-lang/crates.io-index)", @@ -872,11 +803,12 @@ dependencies = [ [[package]] name = "getrandom" -version = "0.1.8" +version = "0.1.12" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "cfg-if 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", + "cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", + "wasi 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -884,19 +816,19 @@ name = "getset" version = "0.0.8" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - 
"proc-macro2 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)", + "proc-macro2 1.0.5 (registry+https://github.com/rust-lang/crates.io-index)", "quote 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)", - "syn 1.0.4 (registry+https://github.com/rust-lang/crates.io-index)", + "syn 1.0.5 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "git2" -version = "0.10.0" +version = "0.10.1" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "bitflags 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", - "libgit2-sys 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)", + "bitflags 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", + "libgit2-sys 0.9.1 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)", "url 2.1.0 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -916,32 +848,33 @@ dependencies = [ [[package]] name = "heim" -version = "0.0.7" +version = "0.0.8" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "heim-common 0.0.7 (registry+https://github.com/rust-lang/crates.io-index)", - "heim-cpu 0.0.7 (registry+https://github.com/rust-lang/crates.io-index)", - "heim-derive 0.0.7 (registry+https://github.com/rust-lang/crates.io-index)", - "heim-disk 0.0.7 (registry+https://github.com/rust-lang/crates.io-index)", - "heim-host 0.0.7 (registry+https://github.com/rust-lang/crates.io-index)", - "heim-memory 0.0.7 (registry+https://github.com/rust-lang/crates.io-index)", - "heim-net 0.0.7 (registry+https://github.com/rust-lang/crates.io-index)", - "heim-process 0.0.7 (registry+https://github.com/rust-lang/crates.io-index)", - "heim-runtime 0.0.3 (registry+https://github.com/rust-lang/crates.io-index)", - "heim-sensors 0.0.2 (registry+https://github.com/rust-lang/crates.io-index)", - "heim-virt 0.0.7 (registry+https://github.com/rust-lang/crates.io-index)", + "heim-common 0.0.8 (registry+https://github.com/rust-lang/crates.io-index)", + "heim-cpu 0.0.8 (registry+https://github.com/rust-lang/crates.io-index)", + "heim-derive 0.0.8 (registry+https://github.com/rust-lang/crates.io-index)", + "heim-disk 0.0.8 (registry+https://github.com/rust-lang/crates.io-index)", + "heim-host 0.0.8 (registry+https://github.com/rust-lang/crates.io-index)", + "heim-memory 0.0.8 (registry+https://github.com/rust-lang/crates.io-index)", + "heim-net 0.0.8 (registry+https://github.com/rust-lang/crates.io-index)", + "heim-process 0.0.8 (registry+https://github.com/rust-lang/crates.io-index)", + "heim-runtime 0.0.4 (registry+https://github.com/rust-lang/crates.io-index)", + "heim-sensors 0.0.3 (registry+https://github.com/rust-lang/crates.io-index)", + "heim-virt 0.0.8 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "heim-common" -version = "0.0.7" +version = "0.0.8" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "cfg-if 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)", + "cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", "core-foundation 0.6.4 (registry+https://github.com/rust-lang/crates.io-index)", - "futures-preview 0.3.0-alpha.18 (registry+https://github.com/rust-lang/crates.io-index)", - "lazy_static 1.3.0 
(registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", + "futures-core-preview 0.3.0-alpha.18 (registry+https://github.com/rust-lang/crates.io-index)", + "futures-util-preview 0.3.0-alpha.18 (registry+https://github.com/rust-lang/crates.io-index)", + "lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", "mach 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)", "nix 0.15.0 (registry+https://github.com/rust-lang/crates.io-index)", "pin-utils 0.1.0-alpha.4 (registry+https://github.com/rust-lang/crates.io-index)", @@ -951,41 +884,41 @@ dependencies = [ [[package]] name = "heim-cpu" -version = "0.0.7" +version = "0.0.8" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "cfg-if 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)", - "heim-common 0.0.7 (registry+https://github.com/rust-lang/crates.io-index)", - "heim-derive 0.0.7 (registry+https://github.com/rust-lang/crates.io-index)", - "heim-runtime 0.0.3 (registry+https://github.com/rust-lang/crates.io-index)", - "lazy_static 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", + "cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", + "heim-common 0.0.8 (registry+https://github.com/rust-lang/crates.io-index)", + "heim-derive 0.0.8 (registry+https://github.com/rust-lang/crates.io-index)", + "heim-runtime 0.0.4 (registry+https://github.com/rust-lang/crates.io-index)", + "lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", "mach 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)", "winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "heim-derive" -version = "0.0.7" +version = "0.0.8" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "proc-macro2 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)", + "proc-macro2 1.0.5 (registry+https://github.com/rust-lang/crates.io-index)", "quote 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)", - "syn 1.0.4 (registry+https://github.com/rust-lang/crates.io-index)", + "syn 1.0.5 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "heim-disk" -version = "0.0.7" +version = "0.0.8" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "bitflags 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)", - "cfg-if 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)", + "bitflags 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)", + "cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", "core-foundation 0.6.4 (registry+https://github.com/rust-lang/crates.io-index)", - "heim-common 0.0.7 (registry+https://github.com/rust-lang/crates.io-index)", - "heim-derive 0.0.7 (registry+https://github.com/rust-lang/crates.io-index)", - "heim-runtime 0.0.3 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", + "heim-common 0.0.8 (registry+https://github.com/rust-lang/crates.io-index)", + "heim-derive 0.0.8 (registry+https://github.com/rust-lang/crates.io-index)", + "heim-runtime 0.0.4 
(registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", "mach 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)", "widestring 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)", "winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", @@ -993,103 +926,105 @@ dependencies = [ [[package]] name = "heim-host" -version = "0.0.7" +version = "0.0.8" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "cfg-if 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)", - "heim-common 0.0.7 (registry+https://github.com/rust-lang/crates.io-index)", - "heim-derive 0.0.7 (registry+https://github.com/rust-lang/crates.io-index)", - "heim-runtime 0.0.3 (registry+https://github.com/rust-lang/crates.io-index)", - "lazy_static 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", + "cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", + "heim-common 0.0.8 (registry+https://github.com/rust-lang/crates.io-index)", + "heim-derive 0.0.8 (registry+https://github.com/rust-lang/crates.io-index)", + "heim-runtime 0.0.4 (registry+https://github.com/rust-lang/crates.io-index)", + "lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", "mach 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)", - "platforms 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)", + "platforms 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)", "winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "heim-memory" -version = "0.0.7" +version = "0.0.8" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "cfg-if 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)", - "heim-common 0.0.7 (registry+https://github.com/rust-lang/crates.io-index)", - "heim-derive 0.0.7 (registry+https://github.com/rust-lang/crates.io-index)", - "heim-runtime 0.0.3 (registry+https://github.com/rust-lang/crates.io-index)", - "lazy_static 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", + "cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", + "heim-common 0.0.8 (registry+https://github.com/rust-lang/crates.io-index)", + "heim-derive 0.0.8 (registry+https://github.com/rust-lang/crates.io-index)", + "heim-runtime 0.0.4 (registry+https://github.com/rust-lang/crates.io-index)", + "lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", "mach 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)", "winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "heim-net" -version = "0.0.7" +version = "0.0.8" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "bitflags 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)", - "cfg-if 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)", - "heim-common 0.0.7 (registry+https://github.com/rust-lang/crates.io-index)", - "heim-derive 0.0.7 (registry+https://github.com/rust-lang/crates.io-index)", - "heim-runtime 0.0.3 (registry+https://github.com/rust-lang/crates.io-index)", - "hex 0.3.2 
(registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", + "bitflags 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)", + "cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", + "heim-common 0.0.8 (registry+https://github.com/rust-lang/crates.io-index)", + "heim-derive 0.0.8 (registry+https://github.com/rust-lang/crates.io-index)", + "heim-runtime 0.0.4 (registry+https://github.com/rust-lang/crates.io-index)", + "hex 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", "macaddr 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", "nix 0.15.0 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "heim-process" -version = "0.0.7" +version = "0.0.8" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "cfg-if 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)", + "cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", "darwin-libproc 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", - "futures-preview 0.3.0-alpha.18 (registry+https://github.com/rust-lang/crates.io-index)", - "heim-common 0.0.7 (registry+https://github.com/rust-lang/crates.io-index)", - "heim-cpu 0.0.7 (registry+https://github.com/rust-lang/crates.io-index)", - "heim-derive 0.0.7 (registry+https://github.com/rust-lang/crates.io-index)", - "heim-net 0.0.7 (registry+https://github.com/rust-lang/crates.io-index)", - "heim-runtime 0.0.3 (registry+https://github.com/rust-lang/crates.io-index)", - "lazy_static 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", + "heim-common 0.0.8 (registry+https://github.com/rust-lang/crates.io-index)", + "heim-cpu 0.0.8 (registry+https://github.com/rust-lang/crates.io-index)", + "heim-derive 0.0.8 (registry+https://github.com/rust-lang/crates.io-index)", + "heim-host 0.0.8 (registry+https://github.com/rust-lang/crates.io-index)", + "heim-net 0.0.8 (registry+https://github.com/rust-lang/crates.io-index)", + "heim-runtime 0.0.4 (registry+https://github.com/rust-lang/crates.io-index)", + "lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", "mach 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)", + "memchr 2.2.1 (registry+https://github.com/rust-lang/crates.io-index)", "ntapi 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)", + "ordered-float 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)", "winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "heim-runtime" -version = "0.0.3" +version = "0.0.4" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "cfg-if 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)", - "futures-preview 0.3.0-alpha.18 (registry+https://github.com/rust-lang/crates.io-index)", - "heim-common 0.0.7 (registry+https://github.com/rust-lang/crates.io-index)", - "lazy_static 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)", + "cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", + "futures-channel-preview 0.3.0-alpha.18 (registry+https://github.com/rust-lang/crates.io-index)", + "heim-common 0.0.8 (registry+https://github.com/rust-lang/crates.io-index)", + 
"lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)", "threadpool 1.7.1 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "heim-sensors" -version = "0.0.2" +version = "0.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "cfg-if 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)", - "heim-common 0.0.7 (registry+https://github.com/rust-lang/crates.io-index)", - "heim-derive 0.0.7 (registry+https://github.com/rust-lang/crates.io-index)", - "heim-runtime 0.0.3 (registry+https://github.com/rust-lang/crates.io-index)", + "cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", + "heim-common 0.0.8 (registry+https://github.com/rust-lang/crates.io-index)", + "heim-derive 0.0.8 (registry+https://github.com/rust-lang/crates.io-index)", + "heim-runtime 0.0.4 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "heim-virt" -version = "0.0.7" +version = "0.0.8" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "cfg-if 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)", - "heim-common 0.0.7 (registry+https://github.com/rust-lang/crates.io-index)", - "heim-runtime 0.0.3 (registry+https://github.com/rust-lang/crates.io-index)", - "raw-cpuid 6.1.0 (registry+https://github.com/rust-lang/crates.io-index)", + "cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", + "heim-common 0.0.8 (registry+https://github.com/rust-lang/crates.io-index)", + "heim-runtime 0.0.4 (registry+https://github.com/rust-lang/crates.io-index)", + "raw-cpuid 7.0.3 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -1097,6 +1032,11 @@ name = "hex" version = "0.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" +[[package]] +name = "hex" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" + [[package]] name = "http" version = "0.1.18" @@ -1109,7 +1049,7 @@ dependencies = [ [[package]] name = "humantime" -version = "1.2.0" +version = "1.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "quick-error 1.2.2 (registry+https://github.com/rust-lang/crates.io-index)", @@ -1127,12 +1067,11 @@ dependencies = [ [[package]] name = "image" -version = "0.22.1" +version = "0.22.3" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "byteorder 1.3.2 (registry+https://github.com/rust-lang/crates.io-index)", - "jpeg-decoder 0.1.15 (registry+https://github.com/rust-lang/crates.io-index)", - "lzw 0.10.0 (registry+https://github.com/rust-lang/crates.io-index)", + "jpeg-decoder 0.1.16 (registry+https://github.com/rust-lang/crates.io-index)", "num-iter 0.1.39 (registry+https://github.com/rust-lang/crates.io-index)", "num-rational 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)", "num-traits 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)", @@ -1141,10 +1080,10 @@ dependencies = [ [[package]] name = "indexmap" -version = "1.1.0" +version = "1.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "serde 1.0.99 (registry+https://github.com/rust-lang/crates.io-index)", + "serde 1.0.101 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -1152,35 +1091,34 @@ name = "inflate" version = "0.4.5" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "adler32 1.0.3 
(registry+https://github.com/rust-lang/crates.io-index)", + "adler32 1.0.4 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "iovec" -version = "0.1.2" +version = "0.1.4" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", - "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "isahc" -version = "0.7.1" +version = "0.7.4" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "bytes 0.4.12 (registry+https://github.com/rust-lang/crates.io-index)", "crossbeam-channel 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)", "crossbeam-utils 0.6.6 (registry+https://github.com/rust-lang/crates.io-index)", - "curl 0.4.22 (registry+https://github.com/rust-lang/crates.io-index)", - "curl-sys 0.4.20 (registry+https://github.com/rust-lang/crates.io-index)", + "curl 0.4.25 (registry+https://github.com/rust-lang/crates.io-index)", + "curl-sys 0.4.23 (registry+https://github.com/rust-lang/crates.io-index)", "futures-io-preview 0.3.0-alpha.18 (registry+https://github.com/rust-lang/crates.io-index)", "futures-util-preview 0.3.0-alpha.18 (registry+https://github.com/rust-lang/crates.io-index)", "http 0.1.18 (registry+https://github.com/rust-lang/crates.io-index)", - "lazy_static 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)", + "lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)", "slab 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)", - "sluice 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)", + "sluice 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -1188,8 +1126,8 @@ name = "isatty" version = "0.1.9" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "cfg-if 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", + "cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", "redox_syscall 0.1.56 (registry+https://github.com/rust-lang/crates.io-index)", "winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -1199,7 +1137,7 @@ name = "itertools" version = "0.7.11" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "either 1.5.2 (registry+https://github.com/rust-lang/crates.io-index)", + "either 1.5.3 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -1207,7 +1145,7 @@ name = "itertools" version = "0.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "either 1.5.2 (registry+https://github.com/rust-lang/crates.io-index)", + "either 1.5.3 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -1215,9 +1153,19 @@ name = "itoa" version = "0.4.4" source = "registry+https://github.com/rust-lang/crates.io-index" +[[package]] +name = "jobserver" +version = "0.1.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "getrandom 0.1.12 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", + "log 0.4.8 
(registry+https://github.com/rust-lang/crates.io-index)", +] + [[package]] name = "jpeg-decoder" -version = "0.1.15" +version = "0.1.16" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "byteorder 1.3.2 (registry+https://github.com/rust-lang/crates.io-index)", @@ -1225,10 +1173,10 @@ dependencies = [ [[package]] name = "js-sys" -version = "0.3.27" +version = "0.3.28" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "wasm-bindgen 0.2.50 (registry+https://github.com/rust-lang/crates.io-index)", + "wasm-bindgen 0.2.51 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -1242,15 +1190,15 @@ dependencies = [ [[package]] name = "language-reporting" -version = "0.3.1" +version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "derive-new 0.5.8 (registry+https://github.com/rust-lang/crates.io-index)", "itertools 0.7.11 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)", "render-tree 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", - "serde 1.0.99 (registry+https://github.com/rust-lang/crates.io-index)", - "serde_derive 1.0.98 (registry+https://github.com/rust-lang/crates.io-index)", + "serde 1.0.101 (registry+https://github.com/rust-lang/crates.io-index)", + "serde_derive 1.0.101 (registry+https://github.com/rust-lang/crates.io-index)", "termcolor 1.0.5 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -1261,7 +1209,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] name = "lazy_static" -version = "1.3.0" +version = "1.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] @@ -1271,30 +1219,30 @@ source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] name = "lexical-core" -version = "0.4.3" +version = "0.4.6" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "cfg-if 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)", + "arrayvec 0.4.12 (registry+https://github.com/rust-lang/crates.io-index)", + "cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", "rustc_version 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)", - "ryu 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)", - "stackvector 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)", - "static_assertions 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)", + "ryu 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)", + "static_assertions 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "libc" -version = "0.2.60" +version = "0.2.62" source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] name = "libgit2-sys" -version = "0.9.0" +version = "0.9.1" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "cc 1.0.38 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", + "cc 1.0.45 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", "libz-sys 1.0.25 (registry+https://github.com/rust-lang/crates.io-index)", - "pkg-config 0.3.15 (registry+https://github.com/rust-lang/crates.io-index)", + "pkg-config 0.3.16 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -1302,8 
+1250,8 @@ name = "libnghttp2-sys" version = "0.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "cc 1.0.38 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", + "cc 1.0.45 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -1311,8 +1259,8 @@ name = "libsqlite3-sys" version = "0.16.0" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "cc 1.0.38 (registry+https://github.com/rust-lang/crates.io-index)", - "pkg-config 0.3.15 (registry+https://github.com/rust-lang/crates.io-index)", + "cc 1.0.45 (registry+https://github.com/rust-lang/crates.io-index)", + "pkg-config 0.3.16 (registry+https://github.com/rust-lang/crates.io-index)", "vcpkg 0.2.7 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -1321,9 +1269,9 @@ name = "libz-sys" version = "1.0.25" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "cc 1.0.38 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", - "pkg-config 0.3.15 (registry+https://github.com/rust-lang/crates.io-index)", + "cc 1.0.45 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", + "pkg-config 0.3.16 (registry+https://github.com/rust-lang/crates.io-index)", "vcpkg 0.2.7 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -1332,7 +1280,7 @@ name = "line-wrap" version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "safemem 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", + "safemem 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -1349,21 +1297,12 @@ name = "linked-hash-map" version = "0.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" -[[package]] -name = "lock_api" -version = "0.1.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "owning_ref 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)", - "scopeguard 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)", -] - [[package]] name = "log" version = "0.4.8" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "cfg-if 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)", + "cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -1374,11 +1313,6 @@ dependencies = [ "linked-hash-map 0.5.2 (registry+https://github.com/rust-lang/crates.io-index)", ] -[[package]] -name = "lzw" -version = "0.10.0" -source = "registry+https://github.com/rust-lang/crates.io-index" - [[package]] name = "macaddr" version = "0.1.1" @@ -1389,7 +1323,7 @@ name = "mach" version = "0.2.3" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -1397,7 +1331,7 @@ name = "mach" version = "0.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -1405,7 +1339,7 @@ name = 
"malloc_buf" version = "0.0.6" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -1423,78 +1357,47 @@ name = "memchr" version = "2.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "memoffset" -version = "0.2.1" -source = "registry+https://github.com/rust-lang/crates.io-index" - -[[package]] -name = "memoffset" -version = "0.5.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "rustc_version 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "mime" -version = "0.3.13" +version = "0.3.14" source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "unicase 2.4.0 (registry+https://github.com/rust-lang/crates.io-index)", -] [[package]] name = "mime_guess" version = "2.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "mime 0.3.13 (registry+https://github.com/rust-lang/crates.io-index)", - "unicase 2.4.0 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "miniz-sys" -version = "0.1.12" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "cc 1.0.38 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", + "mime 0.3.14 (registry+https://github.com/rust-lang/crates.io-index)", + "unicase 2.5.1 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "miniz_oxide" -version = "0.3.1" +version = "0.3.3" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "adler32 1.0.3 (registry+https://github.com/rust-lang/crates.io-index)", + "adler32 1.0.4 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] -name = "miniz_oxide_c_api" -version = "0.2.3" +name = "natural" +version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "cc 1.0.38 (registry+https://github.com/rust-lang/crates.io-index)", - "crc32fast 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", - "miniz_oxide 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", -] [[package]] name = "neso" version = "0.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "bincode 1.1.4 (registry+https://github.com/rust-lang/crates.io-index)", - "cfg-if 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)", + "bincode 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)", + "cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)", - "serde 1.0.99 (registry+https://github.com/rust-lang/crates.io-index)", - "serde_derive 1.0.98 (registry+https://github.com/rust-lang/crates.io-index)", - "wasm-bindgen 0.2.50 (registry+https://github.com/rust-lang/crates.io-index)", + "serde 1.0.101 (registry+https://github.com/rust-lang/crates.io-index)", + "serde_derive 1.0.101 (registry+https://github.com/rust-lang/crates.io-index)", + "wasm-bindgen 0.2.51 
(registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -1502,10 +1405,10 @@ name = "nix" version = "0.14.1" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "bitflags 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)", - "cc 1.0.38 (registry+https://github.com/rust-lang/crates.io-index)", - "cfg-if 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", + "bitflags 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)", + "cc 1.0.45 (registry+https://github.com/rust-lang/crates.io-index)", + "cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", "void 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -1514,16 +1417,16 @@ name = "nix" version = "0.15.0" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "bitflags 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)", - "cc 1.0.38 (registry+https://github.com/rust-lang/crates.io-index)", - "cfg-if 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", + "bitflags 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)", + "cc 1.0.45 (registry+https://github.com/rust-lang/crates.io-index)", + "cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", "void 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "nodrop" -version = "0.1.13" +version = "0.1.14" source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] @@ -1537,22 +1440,41 @@ dependencies = [ [[package]] name = "nom" -version = "5.0.0" +version = "5.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "lexical-core 0.4.3 (registry+https://github.com/rust-lang/crates.io-index)", + "lexical-core 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)", "memchr 2.2.1 (registry+https://github.com/rust-lang/crates.io-index)", "version_check 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] -name = "nom5_locate" -version = "0.1.1" +name = "nom-tracable" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "nom 5.0.1 (registry+https://github.com/rust-lang/crates.io-index)", + "nom-tracable-macros 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)", + "nom_locate 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "nom-tracable-macros" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "quote 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)", + "syn 1.0.5 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "nom_locate" +version = "1.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "bytecount 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)", "memchr 2.2.1 (registry+https://github.com/rust-lang/crates.io-index)", - "nom 5.0.0 (registry+https://github.com/rust-lang/crates.io-index)", + "nom 5.0.1 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -1565,17 +1487,18 @@ dependencies = [ [[package]] name = "nu" -version = "0.2.0" 
+version = "0.4.1" dependencies = [ - "ansi_term 0.12.0 (registry+https://github.com/rust-lang/crates.io-index)", + "ansi_term 0.12.1 (registry+https://github.com/rust-lang/crates.io-index)", "app_dirs 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)", + "async-stream 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", "base64 0.10.1 (registry+https://github.com/rust-lang/crates.io-index)", "battery 0.7.4 (registry+https://github.com/rust-lang/crates.io-index)", "bigdecimal 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", "bson 0.14.0 (registry+https://github.com/rust-lang/crates.io-index)", - "byte-unit 3.0.1 (registry+https://github.com/rust-lang/crates.io-index)", + "byte-unit 3.0.3 (registry+https://github.com/rust-lang/crates.io-index)", "bytes 0.4.12 (registry+https://github.com/rust-lang/crates.io-index)", - "chrono 0.4.7 (registry+https://github.com/rust-lang/crates.io-index)", + "chrono 0.4.9 (registry+https://github.com/rust-lang/crates.io-index)", "chrono-humanize 0.0.11 (registry+https://github.com/rust-lang/crates.io-index)", "clap 2.33.0 (registry+https://github.com/rust-lang/crates.io-index)", "clipboard 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)", @@ -1585,47 +1508,49 @@ dependencies = [ "derive-new 0.5.8 (registry+https://github.com/rust-lang/crates.io-index)", "dirs 2.0.2 (registry+https://github.com/rust-lang/crates.io-index)", "dunce 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)", - "enum-utils 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", - "futures-async-stream 0.1.0-alpha.5 (registry+https://github.com/rust-lang/crates.io-index)", "futures-preview 0.3.0-alpha.18 (registry+https://github.com/rust-lang/crates.io-index)", - "futures-timer 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)", + "futures-timer 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)", "futures_codec 0.2.5 (registry+https://github.com/rust-lang/crates.io-index)", "getset 0.0.8 (registry+https://github.com/rust-lang/crates.io-index)", - "git2 0.10.0 (registry+https://github.com/rust-lang/crates.io-index)", + "git2 0.10.1 (registry+https://github.com/rust-lang/crates.io-index)", "glob 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)", - "heim 0.0.7 (registry+https://github.com/rust-lang/crates.io-index)", + "heim 0.0.8 (registry+https://github.com/rust-lang/crates.io-index)", "hex 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)", - "image 0.22.1 (registry+https://github.com/rust-lang/crates.io-index)", - "indexmap 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)", + "image 0.22.3 (registry+https://github.com/rust-lang/crates.io-index)", + "indexmap 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)", "itertools 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)", - "language-reporting 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", + "language-reporting 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)", - "mime 0.3.13 (registry+https://github.com/rust-lang/crates.io-index)", + "mime 0.3.14 (registry+https://github.com/rust-lang/crates.io-index)", + "natural 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)", "neso 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)", - "nom 5.0.0 (registry+https://github.com/rust-lang/crates.io-index)", - "nom5_locate 0.1.1 
(registry+https://github.com/rust-lang/crates.io-index)", - "num-bigint 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)", + "nom 5.0.1 (registry+https://github.com/rust-lang/crates.io-index)", + "nom-tracable 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)", + "nom_locate 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)", + "num-bigint 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)", "num-traits 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)", "onig_sys 69.1.0 (registry+https://github.com/rust-lang/crates.io-index)", "pin-utils 0.1.0-alpha.4 (registry+https://github.com/rust-lang/crates.io-index)", - "pretty-hex 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", + "pretty-hex 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", "pretty_assertions 0.6.1 (registry+https://github.com/rust-lang/crates.io-index)", "pretty_env_logger 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", "prettytable-rs 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)", "ptree 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)", "rawkey 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)", - "regex 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)", - "roxmltree 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)", + "regex 1.3.1 (registry+https://github.com/rust-lang/crates.io-index)", + "roxmltree 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)", "rusqlite 0.20.0 (registry+https://github.com/rust-lang/crates.io-index)", - "rustyline 5.0.2 (registry+https://github.com/rust-lang/crates.io-index)", + "rustyline 5.0.3 (git+https://github.com/kkawakam/rustyline.git)", "semver 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)", - "serde 1.0.99 (registry+https://github.com/rust-lang/crates.io-index)", + "serde 1.0.101 (registry+https://github.com/rust-lang/crates.io-index)", "serde-hjson 0.9.1 (registry+https://github.com/rust-lang/crates.io-index)", "serde_bytes 0.11.2 (registry+https://github.com/rust-lang/crates.io-index)", "serde_ini 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)", - "serde_json 1.0.40 (registry+https://github.com/rust-lang/crates.io-index)", - "serde_yaml 0.8.9 (registry+https://github.com/rust-lang/crates.io-index)", + "serde_json 1.0.41 (registry+https://github.com/rust-lang/crates.io-index)", + "serde_urlencoded 0.6.1 (registry+https://github.com/rust-lang/crates.io-index)", + "serde_yaml 0.8.11 (registry+https://github.com/rust-lang/crates.io-index)", "shellexpand 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)", + "sublime_fuzzy 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)", "subprocess 0.1.18 (registry+https://github.com/rust-lang/crates.io-index)", "surf 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)", "syntect 3.2.0 (registry+https://github.com/rust-lang/crates.io-index)", @@ -1633,20 +1558,21 @@ dependencies = [ "term 0.5.2 (registry+https://github.com/rust-lang/crates.io-index)", "textwrap 0.11.0 (registry+https://github.com/rust-lang/crates.io-index)", "toml 0.5.3 (registry+https://github.com/rust-lang/crates.io-index)", + "trash 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)", "unicode-xid 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)", "url 2.1.0 (registry+https://github.com/rust-lang/crates.io-index)", - "uuid 0.7.4 (registry+https://github.com/rust-lang/crates.io-index)", "which 2.0.1 
(registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "num-bigint" -version = "0.2.2" +version = "0.2.3" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ + "autocfg 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)", "num-integer 0.1.41 (registry+https://github.com/rust-lang/crates.io-index)", "num-traits 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)", - "serde 1.0.99 (registry+https://github.com/rust-lang/crates.io-index)", + "serde 1.0.101 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -1654,7 +1580,7 @@ name = "num-integer" version = "0.1.41" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "autocfg 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)", + "autocfg 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)", "num-traits 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -1663,7 +1589,7 @@ name = "num-iter" version = "0.1.39" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "autocfg 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)", + "autocfg 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)", "num-integer 0.1.41 (registry+https://github.com/rust-lang/crates.io-index)", "num-traits 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -1673,7 +1599,7 @@ name = "num-rational" version = "0.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "autocfg 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)", + "autocfg 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)", "num-integer 0.1.41 (registry+https://github.com/rust-lang/crates.io-index)", "num-traits 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -1691,7 +1617,7 @@ name = "num-traits" version = "0.2.8" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "autocfg 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)", + "autocfg 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -1699,7 +1625,7 @@ name = "num_cpus" version = "1.10.1" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -1739,12 +1665,12 @@ dependencies = [ [[package]] name = "onig" -version = "4.3.2" +version = "4.3.3" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "bitflags 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)", - "lazy_static 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", + "bitflags 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)", + "lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", "onig_sys 69.1.0 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -1753,8 +1679,8 @@ name = "onig_sys" version = "69.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "cc 1.0.38 (registry+https://github.com/rust-lang/crates.io-index)", - "pkg-config 0.3.15 (registry+https://github.com/rust-lang/crates.io-index)", + "cc 1.0.45 
(registry+https://github.com/rust-lang/crates.io-index)", + "pkg-config 0.3.16 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -1764,13 +1690,13 @@ source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] name = "openssl-sys" -version = "0.9.49" +version = "0.9.51" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "autocfg 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)", - "cc 1.0.38 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", - "pkg-config 0.3.15 (registry+https://github.com/rust-lang/crates.io-index)", + "autocfg 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)", + "cc 1.0.45 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", + "pkg-config 0.3.16 (registry+https://github.com/rust-lang/crates.io-index)", "vcpkg 0.2.7 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -1800,35 +1726,6 @@ dependencies = [ "winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", ] -[[package]] -name = "owning_ref" -version = "0.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "stable_deref_trait 1.1.1 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "parking_lot" -version = "0.6.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "lock_api 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)", - "parking_lot_core 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "parking_lot_core" -version = "0.3.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", - "rand 0.5.6 (registry+https://github.com/rust-lang/crates.io-index)", - "rustc_version 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)", - "smallvec 0.6.10 (registry+https://github.com/rust-lang/crates.io-index)", - "winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", -] - [[package]] name = "percent-encoding" version = "2.1.0" @@ -1843,24 +1740,6 @@ dependencies = [ "ordermap 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)", ] -[[package]] -name = "pin-project" -version = "0.4.0-alpha.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "pin-project-internal 0.4.0-alpha.5 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "pin-project-internal" -version = "0.4.0-alpha.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "proc-macro2 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)", - "quote 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)", - "syn 1.0.4 (registry+https://github.com/rust-lang/crates.io-index)", -] - [[package]] name = "pin-utils" version = "0.1.0-alpha.4" @@ -1868,12 +1747,12 @@ source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] name = "pkg-config" -version = "0.3.15" +version = "0.3.16" source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] name = "platforms" -version = "0.2.0" +version = "0.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] @@ -1883,9 +1762,9 @@ source = "registry+https://github.com/rust-lang/crates.io-index" 
dependencies = [ "base64 0.10.1 (registry+https://github.com/rust-lang/crates.io-index)", "byteorder 1.3.2 (registry+https://github.com/rust-lang/crates.io-index)", - "humantime 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)", + "humantime 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)", "line-wrap 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", - "serde 1.0.99 (registry+https://github.com/rust-lang/crates.io-index)", + "serde 1.0.101 (registry+https://github.com/rust-lang/crates.io-index)", "xml-rs 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -1894,7 +1773,7 @@ name = "png" version = "0.15.0" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "bitflags 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)", + "bitflags 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)", "crc32fast 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)", "deflate 0.7.20 (registry+https://github.com/rust-lang/crates.io-index)", "inflate 0.4.5 (registry+https://github.com/rust-lang/crates.io-index)", @@ -1907,7 +1786,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] name = "pretty-hex" -version = "0.1.0" +version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] @@ -1916,7 +1795,7 @@ version = "0.6.1" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "ansi_term 0.11.0 (registry+https://github.com/rust-lang/crates.io-index)", - "ctor 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)", + "ctor 0.1.12 (registry+https://github.com/rust-lang/crates.io-index)", "difference 2.0.0 (registry+https://github.com/rust-lang/crates.io-index)", "output_vt100 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -1926,7 +1805,7 @@ name = "pretty_env_logger" version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "chrono 0.4.7 (registry+https://github.com/rust-lang/crates.io-index)", + "chrono 0.4.9 (registry+https://github.com/rust-lang/crates.io-index)", "env_logger 0.6.2 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -1938,23 +1817,15 @@ source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "atty 0.2.13 (registry+https://github.com/rust-lang/crates.io-index)", "csv 1.1.1 (registry+https://github.com/rust-lang/crates.io-index)", - "encode_unicode 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)", - "lazy_static 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)", + "encode_unicode 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)", + "lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)", "term 0.5.2 (registry+https://github.com/rust-lang/crates.io-index)", "unicode-width 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "proc-macro2" -version = "0.4.30" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "unicode-xid 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "proc-macro2" -version = "1.0.1" +version = "1.0.5" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "unicode-xid 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)", @@ -1970,9 +1841,9 @@ dependencies = [ "directories 1.0.2 
(registry+https://github.com/rust-lang/crates.io-index)", "isatty 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)", "petgraph 0.4.13 (registry+https://github.com/rust-lang/crates.io-index)", - "serde 1.0.99 (registry+https://github.com/rust-lang/crates.io-index)", + "serde 1.0.101 (registry+https://github.com/rust-lang/crates.io-index)", "serde-value 0.5.3 (registry+https://github.com/rust-lang/crates.io-index)", - "serde_derive 1.0.98 (registry+https://github.com/rust-lang/crates.io-index)", + "serde_derive 1.0.101 (registry+https://github.com/rust-lang/crates.io-index)", "tint 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -1981,80 +1852,33 @@ name = "quick-error" version = "1.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" -[[package]] -name = "quote" -version = "0.6.13" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "proc-macro2 0.4.30 (registry+https://github.com/rust-lang/crates.io-index)", -] - [[package]] name = "quote" version = "1.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "proc-macro2 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)", + "proc-macro2 1.0.5 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "rand" -version = "0.5.6" +version = "0.7.2" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "cloudabi 0.0.3 (registry+https://github.com/rust-lang/crates.io-index)", - "fuchsia-cprng 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", - "rand_core 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", - "winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "rand" -version = "0.6.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "autocfg 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", - "rand_chacha 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", - "rand_core 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)", - "rand_hc 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", - "rand_isaac 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", - "rand_jitter 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)", - "rand_os 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)", - "rand_pcg 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)", - "rand_xorshift 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", - "winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "rand" -version = "0.7.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "getrandom 0.1.8 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", + "getrandom 0.1.12 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", "rand_chacha 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)", - "rand_core 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)", + "rand_core 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)", "rand_hc 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)", ] 
-[[package]] -name = "rand_chacha" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "autocfg 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)", - "rand_core 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", -] - [[package]] name = "rand_chacha" version = "0.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "c2-chacha 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)", - "rand_core 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)", + "rand_core 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -2072,18 +1896,10 @@ source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] name = "rand_core" -version = "0.5.0" +version = "0.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "getrandom 0.1.8 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "rand_hc" -version = "0.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "rand_core 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", + "getrandom 0.1.12 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -2091,25 +1907,7 @@ name = "rand_hc" version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "rand_core 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "rand_isaac" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "rand_core 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "rand_jitter" -version = "0.1.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", - "rand_core 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)", - "winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", + "rand_core 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -2119,36 +1917,19 @@ source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "cloudabi 0.0.3 (registry+https://github.com/rust-lang/crates.io-index)", "fuchsia-cprng 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", "rand_core 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)", "rdrand 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)", "winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", ] -[[package]] -name = "rand_pcg" -version = "0.1.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "autocfg 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)", - "rand_core 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "rand_xorshift" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "rand_core 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", -] - [[package]] name = "raw-cpuid" -version = "6.1.0" +version = "7.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "bitflags 1.1.0 
(registry+https://github.com/rust-lang/crates.io-index)", - "cc 1.0.38 (registry+https://github.com/rust-lang/crates.io-index)", + "bitflags 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)", + "cc 1.0.45 (registry+https://github.com/rust-lang/crates.io-index)", "rustc_version 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -2186,20 +1967,20 @@ name = "redox_users" version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "failure 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)", + "failure 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)", "rand_os 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)", "redox_syscall 0.1.56 (registry+https://github.com/rust-lang/crates.io-index)", - "rust-argon2 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)", + "rust-argon2 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "regex" -version = "1.2.1" +version = "1.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "aho-corasick 0.7.6 (registry+https://github.com/rust-lang/crates.io-index)", "memchr 2.2.1 (registry+https://github.com/rust-lang/crates.io-index)", - "regex-syntax 0.6.11 (registry+https://github.com/rust-lang/crates.io-index)", + "regex-syntax 0.6.12 (registry+https://github.com/rust-lang/crates.io-index)", "thread_local 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -2213,7 +1994,7 @@ dependencies = [ [[package]] name = "regex-syntax" -version = "0.6.11" +version = "0.6.12" source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] @@ -2241,10 +2022,10 @@ source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] name = "roxmltree" -version = "0.7.0" +version = "0.7.1" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "xmlparser 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)", + "xmlparser 0.10.0 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -2252,7 +2033,7 @@ name = "rusqlite" version = "0.20.0" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "bitflags 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)", + "bitflags 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)", "fallible-iterator 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)", "fallible-streaming-iterator 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)", "libsqlite3-sys 0.16.0 (registry+https://github.com/rust-lang/crates.io-index)", @@ -2263,12 +2044,12 @@ dependencies = [ [[package]] name = "rust-argon2" -version = "0.5.0" +version = "0.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "base64 0.10.1 (registry+https://github.com/rust-lang/crates.io-index)", - "blake2b_simd 0.5.6 (registry+https://github.com/rust-lang/crates.io-index)", - "crossbeam 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)", + "blake2b_simd 0.5.8 (registry+https://github.com/rust-lang/crates.io-index)", + "crossbeam-utils 0.6.6 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -2278,7 +2059,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] name = "rustc-demangle" -version = "0.1.15" +version = "0.1.16" source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] @@ -2296,11 +2077,11 @@ dependencies = [ [[package]] name = 
"rustyline" -version = "5.0.2" -source = "registry+https://github.com/rust-lang/crates.io-index" +version = "5.0.3" +source = "git+https://github.com/kkawakam/rustyline.git#449c811998f630102bb2d9fb0b59b890d9eabac5" dependencies = [ "dirs 2.0.2 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)", "memchr 2.2.1 (registry+https://github.com/rust-lang/crates.io-index)", "nix 0.14.1 (registry+https://github.com/rust-lang/crates.io-index)", @@ -2312,12 +2093,12 @@ dependencies = [ [[package]] name = "ryu" -version = "1.0.0" +version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] name = "safemem" -version = "0.3.1" +version = "0.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] @@ -2330,23 +2111,13 @@ dependencies = [ [[package]] name = "schannel" -version = "0.1.15" +version = "0.1.16" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "lazy_static 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)", + "lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)", "winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", ] -[[package]] -name = "scopeguard" -version = "0.3.3" -source = "registry+https://github.com/rust-lang/crates.io-index" - -[[package]] -name = "scopeguard" -version = "1.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" - [[package]] name = "semver" version = "0.9.0" @@ -2367,10 +2138,10 @@ source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] name = "serde" -version = "1.0.99" +version = "1.0.101" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "serde_derive 1.0.98 (registry+https://github.com/rust-lang/crates.io-index)", + "serde_derive 1.0.101 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -2381,7 +2152,7 @@ dependencies = [ "lazy_static 0.2.11 (registry+https://github.com/rust-lang/crates.io-index)", "linked-hash-map 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)", "num-traits 0.1.43 (registry+https://github.com/rust-lang/crates.io-index)", - "regex 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)", + "regex 1.3.1 (registry+https://github.com/rust-lang/crates.io-index)", "serde 0.8.23 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -2390,10 +2161,10 @@ name = "serde-hjson" version = "0.9.1" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "lazy_static 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)", + "lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)", "linked-hash-map 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)", "num-traits 0.1.43 (registry+https://github.com/rust-lang/crates.io-index)", - "regex 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)", + "regex 1.3.1 (registry+https://github.com/rust-lang/crates.io-index)", "serde 0.8.23 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -2403,7 +2174,7 @@ version = "0.5.3" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "ordered-float 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)", - "serde 1.0.99 (registry+https://github.com/rust-lang/crates.io-index)", + 
"serde 1.0.101 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -2411,26 +2182,17 @@ name = "serde_bytes" version = "0.11.2" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "serde 1.0.99 (registry+https://github.com/rust-lang/crates.io-index)", + "serde 1.0.101 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "serde_derive" -version = "1.0.98" +version = "1.0.101" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "proc-macro2 0.4.30 (registry+https://github.com/rust-lang/crates.io-index)", - "quote 0.6.13 (registry+https://github.com/rust-lang/crates.io-index)", - "syn 0.15.43 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "serde_derive_internals" -version = "0.24.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "proc-macro2 0.4.30 (registry+https://github.com/rust-lang/crates.io-index)", - "syn 0.15.43 (registry+https://github.com/rust-lang/crates.io-index)", + "proc-macro2 1.0.5 (registry+https://github.com/rust-lang/crates.io-index)", + "quote 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)", + "syn 1.0.5 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -2439,19 +2201,19 @@ version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "result 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)", - "serde 1.0.99 (registry+https://github.com/rust-lang/crates.io-index)", + "serde 1.0.101 (registry+https://github.com/rust-lang/crates.io-index)", "void 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "serde_json" -version = "1.0.40" +version = "1.0.41" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "indexmap 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)", + "indexmap 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)", "itoa 0.4.4 (registry+https://github.com/rust-lang/crates.io-index)", - "ryu 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)", - "serde 1.0.99 (registry+https://github.com/rust-lang/crates.io-index)", + "ryu 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)", + "serde 1.0.101 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -2469,18 +2231,18 @@ source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "dtoa 0.4.4 (registry+https://github.com/rust-lang/crates.io-index)", "itoa 0.4.4 (registry+https://github.com/rust-lang/crates.io-index)", - "serde 1.0.99 (registry+https://github.com/rust-lang/crates.io-index)", + "serde 1.0.101 (registry+https://github.com/rust-lang/crates.io-index)", "url 2.1.0 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "serde_yaml" -version = "0.8.9" +version = "0.8.11" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "dtoa 0.4.4 (registry+https://github.com/rust-lang/crates.io-index)", "linked-hash-map 0.5.2 (registry+https://github.com/rust-lang/crates.io-index)", - "serde 1.0.99 (registry+https://github.com/rust-lang/crates.io-index)", + "serde 1.0.101 (registry+https://github.com/rust-lang/crates.io-index)", "yaml-rust 0.4.3 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -2505,7 +2267,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] name = "sluice" -version = "0.4.1" +version = 
"0.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "futures-channel-preview 0.3.0-alpha.18 (registry+https://github.com/rust-lang/crates.io-index)", @@ -2523,8 +2285,8 @@ name = "socket2" version = "0.3.11" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "cfg-if 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", + "cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", "redox_syscall 0.1.56 (registry+https://github.com/rust-lang/crates.io-index)", "winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -2534,23 +2296,9 @@ name = "sourcefile" version = "0.1.4" source = "registry+https://github.com/rust-lang/crates.io-index" -[[package]] -name = "stable_deref_trait" -version = "1.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" - -[[package]] -name = "stackvector" -version = "1.0.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "rustc_version 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)", - "unreachable 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)", -] - [[package]] name = "static_assertions" -version = "0.3.3" +version = "0.3.4" source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] @@ -2558,13 +2306,18 @@ name = "strsim" version = "0.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" +[[package]] +name = "sublime_fuzzy" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" + [[package]] name = "subprocess" version = "0.1.18" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "crossbeam-utils 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", "winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -2575,49 +2328,39 @@ source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "futures-preview 0.3.0-alpha.18 (registry+https://github.com/rust-lang/crates.io-index)", "http 0.1.18 (registry+https://github.com/rust-lang/crates.io-index)", - "isahc 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)", - "js-sys 0.3.27 (registry+https://github.com/rust-lang/crates.io-index)", + "isahc 0.7.4 (registry+https://github.com/rust-lang/crates.io-index)", + "js-sys 0.3.28 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)", - "mime 0.3.13 (registry+https://github.com/rust-lang/crates.io-index)", + "mime 0.3.14 (registry+https://github.com/rust-lang/crates.io-index)", "mime_guess 2.0.1 (registry+https://github.com/rust-lang/crates.io-index)", - "serde 1.0.99 (registry+https://github.com/rust-lang/crates.io-index)", - "serde_json 1.0.40 (registry+https://github.com/rust-lang/crates.io-index)", + "serde 1.0.101 (registry+https://github.com/rust-lang/crates.io-index)", + "serde_json 1.0.41 (registry+https://github.com/rust-lang/crates.io-index)", "serde_urlencoded 0.6.1 (registry+https://github.com/rust-lang/crates.io-index)", "url 2.1.0 (registry+https://github.com/rust-lang/crates.io-index)", - "wasm-bindgen 0.2.50 
(registry+https://github.com/rust-lang/crates.io-index)", - "wasm-bindgen-futures 0.3.25 (registry+https://github.com/rust-lang/crates.io-index)", - "web-sys 0.3.27 (registry+https://github.com/rust-lang/crates.io-index)", + "wasm-bindgen 0.2.51 (registry+https://github.com/rust-lang/crates.io-index)", + "wasm-bindgen-futures 0.3.27 (registry+https://github.com/rust-lang/crates.io-index)", + "web-sys 0.3.28 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "syn" -version = "0.15.43" +version = "1.0.5" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "proc-macro2 0.4.30 (registry+https://github.com/rust-lang/crates.io-index)", - "quote 0.6.13 (registry+https://github.com/rust-lang/crates.io-index)", - "unicode-xid 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "syn" -version = "1.0.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "proc-macro2 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)", + "proc-macro2 1.0.5 (registry+https://github.com/rust-lang/crates.io-index)", "quote 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)", "unicode-xid 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "synstructure" -version = "0.10.2" +version = "0.12.1" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "proc-macro2 0.4.30 (registry+https://github.com/rust-lang/crates.io-index)", - "quote 0.6.13 (registry+https://github.com/rust-lang/crates.io-index)", - "syn 0.15.43 (registry+https://github.com/rust-lang/crates.io-index)", - "unicode-xid 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", + "proc-macro2 1.0.5 (registry+https://github.com/rust-lang/crates.io-index)", + "quote 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)", + "syn 1.0.5 (registry+https://github.com/rust-lang/crates.io-index)", + "unicode-xid 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -2625,18 +2368,18 @@ name = "syntect" version = "3.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "bincode 1.1.4 (registry+https://github.com/rust-lang/crates.io-index)", - "bitflags 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)", - "flate2 1.0.9 (registry+https://github.com/rust-lang/crates.io-index)", + "bincode 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)", + "bitflags 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)", + "flate2 1.0.12 (registry+https://github.com/rust-lang/crates.io-index)", "fnv 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)", - "lazy_static 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)", + "lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)", "lazycell 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)", - "onig 4.3.2 (registry+https://github.com/rust-lang/crates.io-index)", + "onig 4.3.3 (registry+https://github.com/rust-lang/crates.io-index)", "plist 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)", - "regex-syntax 0.6.11 (registry+https://github.com/rust-lang/crates.io-index)", - "serde 1.0.99 (registry+https://github.com/rust-lang/crates.io-index)", - "serde_derive 1.0.98 (registry+https://github.com/rust-lang/crates.io-index)", - "serde_json 1.0.40 (registry+https://github.com/rust-lang/crates.io-index)", + "regex-syntax 0.6.12 
(registry+https://github.com/rust-lang/crates.io-index)", + "serde 1.0.101 (registry+https://github.com/rust-lang/crates.io-index)", + "serde_derive 1.0.101 (registry+https://github.com/rust-lang/crates.io-index)", + "serde_json 1.0.41 (registry+https://github.com/rust-lang/crates.io-index)", "walkdir 2.2.9 (registry+https://github.com/rust-lang/crates.io-index)", "yaml-rust 0.4.3 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -2646,9 +2389,9 @@ name = "tempfile" version = "3.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "cfg-if 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", - "rand 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)", + "cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", + "rand 0.7.2 (registry+https://github.com/rust-lang/crates.io-index)", "redox_syscall 0.1.56 (registry+https://github.com/rust-lang/crates.io-index)", "remove_dir_all 0.5.2 (registry+https://github.com/rust-lang/crates.io-index)", "winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", @@ -2670,7 +2413,7 @@ version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -2679,7 +2422,7 @@ name = "termcolor" version = "1.0.5" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "wincolor 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)", + "wincolor 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -2696,7 +2439,7 @@ name = "thread_local" version = "0.3.6" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "lazy_static 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)", + "lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -2712,7 +2455,7 @@ name = "time" version = "0.1.42" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", "redox_syscall 0.1.56 (registry+https://github.com/rust-lang/crates.io-index)", "winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -2731,7 +2474,7 @@ version = "0.1.12" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "bytes 0.4.12 (registry+https://github.com/rust-lang/crates.io-index)", - "futures 0.1.28 (registry+https://github.com/rust-lang/crates.io-index)", + "futures 0.1.29 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -2740,7 +2483,7 @@ name = "toml" version = "0.4.10" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "serde 1.0.99 (registry+https://github.com/rust-lang/crates.io-index)", + "serde 1.0.101 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -2748,17 +2491,25 @@ name = "toml" version = "0.5.3" source = 
"registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "serde 1.0.99 (registry+https://github.com/rust-lang/crates.io-index)", + "serde 1.0.101 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "trash" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "typenum" -version = "1.10.0" +version = "1.11.2" source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] name = "unicase" -version = "2.4.0" +version = "2.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "version_check 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)", @@ -2790,31 +2541,18 @@ name = "unicode-width" version = "0.1.6" source = "registry+https://github.com/rust-lang/crates.io-index" -[[package]] -name = "unicode-xid" -version = "0.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" - [[package]] name = "unicode-xid" version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -[[package]] -name = "unreachable" -version = "1.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "void 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)", -] - [[package]] name = "uom" version = "0.23.1" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "num-traits 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)", - "typenum 1.10.0 (registry+https://github.com/rust-lang/crates.io-index)", + "typenum 1.11.2 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -2824,7 +2562,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "num-rational 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)", "num-traits 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)", - "typenum 1.10.0 (registry+https://github.com/rust-lang/crates.io-index)", + "typenum 1.11.2 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -2851,15 +2589,6 @@ name = "utf8parse" version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -[[package]] -name = "uuid" -version = "0.7.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "rand 0.6.5 (registry+https://github.com/rust-lang/crates.io-index)", - "serde 1.0.99 (registry+https://github.com/rust-lang/crates.io-index)", -] - [[package]] name = "vcpkg" version = "0.2.7" @@ -2890,93 +2619,100 @@ dependencies = [ "winapi-util 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)", ] +[[package]] +name = "wasi" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" + [[package]] name = "wasm-bindgen" -version = "0.2.50" +version = "0.2.51" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "cfg-if 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)", - "wasm-bindgen-macro 0.2.50 (registry+https://github.com/rust-lang/crates.io-index)", + "cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", + "wasm-bindgen-macro 0.2.51 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "wasm-bindgen-backend" -version = "0.2.50" +version = "0.2.51" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "bumpalo 2.5.0 
(registry+https://github.com/rust-lang/crates.io-index)", - "lazy_static 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)", + "bumpalo 2.6.0 (registry+https://github.com/rust-lang/crates.io-index)", + "lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)", - "proc-macro2 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)", + "proc-macro2 1.0.5 (registry+https://github.com/rust-lang/crates.io-index)", "quote 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)", - "syn 1.0.4 (registry+https://github.com/rust-lang/crates.io-index)", - "wasm-bindgen-shared 0.2.50 (registry+https://github.com/rust-lang/crates.io-index)", + "syn 1.0.5 (registry+https://github.com/rust-lang/crates.io-index)", + "wasm-bindgen-shared 0.2.51 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "wasm-bindgen-futures" -version = "0.3.25" +version = "0.3.27" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "futures 0.1.28 (registry+https://github.com/rust-lang/crates.io-index)", + "cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", + "futures 0.1.29 (registry+https://github.com/rust-lang/crates.io-index)", "futures-channel-preview 0.3.0-alpha.18 (registry+https://github.com/rust-lang/crates.io-index)", "futures-util-preview 0.3.0-alpha.18 (registry+https://github.com/rust-lang/crates.io-index)", - "js-sys 0.3.27 (registry+https://github.com/rust-lang/crates.io-index)", - "lazy_static 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)", - "wasm-bindgen 0.2.50 (registry+https://github.com/rust-lang/crates.io-index)", + "js-sys 0.3.28 (registry+https://github.com/rust-lang/crates.io-index)", + "lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)", + "wasm-bindgen 0.2.51 (registry+https://github.com/rust-lang/crates.io-index)", + "web-sys 0.3.28 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "wasm-bindgen-macro" -version = "0.2.50" +version = "0.2.51" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "quote 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)", - "wasm-bindgen-macro-support 0.2.50 (registry+https://github.com/rust-lang/crates.io-index)", + "wasm-bindgen-macro-support 0.2.51 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "wasm-bindgen-macro-support" -version = "0.2.50" +version = "0.2.51" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "proc-macro2 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)", + "proc-macro2 1.0.5 (registry+https://github.com/rust-lang/crates.io-index)", "quote 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)", - "syn 1.0.4 (registry+https://github.com/rust-lang/crates.io-index)", - "wasm-bindgen-backend 0.2.50 (registry+https://github.com/rust-lang/crates.io-index)", - "wasm-bindgen-shared 0.2.50 (registry+https://github.com/rust-lang/crates.io-index)", + "syn 1.0.5 (registry+https://github.com/rust-lang/crates.io-index)", + "wasm-bindgen-backend 0.2.51 (registry+https://github.com/rust-lang/crates.io-index)", + "wasm-bindgen-shared 0.2.51 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "wasm-bindgen-shared" -version = "0.2.50" +version = "0.2.51" source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] name = "wasm-bindgen-webidl" 
-version = "0.2.50" +version = "0.2.51" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "failure 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)", + "failure 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)", "heck 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)", - "proc-macro2 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)", + "proc-macro2 1.0.5 (registry+https://github.com/rust-lang/crates.io-index)", "quote 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)", - "syn 1.0.4 (registry+https://github.com/rust-lang/crates.io-index)", - "wasm-bindgen-backend 0.2.50 (registry+https://github.com/rust-lang/crates.io-index)", + "syn 1.0.5 (registry+https://github.com/rust-lang/crates.io-index)", + "wasm-bindgen-backend 0.2.51 (registry+https://github.com/rust-lang/crates.io-index)", "weedle 0.10.0 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "web-sys" -version = "0.3.27" +version = "0.3.28" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "failure 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)", - "js-sys 0.3.27 (registry+https://github.com/rust-lang/crates.io-index)", + "failure 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)", + "js-sys 0.3.28 (registry+https://github.com/rust-lang/crates.io-index)", "sourcefile 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)", - "wasm-bindgen 0.2.50 (registry+https://github.com/rust-lang/crates.io-index)", - "wasm-bindgen-webidl 0.2.50 (registry+https://github.com/rust-lang/crates.io-index)", + "wasm-bindgen 0.2.51 (registry+https://github.com/rust-lang/crates.io-index)", + "wasm-bindgen-webidl 0.2.51 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -2992,8 +2728,8 @@ name = "which" version = "2.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "failure 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", + "failure 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -3040,7 +2776,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] name = "wincolor" -version = "1.0.1" +version = "1.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", @@ -3052,8 +2788,8 @@ name = "x11" version = "2.18.1" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", - "pkg-config 0.3.15 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", + "pkg-config 0.3.16 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -3069,7 +2805,7 @@ name = "xcb" version = "0.8.2" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -3085,7 +2821,7 @@ source = 
"registry+https://github.com/rust-lang/crates.io-index" [[package]] name = "xmlparser" -version = "0.9.0" +version = "0.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] @@ -3097,50 +2833,48 @@ dependencies = [ ] [metadata] -"checksum adler32 1.0.3 (registry+https://github.com/rust-lang/crates.io-index)" = "7e522997b529f05601e05166c07ed17789691f562762c7f3b987263d2dedee5c" +"checksum adler32 1.0.4 (registry+https://github.com/rust-lang/crates.io-index)" = "5d2e7343e7fc9de883d1b0341e0b13970f764c14101234857d2ddafa1cb1cac2" "checksum aho-corasick 0.7.6 (registry+https://github.com/rust-lang/crates.io-index)" = "58fb5e95d83b38284460a5fda7d6470aa0b8844d283a0b614b8535e880800d2d" "checksum ansi_term 0.11.0 (registry+https://github.com/rust-lang/crates.io-index)" = "ee49baf6cb617b853aa8d93bf420db2383fab46d314482ca2803b40d5fde979b" -"checksum ansi_term 0.12.0 (registry+https://github.com/rust-lang/crates.io-index)" = "eaa72766c3585a1f812a3387a7e2c6cab780f899c2f43ff6ea06c8d071fcbb36" +"checksum ansi_term 0.12.1 (registry+https://github.com/rust-lang/crates.io-index)" = "d52a9bb7ec0cf484c551830a7ce27bd20d67eac647e1befb56b0be4ee39a55d2" "checksum app_dirs 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "e73a24bad9bd6a94d6395382a6c69fe071708ae4409f763c5475e14ee896313d" "checksum arrayref 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)" = "0d382e583f07208808f6b1249e60848879ba3543f57c32277bf52d69c2f0f0ee" -"checksum arrayvec 0.4.11 (registry+https://github.com/rust-lang/crates.io-index)" = "b8d73f9beda665eaa98ab9e4f7442bd4e7de6652587de55b2525e52e29c1b0ba" +"checksum arrayvec 0.4.12 (registry+https://github.com/rust-lang/crates.io-index)" = "cd9fd44efafa8690358b7408d253adf110036b88f55672a933f01d616ad9b1b9" +"checksum async-stream 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "650be9b667e47506c42ee53034fb1935443cb2447a3a5c0a75e303d2e756fa73" +"checksum async-stream-impl 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "4f0d8c5b411e36dcfb04388bacfec54795726b1f0148adcb0f377a96d6747e0e" "checksum atty 0.2.13 (registry+https://github.com/rust-lang/crates.io-index)" = "1803c647a3ec87095e7ae7acfca019e98de5ec9a7d01343f611cf3152ed71a90" -"checksum autocfg 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)" = "22130e92352b948e7e82a49cdb0aa94f2211761117f29e052dd397c1ac33542b" -"checksum backtrace 0.3.34 (registry+https://github.com/rust-lang/crates.io-index)" = "b5164d292487f037ece34ec0de2fcede2faa162f085dd96d2385ab81b12765ba" +"checksum autocfg 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)" = "b671c8fb71b457dd4ae18c4ba1e59aa81793daacc361d82fcd410cef0d491875" +"checksum backtrace 0.3.38 (registry+https://github.com/rust-lang/crates.io-index)" = "690a62be8920ccf773ee00ef0968649b0e724cda8bd5b12286302b4ae955fdf5" "checksum backtrace-sys 0.1.31 (registry+https://github.com/rust-lang/crates.io-index)" = "82a830b4ef2d1124a711c71d263c5abdc710ef8e907bd508c88be475cebc422b" "checksum base64 0.10.1 (registry+https://github.com/rust-lang/crates.io-index)" = "0b25d992356d2eb0ed82172f5248873db5560c4721f564b13cb5193bda5e668e" "checksum battery 0.7.4 (registry+https://github.com/rust-lang/crates.io-index)" = "7a6d6fe5630049e900227cd89afce4c1204b88ec8e61a2581bb96fcce26f047b" "checksum bigdecimal 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "460825c9e21708024d67c07057cd5560e5acdccac85de0de624a81d3de51bacb" -"checksum bincode 1.1.4 (registry+https://github.com/rust-lang/crates.io-index)" = 
"9f04a5e50dc80b3d5d35320889053637d15011aed5e66b66b37ae798c65da6f7" -"checksum bitflags 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "3d155346769a6855b86399e9bc3814ab343cd3d62c7e985113d46a0ec3c281fd" -"checksum blake2b_simd 0.5.6 (registry+https://github.com/rust-lang/crates.io-index)" = "461f4b879a8eb70c1debf7d0788a9a5ff15f1ea9d25925fea264ef4258bed6b2" +"checksum bincode 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "b8ab639324e3ee8774d296864fbc0dbbb256cf1a41c490b94cba90c082915f92" +"checksum bitflags 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "8a606a02debe2813760609f57a64a2ffd27d9fdf5b2f133eaca0b248dd92cdd2" +"checksum blake2b_simd 0.5.8 (registry+https://github.com/rust-lang/crates.io-index)" = "5850aeee1552f495dd0250014cf64b82b7c8879a89d83b33bbdace2cc4f63182" "checksum block 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)" = "0d8c1fef690941d3e7788d328517591fecc684c084084702d6ff1641e993699a" "checksum bson 0.14.0 (registry+https://github.com/rust-lang/crates.io-index)" = "d61895d21e2194d1ce1d434cff69025daac1e49a8b4698eb04b05722dbc08b33" -"checksum bstr 0.2.6 (registry+https://github.com/rust-lang/crates.io-index)" = "e0a692f1c740e7e821ca71a22cf99b9b2322dfa94d10f71443befb1797b3946a" -"checksum bumpalo 2.5.0 (registry+https://github.com/rust-lang/crates.io-index)" = "2cd43d82f27d68911e6ee11ee791fb248f138f5d69424dc02e098d4f152b0b05" -"checksum byte-unit 3.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = "90139954ec9776c4832d44f212e558ccdacbe915a881bf3de3a1a487fa8d1e87" +"checksum bstr 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)" = "8d6c2c5b58ab920a4f5aeaaca34b4488074e8cc7596af94e6f8c6ff247c60245" +"checksum bumpalo 2.6.0 (registry+https://github.com/rust-lang/crates.io-index)" = "ad807f2fc2bf185eeb98ff3a901bd46dc5ad58163d0fa4577ba0d25674d71708" +"checksum byte-unit 3.0.3 (registry+https://github.com/rust-lang/crates.io-index)" = "6894a79550807490d9f19a138a6da0f8830e70c83e83402dd23f16fd6c479056" "checksum bytecount 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)" = "f861d9ce359f56dbcb6e0c2a1cb84e52ad732cadb57b806adeb3c7668caccbd8" "checksum byteorder 1.3.2 (registry+https://github.com/rust-lang/crates.io-index)" = "a7c3dd8985a7111efc5c80b44e23ecdd8c007de8ade3b96595387e812b957cf5" "checksum bytes 0.4.12 (registry+https://github.com/rust-lang/crates.io-index)" = "206fdffcfa2df7cbe15601ef46c813fce0965eb3286db6b56c583b814b51c81c" "checksum c2-chacha 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "7d64d04786e0f528460fc884753cf8dddcc466be308f6026f8e355c41a0e4101" -"checksum cc 1.0.38 (registry+https://github.com/rust-lang/crates.io-index)" = "ce400c638d48ee0e9ab75aef7997609ec57367ccfe1463f21bf53c3eca67bf46" -"checksum cfg-if 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)" = "b486ce3ccf7ffd79fdeb678eac06a9e6c09fc88d33836340becb8fffe87c5e33" -"checksum chrono 0.4.7 (registry+https://github.com/rust-lang/crates.io-index)" = "77d81f58b7301084de3b958691458a53c3f7e0b1d702f77e550b6a88e3a88abe" +"checksum cc 1.0.45 (registry+https://github.com/rust-lang/crates.io-index)" = "4fc9a35e1f4290eb9e5fc54ba6cf40671ed2a2514c3eeb2b2a908dda2ea5a1be" +"checksum cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)" = "4785bdd1c96b2a846b2bd7cc02e86b6b3dbf14e7e53446c4f54c92a361040822" +"checksum chrono 0.4.9 (registry+https://github.com/rust-lang/crates.io-index)" = "e8493056968583b0193c1bb04d6f7684586f3726992d6c573261941a895dbd68" "checksum 
chrono-humanize 0.0.11 (registry+https://github.com/rust-lang/crates.io-index)" = "eb2ff48a655fe8d2dae9a39e66af7fd8ff32a879e8c4e27422c25596a8b5e90d" "checksum clap 2.33.0 (registry+https://github.com/rust-lang/crates.io-index)" = "5067f5bb2d80ef5d68b4c87db81601f0b75bca627bc2ef76b141d7b846a3c6d9" "checksum clipboard 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)" = "25a904646c0340239dcf7c51677b33928bf24fdf424b79a57909c0109075b2e7" "checksum clipboard-win 2.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "e3a093d6fed558e5fe24c3dfc85a68bb68f1c824f440d3ba5aca189e2998786b" "checksum cloudabi 0.0.3 (registry+https://github.com/rust-lang/crates.io-index)" = "ddfc5b9aa5d4507acaf872de71051dfd0e309860e88966e1051e462a077aac4f" "checksum config 0.9.3 (registry+https://github.com/rust-lang/crates.io-index)" = "f9107d78ed62b3fa5a86e7d18e647abed48cfd8f8fab6c72f4cdb982d196f7e6" -"checksum constant_time_eq 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)" = "8ff012e225ce166d4422e0e78419d901719760f62ae2b7969ca6b564d1b54a9e" +"checksum constant_time_eq 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)" = "995a44c877f9212528ccc74b21a232f66ad69001e40ede5bcee2ac9ef2657120" "checksum core-foundation 0.6.4 (registry+https://github.com/rust-lang/crates.io-index)" = "25b9e03f145fd4f2bf705e07b900cd41fc636598fe5dc452fd0db1441c3f496d" "checksum core-foundation-sys 0.6.2 (registry+https://github.com/rust-lang/crates.io-index)" = "e7ca8a5221364ef15ce201e8ed2f609fc312682a8f4e0e3d4aa5879764e0fa3b" "checksum crc32fast 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "ba125de2af0df55319f41944744ad91c71113bf74a4646efff39afe1f6842db1" -"checksum crossbeam 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)" = "d1c92ff2d7a202d592f5a412d75cf421495c913817781c1cb383bf12a77e185f" "checksum crossbeam-channel 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)" = "c8ec7fcd21571dc78f96cc96243cab8d8f035247c3efd16c687be154c3fa9efa" -"checksum crossbeam-deque 0.6.3 (registry+https://github.com/rust-lang/crates.io-index)" = "05e44b8cf3e1a625844d1750e1f7820da46044ff6d28f4d43e455ba3e5bb2c13" -"checksum crossbeam-epoch 0.6.1 (registry+https://github.com/rust-lang/crates.io-index)" = "2449aaa4ec7ef96e5fb24db16024b935df718e9ae1cec0a1e68feeca2efca7b8" -"checksum crossbeam-epoch 0.7.2 (registry+https://github.com/rust-lang/crates.io-index)" = "fedcd6772e37f3da2a9af9bf12ebe046c0dfe657992377b4df982a2b54cd37a9" "checksum crossbeam-utils 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)" = "677d453a17e8bd2b913fa38e8b9cf04bcdbb5be790aa294f2389661d72036015" "checksum crossbeam-utils 0.6.6 (registry+https://github.com/rust-lang/crates.io-index)" = "04973fa96e96579258a5091af6003abde64af786b860f18622b82e026cca60e6" "checksum crossterm 0.10.2 (registry+https://github.com/rust-lang/crates.io-index)" = "9abce7d7c50e9823ea0c0dbeb8f16d7e247af06d75b4c6244ea0a0998b3a6f35" @@ -3153,10 +2887,10 @@ dependencies = [ "checksum crossterm_winapi 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)" = "b055e7cc627c452e6a9b977022f48a2db6f0ff73df446ca970f95eef9c381d45" "checksum csv 1.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "37519ccdfd73a75821cac9319d4fce15a81b9fcf75f951df5b9988aa3a0af87d" "checksum csv-core 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)" = "9b5cadb6b25c77aeff80ba701712494213f4a8418fcda2ee11b6560c3ad0bf4c" -"checksum ctor 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)" = 
"3b4c17619643c1252b5f690084b82639dd7fac141c57c8e77a00e0148132092c" +"checksum ctor 0.1.12 (registry+https://github.com/rust-lang/crates.io-index)" = "cd8ce37ad4184ab2ce004c33bf6379185d3b1c95801cab51026bd271bf68eedc" "checksum ctrlc 3.1.3 (registry+https://github.com/rust-lang/crates.io-index)" = "c7dfd2d8b4c82121dfdff120f818e09fc4380b0b7e17a742081a89b94853e87f" -"checksum curl 0.4.22 (registry+https://github.com/rust-lang/crates.io-index)" = "f8ed9a22aa8c4e49ac0c896279ef532a43a7df2f54fcd19fa36960de029f965f" -"checksum curl-sys 0.4.20 (registry+https://github.com/rust-lang/crates.io-index)" = "5e90ae10f635645cba9cad1023535f54915a95c58c44751c6ed70dbaeb17a408" +"checksum curl 0.4.25 (registry+https://github.com/rust-lang/crates.io-index)" = "06aa71e9208a54def20792d877bc663d6aae0732b9852e612c4a933177c31283" +"checksum curl-sys 0.4.23 (registry+https://github.com/rust-lang/crates.io-index)" = "f71cd2dbddb49c744c1c9e0b96106f50a634e8759ec51bcd5399a578700a3ab3" "checksum darwin-libproc 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "ade5a88af8d9646bf770687321a9488a0f2b4610aa08b0373016cd1af37f0a31" "checksum darwin-libproc-sys 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "c30d1a078d74da1183b02fed8a8b07afc412d3998334b53b750d0ed03b031541" "checksum decimal 2.0.4 (registry+https://github.com/rust-lang/crates.io-index)" = "e6458723bc760383275fbc02f4c769b2e5f3de782abaf5e7e0b9b7f0368a63ed" @@ -3169,81 +2903,77 @@ dependencies = [ "checksum dirs-sys 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)" = "afa0b23de8fd801745c471deffa6e12d248f962c9fd4b4c33787b055599bde7b" "checksum dtoa 0.4.4 (registry+https://github.com/rust-lang/crates.io-index)" = "ea57b42383d091c85abcc2706240b94ab2a8fa1fc81c10ff23c4de06e2a90b5e" "checksum dunce 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "d0ad6bf6a88548d1126045c413548df1453d9be094a8ab9fd59bf1fdd338da4f" -"checksum either 1.5.2 (registry+https://github.com/rust-lang/crates.io-index)" = "5527cfe0d098f36e3f8839852688e63c8fff1c90b2b405aef730615f9a7bcf7b" -"checksum encode_unicode 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)" = "90b2c9496c001e8cb61827acdefad780795c42264c137744cae6f7d9e3450abd" -"checksum enum-utils 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "1f1ae672d9891879fb93e17ab6015c4e3bbe63fbeb23a41b9ac39ffa845b8836" -"checksum enum-utils-from-str 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "6b5669381f76d7320e122abdd4a8307f986634f6d067fb69e31179422175801a" +"checksum either 1.5.3 (registry+https://github.com/rust-lang/crates.io-index)" = "bb1f6b1ce1c140482ea30ddd3335fc0024ac7ee112895426e0a629a6c20adfe3" +"checksum encode_unicode 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)" = "a357d28ed41a50f9c765dbfe56cbc04a64e53e5fc58ba79fbc34c10ef3df831f" "checksum env_logger 0.6.2 (registry+https://github.com/rust-lang/crates.io-index)" = "aafcde04e90a5226a6443b7aabdb016ba2f8307c847d524724bd9b346dd1a2d3" -"checksum failure 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)" = "795bd83d3abeb9220f257e597aa0080a508b27533824adf336529648f6abf7e2" -"checksum failure_derive 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)" = "ea1063915fd7ef4309e222a5a07cf9c319fb9c7836b1f89b85458672dbb127e1" +"checksum failure 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)" = "f8273f13c977665c5db7eb2b99ae520952fe5ac831ae4cd09d80c4c7042b5ed9" +"checksum failure_derive 0.1.6 
(registry+https://github.com/rust-lang/crates.io-index)" = "0bc225b78e0391e4b8683440bf2e63c2deeeb2ce5189eab46e2b68c6d3725d08" "checksum fallible-iterator 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "4443176a9f2c162692bd3d352d745ef9413eec5782a80d8fd6f8a1ac692a07f7" "checksum fallible-streaming-iterator 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)" = "7360491ce676a36bf9bb3c56c1aa791658183a54d2744120f27285738d90465a" "checksum fixedbitset 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)" = "86d4de0081402f5e88cdac65c8dcdcc73118c1a7a465e2a05f0da05843a8ea33" -"checksum flate2 1.0.9 (registry+https://github.com/rust-lang/crates.io-index)" = "550934ad4808d5d39365e5d61727309bf18b3b02c6c56b729cb92e7dd84bc3d8" +"checksum flate2 1.0.12 (registry+https://github.com/rust-lang/crates.io-index)" = "ad3c5233c9a940c8719031b423d7e6c16af66e031cb0420b0896f5245bf181d3" "checksum fnv 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)" = "2fad85553e09a6f881f739c29f0b00b0f01357c743266d478b68951ce23285f3" "checksum fuchsia-cprng 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "a06f77d526c1a601b7c4cdd98f54b5eaabffc14d5f2f0296febdc7f357c6d3ba" -"checksum futures 0.1.28 (registry+https://github.com/rust-lang/crates.io-index)" = "45dc39533a6cae6da2b56da48edae506bb767ec07370f86f70fc062e9d435869" -"checksum futures-async-stream 0.1.0-alpha.5 (registry+https://github.com/rust-lang/crates.io-index)" = "f6311b428f208a8e7294aad3ddfa695cd68163e49880f4a3c3705e94c613c99b" -"checksum futures-async-stream-macro 0.1.0-alpha.5 (registry+https://github.com/rust-lang/crates.io-index)" = "c7665811c2ea29c7fd309e48b1c1f52538b50fda641616a11eedadcf23ad29da" +"checksum futures 0.1.29 (registry+https://github.com/rust-lang/crates.io-index)" = "1b980f2816d6ee8673b6517b52cb0e808a180efc92e5c19d02cdda79066703ef" "checksum futures-channel-preview 0.3.0-alpha.18 (registry+https://github.com/rust-lang/crates.io-index)" = "f477fd0292c4a4ae77044454e7f2b413207942ad405f759bb0b4698b7ace5b12" "checksum futures-core-preview 0.3.0-alpha.18 (registry+https://github.com/rust-lang/crates.io-index)" = "4a2f26f774b81b3847dcda0c81bd4b6313acfb4f69e5a0390c7cb12c058953e9" "checksum futures-executor-preview 0.3.0-alpha.18 (registry+https://github.com/rust-lang/crates.io-index)" = "80705612926df8a1bc05f0057e77460e29318801f988bf7d803a734cf54e7528" "checksum futures-io-preview 0.3.0-alpha.18 (registry+https://github.com/rust-lang/crates.io-index)" = "ee7de0c1c9ed23f9457b0437fec7663ce64d9cc3c906597e714e529377b5ddd1" "checksum futures-preview 0.3.0-alpha.18 (registry+https://github.com/rust-lang/crates.io-index)" = "efa8f90c4fb2328e381f8adfd4255b4a2b696f77d1c63a3dee6700b564c4e4b5" "checksum futures-sink-preview 0.3.0-alpha.18 (registry+https://github.com/rust-lang/crates.io-index)" = "e9b65a2481863d1b78e094a07e9c0eed458cc7dc6e72b22b7138b8a67d924859" -"checksum futures-timer 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "8f9eb554aa23143abc64ec4d0016f038caf53bb7cbc3d91490835c54edc96550" +"checksum futures-timer 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "878f1d2fc31355fa02ed2372e741b0c17e58373341e6a122569b4623a14a7d33" "checksum futures-util-preview 0.3.0-alpha.18 (registry+https://github.com/rust-lang/crates.io-index)" = "7df53daff1e98cc024bf2720f3ceb0414d96fbb0a94f3cad3a5c3bf3be1d261c" "checksum futures_codec 0.2.5 (registry+https://github.com/rust-lang/crates.io-index)" = "36552cd31353fd135114510d53b8d120758120c36aa636a9341970f9efb1e4a0" -"checksum 
getrandom 0.1.8 (registry+https://github.com/rust-lang/crates.io-index)" = "34f33de6f0ae7c9cb5e574502a562e2b512799e32abb801cd1e79ad952b62b49" +"checksum getrandom 0.1.12 (registry+https://github.com/rust-lang/crates.io-index)" = "473a1265acc8ff1e808cd0a1af8cee3c2ee5200916058a2ca113c29f2d903571" "checksum getset 0.0.8 (registry+https://github.com/rust-lang/crates.io-index)" = "117a5b13aecd4e10161bb3feb22dda898e8552836c2391d8e4645d5e703ab866" -"checksum git2 0.10.0 (registry+https://github.com/rust-lang/crates.io-index)" = "327d698f86a7ebdfeb86a4238ccdb004828939d3a3555b6ead679541d14e36c0" +"checksum git2 0.10.1 (registry+https://github.com/rust-lang/crates.io-index)" = "39f27186fbb5ec67ece9a56990292bc5aed3c3fc51b9b07b0b52446b1dfb4a82" "checksum glob 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "9b919933a397b79c37e33b77bb2aa3dc8eb6e165ad809e58ff75bc7db2e34574" "checksum heck 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "20564e78d53d2bb135c343b3f47714a56af2061f1c928fdb541dc7b9fdd94205" -"checksum heim 0.0.7 (registry+https://github.com/rust-lang/crates.io-index)" = "4e61bf22465c7f49852fd7e6044a395394962a2eaac0b5c1b87b5b0f010b0f48" -"checksum heim-common 0.0.7 (registry+https://github.com/rust-lang/crates.io-index)" = "81e38b3fc29d7888133d0ada8bc083487386fd930f3c8fd34a528a2aa4352a3a" -"checksum heim-cpu 0.0.7 (registry+https://github.com/rust-lang/crates.io-index)" = "307f12a429cfe56c92413f98a6e1a28f72d715b9f65fbfdf2e98f15bd38293c6" -"checksum heim-derive 0.0.7 (registry+https://github.com/rust-lang/crates.io-index)" = "addd10c94d06b172f816a1969253c2dd8a3f633e165d8e018e0be873d67f8cac" -"checksum heim-disk 0.0.7 (registry+https://github.com/rust-lang/crates.io-index)" = "2ee4860d01ea623512bcd1d2d54e4566d482f2d4568789562b13d4b8cc294f00" -"checksum heim-host 0.0.7 (registry+https://github.com/rust-lang/crates.io-index)" = "1c6dee47910be9b5fb323ec6bf7462773a8bee67b65e5fe5d652f3e20b3ecab9" -"checksum heim-memory 0.0.7 (registry+https://github.com/rust-lang/crates.io-index)" = "30f5e88edcafd7ee6061997d171f84c153fabdd6459d739b45d7f05193d7f98c" -"checksum heim-net 0.0.7 (registry+https://github.com/rust-lang/crates.io-index)" = "5e5b265598f9d3ca525f54a394153e3e738af9795ac5be7c364d55a7be857e69" -"checksum heim-process 0.0.7 (registry+https://github.com/rust-lang/crates.io-index)" = "2165577ccfce4d038de4ca66cbb5c226e1691dff62c778cac6717455dc9ef28d" -"checksum heim-runtime 0.0.3 (registry+https://github.com/rust-lang/crates.io-index)" = "2c4c23e20c02d9df62dbed41273e99ad70c9ebd8799f35ac672086f8cc584d09" -"checksum heim-sensors 0.0.2 (registry+https://github.com/rust-lang/crates.io-index)" = "e641fab2e31c4b2039451a713dc92a5daacf84c617c803c946b8081fe8132142" -"checksum heim-virt 0.0.7 (registry+https://github.com/rust-lang/crates.io-index)" = "331b1486ed710843c551ac3a8ddb2721dd5345b3939f995ce0dbe453ba901b06" +"checksum heim 0.0.8 (registry+https://github.com/rust-lang/crates.io-index)" = "de848466ae9659d5ab634615bdd0b7d558a41ae524ee4d59c880d12499af5b77" +"checksum heim-common 0.0.8 (registry+https://github.com/rust-lang/crates.io-index)" = "63f408c31e695732096a0383df16cd3efee4adb32ba3ad086fb85a7dc8f53100" +"checksum heim-cpu 0.0.8 (registry+https://github.com/rust-lang/crates.io-index)" = "5785004dfdbd68a814d504b27b8ddc16c748a856835dfb6e65b15142090664ef" +"checksum heim-derive 0.0.8 (registry+https://github.com/rust-lang/crates.io-index)" = "9573bedf4673c1b254bce7f1521559329d2b27995b693b695fa13be2b15c188b" +"checksum heim-disk 0.0.8 
(registry+https://github.com/rust-lang/crates.io-index)" = "c84980e62564828ae4ca70a8bfbdb0f139cc89abb6c91b8b4809518346a72366" +"checksum heim-host 0.0.8 (registry+https://github.com/rust-lang/crates.io-index)" = "1de019d5969f6bab766311be378788bd1bb068b59c4f3861c539a420fc258ed3" +"checksum heim-memory 0.0.8 (registry+https://github.com/rust-lang/crates.io-index)" = "a9cdbe6433197da8387dcd0cf1afd9184db4385d55f8a76355b28ceabe99cdc5" +"checksum heim-net 0.0.8 (registry+https://github.com/rust-lang/crates.io-index)" = "7b0f5e590eb2f8b23229ff4b06f7e7aee0e229837d3697f362014343682ae073" +"checksum heim-process 0.0.8 (registry+https://github.com/rust-lang/crates.io-index)" = "a64874316339b9c0c7953e7a87d2b32e2400bf6778650ac11b76b05d3c37e121" +"checksum heim-runtime 0.0.4 (registry+https://github.com/rust-lang/crates.io-index)" = "13ef10b5ab5a501e6537b1414db0e3c488425d88bb131bd4e9ff7c0e61e5fbd1" +"checksum heim-sensors 0.0.3 (registry+https://github.com/rust-lang/crates.io-index)" = "8ad8b3c9032bca1a76dd43e1eb5c8044e0c505343cb21949dc7acd1bc55b408b" +"checksum heim-virt 0.0.8 (registry+https://github.com/rust-lang/crates.io-index)" = "bb2dda5314da10a8fbcdf130c065abc65f02c3ace72c6f143ad4537520536e2b" "checksum hex 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)" = "805026a5d0141ffc30abb3be3173848ad46a1b1664fe632428479619a3644d77" +"checksum hex 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "023b39be39e3a2da62a94feb433e91e8bcd37676fbc8bea371daf52b7a769a3e" "checksum http 0.1.18 (registry+https://github.com/rust-lang/crates.io-index)" = "372bcb56f939e449117fb0869c2e8fd8753a8223d92a172c6e808cf123a5b6e4" -"checksum humantime 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "3ca7e5f2e110db35f93b837c81797f3714500b81d517bf20c431b16d3ca4f114" +"checksum humantime 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "df004cfca50ef23c36850aaaa59ad52cc70d0e90243c3c7737a4dd32dc7a3c4f" "checksum idna 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "02e2673c30ee86b5b96a9cb52ad15718aa1f966f5ab9ad54a8b95d5ca33120a9" -"checksum image 0.22.1 (registry+https://github.com/rust-lang/crates.io-index)" = "663a975007e0b49903e2e8ac0db2c432c465855f2d65f17883ba1476e85f0b42" -"checksum indexmap 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "a4d6d89e0948bf10c08b9ecc8ac5b83f07f857ebe2c0cbe38de15b4e4f510356" +"checksum image 0.22.3 (registry+https://github.com/rust-lang/crates.io-index)" = "7b4be8aaefbe7545dc42ae925afb55a0098f226a3fe5ef721872806f44f57826" +"checksum indexmap 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "a61202fbe46c4a951e9404a720a0180bcf3212c750d735cb5c4ba4dc551299f3" "checksum inflate 0.4.5 (registry+https://github.com/rust-lang/crates.io-index)" = "1cdb29978cc5797bd8dcc8e5bf7de604891df2a8dc576973d71a281e916db2ff" -"checksum iovec 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "dbe6e417e7d0975db6512b90796e8ce223145ac4e33c377e4a42882a0e88bb08" -"checksum isahc 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)" = "e1b971511b5d8de4a51d4da4bc8e374bf60ce841e91b116f46ae06ae2e2a8e9b" +"checksum iovec 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)" = "b2b3ea6ff95e175473f8ffe6a7eb7c00d054240321b84c57051175fe3c1e075e" +"checksum isahc 0.7.4 (registry+https://github.com/rust-lang/crates.io-index)" = "769f5071e5bf0b45489eefe0ec96b97328675db38d02ea5e923519d52e690cb8" "checksum isatty 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)" = 
"e31a8281fc93ec9693494da65fbf28c0c2aa60a2eaec25dc58e2f31952e95edc" "checksum itertools 0.7.11 (registry+https://github.com/rust-lang/crates.io-index)" = "0d47946d458e94a1b7bcabbf6521ea7c037062c81f534615abcad76e84d4970d" "checksum itertools 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)" = "5b8467d9c1cebe26feb08c640139247fac215782d35371ade9a2136ed6085358" "checksum itoa 0.4.4 (registry+https://github.com/rust-lang/crates.io-index)" = "501266b7edd0174f8530248f87f99c88fbe60ca4ef3dd486835b8d8d53136f7f" -"checksum jpeg-decoder 0.1.15 (registry+https://github.com/rust-lang/crates.io-index)" = "c8b7d43206b34b3f94ea9445174bda196e772049b9bddbc620c9d29b2d20110d" -"checksum js-sys 0.3.27 (registry+https://github.com/rust-lang/crates.io-index)" = "1efc4f2a556c58e79c5500912e221dd826bec64ff4aabd8ce71ccef6da02d7d4" +"checksum jobserver 0.1.17 (registry+https://github.com/rust-lang/crates.io-index)" = "f2b1d42ef453b30b7387e113da1c83ab1605d90c5b4e0eb8e96d016ed3b8c160" +"checksum jpeg-decoder 0.1.16 (registry+https://github.com/rust-lang/crates.io-index)" = "c1aae18ffeeae409c6622c3b6a7ee49792a7e5a062eea1b135fbb74e301792ba" +"checksum js-sys 0.3.28 (registry+https://github.com/rust-lang/crates.io-index)" = "2cc9a97d7cec30128fd8b28a7c1f9df1c001ceb9b441e2b755e24130a6b43c79" "checksum kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "7507624b29483431c0ba2d82aece8ca6cdba9382bff4ddd0f7490560c056098d" -"checksum language-reporting 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "628912b84af4304e1e7e78ebb6a1f503f3a973cba79d072d12e6eb40e7f815db" +"checksum language-reporting 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "4e6a84e1e6cccd818617d299427ad1519f127af2738b1d3a581835ef56ae298b" "checksum lazy_static 0.2.11 (registry+https://github.com/rust-lang/crates.io-index)" = "76f033c7ad61445c5b347c7382dd1237847eb1bce590fe50365dcb33d546be73" -"checksum lazy_static 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "bc5729f27f159ddd61f4df6228e827e86643d4d3e7c32183cb30a1c08f604a14" +"checksum lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646" "checksum lazycell 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "b294d6fa9ee409a054354afc4352b0b9ef7ca222c69b8812cbea9e7d2bf3783f" -"checksum lexical-core 0.4.3 (registry+https://github.com/rust-lang/crates.io-index)" = "b8b0f90c979adde96d19eb10eb6431ba0c441e2f9e9bdff868b2f6f5114ff519" -"checksum libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)" = "d44e80633f007889c7eff624b709ab43c92d708caad982295768a7b13ca3b5eb" -"checksum libgit2-sys 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)" = "8c2078aec6f4b16d1b89f6a72e4f6eb1e75ffa85312023291e89c6d3087bc8fb" +"checksum lexical-core 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)" = "2304bccb228c4b020f3a4835d247df0a02a7c4686098d4167762cfbbe4c5cb14" +"checksum libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)" = "34fcd2c08d2f832f376f4173a231990fa5aef4e99fb569867318a227ef4c06ba" +"checksum libgit2-sys 0.9.1 (registry+https://github.com/rust-lang/crates.io-index)" = "a30f8637eb59616ee3b8a00f6adff781ee4ddd8343a615b8238de756060cc1b3" "checksum libnghttp2-sys 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "02254d44f4435dd79e695f2c2b83cd06a47919adea30216ceaf0c57ca0a72463" "checksum libsqlite3-sys 0.16.0 (registry+https://github.com/rust-lang/crates.io-index)" = 
"5e5b95e89c330291768dc840238db7f9e204fd208511ab6319b56193a7f2ae25" "checksum libz-sys 1.0.25 (registry+https://github.com/rust-lang/crates.io-index)" = "2eb5e43362e38e2bca2fd5f5134c4d4564a23a5c28e9b95411652021a8675ebe" "checksum line-wrap 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "f30344350a2a51da54c1d53be93fade8a237e545dbcc4bdbe635413f2117cab9" "checksum linked-hash-map 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "6d262045c5b87c0861b3f004610afd0e2c851e2908d08b6c870cbb9d5f494ecd" "checksum linked-hash-map 0.5.2 (registry+https://github.com/rust-lang/crates.io-index)" = "ae91b68aebc4ddb91978b11a1b02ddd8602a05ec19002801c5666000e05e0f83" -"checksum lock_api 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)" = "62ebf1391f6acad60e5c8b43706dde4582df75c06698ab44511d15016bc2442c" "checksum log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)" = "14b6052be84e6b71ab17edffc2eeabf5c2c3ae1fdb464aae35ac50c67a44e1f7" "checksum lru-cache 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "31e24f1ad8321ca0e8a1e0ac13f23cb668e6f5466c2c57319f6a5cf1cc8e3b1c" -"checksum lzw 0.10.0 (registry+https://github.com/rust-lang/crates.io-index)" = "7d947cbb889ed21c2a84be6ffbaebf5b4e0f4340638cba0444907e38b56be084" "checksum macaddr 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "3ff4752cb15cffb3e68f7dcb22e0818ac871f8c98fb07a634a81f41fb202a09f" "checksum mach 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)" = "86dd2487cdfea56def77b88438a2c915fb45113c5319bfe7e14306ca4cd0b0e1" "checksum mach 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)" = "b823e83b2affd8f40a9ee8c29dbc56404c1e34cd2710921f2801e2cf29527afa" @@ -3251,22 +2981,21 @@ dependencies = [ "checksum matches 0.1.8 (registry+https://github.com/rust-lang/crates.io-index)" = "7ffc5c5338469d4d3ea17d269fa8ea3512ad247247c30bd2df69e68309ed0a08" "checksum md5 0.6.1 (registry+https://github.com/rust-lang/crates.io-index)" = "7e6bcd6433cff03a4bfc3d9834d504467db1f1cf6d0ea765d37d330249ed629d" "checksum memchr 2.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "88579771288728879b57485cc7d6b07d648c9f0141eb955f8ab7f9d45394468e" -"checksum memoffset 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "0f9dc261e2b62d7a622bf416ea3c5245cdd5d9a7fcc428c0d06804dfce1775b3" -"checksum memoffset 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)" = "ce6075db033bbbb7ee5a0bbd3a3186bbae616f57fb001c485c7ff77955f8177f" -"checksum mime 0.3.13 (registry+https://github.com/rust-lang/crates.io-index)" = "3e27ca21f40a310bd06d9031785f4801710d566c184a6e15bad4f1d9b65f9425" +"checksum mime 0.3.14 (registry+https://github.com/rust-lang/crates.io-index)" = "dd1d63acd1b78403cc0c325605908475dd9b9a3acbf65ed8bcab97e27014afcf" "checksum mime_guess 2.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = "1a0ed03949aef72dbdf3116a383d7b38b4768e6f960528cd6a6044aa9ed68599" -"checksum miniz-sys 0.1.12 (registry+https://github.com/rust-lang/crates.io-index)" = "1e9e3ae51cea1576ceba0dde3d484d30e6e5b86dee0b2d412fe3a16a15c98202" -"checksum miniz_oxide 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "fe2959c5a0747a8d7a56b4444c252ffd2dda5d452cfd147cdfdda73b1c3ece5b" -"checksum miniz_oxide_c_api 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)" = "6c675792957b0d19933816c4e1d56663c341dd9bfa31cb2140ff2267c1d8ecf4" +"checksum miniz_oxide 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = 
"304f66c19be2afa56530fa7c39796192eef38618da8d19df725ad7c6d6b2aaae" +"checksum natural 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "fd659d7d6b4554da2c0e7a486d5952b24dfce0e0bac88ab53b270f4efe1010a6" "checksum neso 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)" = "6b3c31defbcb081163db18437fd88c2a267cb3e26f7bd5e4b186e4b1b38fe8c8" "checksum nix 0.14.1 (registry+https://github.com/rust-lang/crates.io-index)" = "6c722bee1037d430d0f8e687bbdbf222f27cc6e4e68d5caf630857bb2b6dbdce" "checksum nix 0.15.0 (registry+https://github.com/rust-lang/crates.io-index)" = "3b2e0b4f3320ed72aaedb9a5ac838690a8047c7b275da22711fddff4f8a14229" -"checksum nodrop 0.1.13 (registry+https://github.com/rust-lang/crates.io-index)" = "2f9667ddcc6cc8a43afc9b7917599d7216aa09c463919ea32c59ed6cac8bc945" +"checksum nodrop 0.1.14 (registry+https://github.com/rust-lang/crates.io-index)" = "72ef4a56884ca558e5ddb05a1d1e7e1bfd9a68d9ed024c21704cc98872dae1bb" "checksum nom 4.2.3 (registry+https://github.com/rust-lang/crates.io-index)" = "2ad2a91a8e869eeb30b9cb3119ae87773a8f4ae617f41b1eb9c154b2905f7bd6" -"checksum nom 5.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "e9761d859320e381010a4f7f8ed425f2c924de33ad121ace447367c713ad561b" -"checksum nom5_locate 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "3d4312467f8b28d909344b934207e502212fa5a3adf1bff7428b0b86a666223d" +"checksum nom 5.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = "c618b63422da4401283884e6668d39f819a106ef51f5f59b81add00075da35ca" +"checksum nom-tracable 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "edaa64ad2837d831d4a17966c9a83aa5101cc320730f5b724811c8f7442a2528" +"checksum nom-tracable-macros 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "fd25f70877a9fe68bd406b3dd3ff99e94ce9de776cf2a96e0d99de90b53d4765" +"checksum nom_locate 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "f932834fd8e391fc7710e2ba17e8f9f8645d846b55aa63207e17e110a1e1ce35" "checksum ntapi 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "f26e041cd983acbc087e30fcba770380cfa352d0e392e175b2344ebaf7ea0602" -"checksum num-bigint 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "57450397855d951f1a41305e54851b1a7b8f5d2e349543a02a2effe25459f718" +"checksum num-bigint 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)" = "f9c3f34cdd24f334cb265d9bf8bfa8a241920d026916785747a92f0e55541a1a" "checksum num-integer 0.1.41 (registry+https://github.com/rust-lang/crates.io-index)" = "b85e541ef8255f6cf42bbfe4ef361305c6c135d10919ecc26126c4e5ae94bc09" "checksum num-iter 0.1.39 (registry+https://github.com/rust-lang/crates.io-index)" = "76bd5272412d173d6bf9afdf98db8612bbabc9a7a830b7bfc9c188911716132e" "checksum num-rational 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "f2885278d5fe2adc2f75ced642d52d879bffaceb5a2e0b1d4309ffdfb239b454" @@ -3277,109 +3006,89 @@ dependencies = [ "checksum objc-foundation 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "1add1b659e36c9607c7aab864a76c7a4c2760cd0cd2e120f3fb8b952c7e22bf9" "checksum objc_id 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "c92d4ddb4bd7b50d730c215ff871754d0da6b2178849f8a2a2ab69712d0c073b" "checksum ole32-sys 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "5d2c49021782e5233cd243168edfa8037574afed4eba4bbaf538b3d8d1789d8c" -"checksum onig 4.3.2 (registry+https://github.com/rust-lang/crates.io-index)" = 
"a646989adad8a19f49be2090374712931c3a59835cb5277b4530f48b417f26e7" +"checksum onig 4.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "8518fcb2b1b8c2f45f0ad499df4fda6087fc3475ca69a185c173b8315d2fb383" "checksum onig_sys 69.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "388410bf5fa341f10e58e6db3975f4bea1ac30247dd79d37a9e5ced3cb4cc3b0" "checksum openssl-probe 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "77af24da69f9d9341038eba93a073b1fdaaa1b788221b00a69bce9e762cb32de" -"checksum openssl-sys 0.9.49 (registry+https://github.com/rust-lang/crates.io-index)" = "f4fad9e54bd23bd4cbbe48fdc08a1b8091707ac869ef8508edea2fec77dcc884" +"checksum openssl-sys 0.9.51 (registry+https://github.com/rust-lang/crates.io-index)" = "ba24190c8f0805d3bd2ce028f439fe5af1d55882bbe6261bed1dbc93b50dd6b1" "checksum ord_subset 3.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "d7ce14664caf5b27f5656ff727defd68ae1eb75ef3c4d95259361df1eb376bef" "checksum ordered-float 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)" = "18869315e81473c951eb56ad5558bbc56978562d3ecfb87abb7a1e944cea4518" "checksum ordermap 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)" = "a86ed3f5f244b372d6b1a00b72ef7f8876d0bc6a78a4c9985c53614041512063" "checksum output_vt100 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "53cdc5b785b7a58c5aad8216b3dfa114df64b0b06ae6e1501cef91df2fbdf8f9" -"checksum owning_ref 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "49a4b8ea2179e6a2e27411d3bca09ca6dd630821cf6894c6c7c8467a8ee7ef13" -"checksum parking_lot 0.6.4 (registry+https://github.com/rust-lang/crates.io-index)" = "f0802bff09003b291ba756dc7e79313e51cc31667e94afbe847def490424cde5" -"checksum parking_lot_core 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "ad7f7e6ebdc79edff6fdcb87a55b620174f7a989e3eb31b65231f4af57f00b8c" "checksum percent-encoding 2.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "d4fd5641d01c8f18a23da7b6fe29298ff4b55afcccdf78973b24cf3175fee32e" "checksum petgraph 0.4.13 (registry+https://github.com/rust-lang/crates.io-index)" = "9c3659d1ee90221741f65dd128d9998311b0e40c5d3c23a62445938214abce4f" -"checksum pin-project 0.4.0-alpha.5 (registry+https://github.com/rust-lang/crates.io-index)" = "c6e7dd6a2ad14b55463a4b80ca7b6c3b373921310b61fcb3de5455ad2dea21f7" -"checksum pin-project-internal 0.4.0-alpha.5 (registry+https://github.com/rust-lang/crates.io-index)" = "8cbe07d1ffd722968221af234aff370f5d02de3dea17decf536df93ee9af2fd3" "checksum pin-utils 0.1.0-alpha.4 (registry+https://github.com/rust-lang/crates.io-index)" = "5894c618ce612a3fa23881b152b608bafb8c56cfc22f434a3ba3120b40f7b587" -"checksum pkg-config 0.3.15 (registry+https://github.com/rust-lang/crates.io-index)" = "a7c1d2cfa5a714db3b5f24f0915e74fcdf91d09d496ba61329705dda7774d2af" -"checksum platforms 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "6cfec0daac55b13af394ceaaad095d17c790f77bdc9329264f06e49d6cd3206c" +"checksum pkg-config 0.3.16 (registry+https://github.com/rust-lang/crates.io-index)" = "72d5370d90f49f70bd033c3d75e87fc529fbfff9d6f7cccef07d6170079d91ea" +"checksum platforms 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "feb3b2b1033b8a60b4da6ee470325f887758c95d5320f52f9ce0df055a55940e" "checksum plist 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)" = "5f2a9f075f6394100e7c105ed1af73fb1859d6fd14e49d4290d578120beb167f" "checksum png 0.15.0 
(registry+https://github.com/rust-lang/crates.io-index)" = "8422b27bb2c013dd97b9aef69e161ce262236f49aaf46a0489011c8ff0264602" "checksum ppv-lite86 0.2.5 (registry+https://github.com/rust-lang/crates.io-index)" = "e3cbf9f658cdb5000fcf6f362b8ea2ba154b9f146a61c7a20d647034c6b6561b" -"checksum pretty-hex 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "119929a2a3b731bb3d888f7a1b5dc3c1db28b6c134def5d99f7e16e2da16b8f7" +"checksum pretty-hex 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "be91bcc43e73799dc46a6c194a55e7aae1d86cc867c860fd4a436019af21bd8c" "checksum pretty_assertions 0.6.1 (registry+https://github.com/rust-lang/crates.io-index)" = "3f81e1644e1b54f5a68959a29aa86cde704219254669da328ecfdf6a1f09d427" "checksum pretty_env_logger 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "717ee476b1690853d222af4634056d830b5197ffd747726a9a1eee6da9f49074" "checksum prettytable-rs 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)" = "0fd04b170004fa2daccf418a7f8253aaf033c27760b5f225889024cf66d7ac2e" -"checksum proc-macro2 0.4.30 (registry+https://github.com/rust-lang/crates.io-index)" = "cf3d2011ab5c909338f7887f4fc896d35932e29146c12c8d01da6b22a80ba759" -"checksum proc-macro2 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = "4c5c2380ae88876faae57698be9e9775e3544decad214599c3a6266cca6ac802" +"checksum proc-macro2 1.0.5 (registry+https://github.com/rust-lang/crates.io-index)" = "90cf5f418035b98e655e9cdb225047638296b862b42411c4e45bb88d700f7fc0" "checksum ptree 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "6b0a3be00b19ee7bd33238c1c523a7ab4df697345f6b36f90827a7860ea938d4" "checksum quick-error 1.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "9274b940887ce9addde99c4eee6b5c44cc494b182b97e73dc8ffdcb3397fd3f0" -"checksum quote 0.6.13 (registry+https://github.com/rust-lang/crates.io-index)" = "6ce23b6b870e8f94f81fb0a363d65d86675884b34a09043c81e5562f11c1f8e1" "checksum quote 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)" = "053a8c8bcc71fcce321828dc897a98ab9760bef03a4fc36693c231e5b3216cfe" -"checksum rand 0.5.6 (registry+https://github.com/rust-lang/crates.io-index)" = "c618c47cd3ebd209790115ab837de41425723956ad3ce2e6a7f09890947cacb9" -"checksum rand 0.6.5 (registry+https://github.com/rust-lang/crates.io-index)" = "6d71dacdc3c88c1fde3885a3be3fbab9f35724e6ce99467f7d9c5026132184ca" -"checksum rand 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "d47eab0e83d9693d40f825f86948aa16eff6750ead4bdffc4ab95b8b3a7f052c" -"checksum rand_chacha 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "556d3a1ca6600bfcbab7c7c91ccb085ac7fbbcd70e008a98742e7847f4f7bcef" +"checksum rand 0.7.2 (registry+https://github.com/rust-lang/crates.io-index)" = "3ae1b169243eaf61759b8475a998f0a385e42042370f3a7dbaf35246eacc8412" "checksum rand_chacha 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "03a2a90da8c7523f554344f921aa97283eadf6ac484a6d2a7d0212fa7f8d6853" "checksum rand_core 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "7a6fdeb83b075e8266dcc8762c22776f6877a63111121f5f8c7411e5be7eed4b" "checksum rand_core 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)" = "9c33a3c44ca05fa6f1807d8e6743f3824e8509beca625669633be0acbdf509dc" -"checksum rand_core 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)" = "615e683324e75af5d43d8f7a39ffe3ee4a9dc42c5c701167a71dc59c3a493aca" -"checksum rand_hc 0.1.0 
(registry+https://github.com/rust-lang/crates.io-index)" = "7b40677c7be09ae76218dc623efbf7b18e34bced3f38883af07bb75630a21bc4" +"checksum rand_core 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)" = "90bde5296fc891b0cef12a6d03ddccc162ce7b2aff54160af9338f8d40df6d19" "checksum rand_hc 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "ca3129af7b92a17112d59ad498c6f81eaf463253766b90396d39ea7a39d6613c" -"checksum rand_isaac 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "ded997c9d5f13925be2a6fd7e66bf1872597f759fd9dd93513dd7e92e5a5ee08" -"checksum rand_jitter 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)" = "1166d5c91dc97b88d1decc3285bb0a99ed84b05cfd0bc2341bdf2d43fc41e39b" "checksum rand_os 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)" = "7b75f676a1e053fc562eafbb47838d67c84801e38fc1ba459e8f180deabd5071" -"checksum rand_pcg 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "abf9b09b01790cfe0364f52bf32995ea3c39f4d2dd011eac241d2914146d0b44" -"checksum rand_xorshift 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "cbf7e9e623549b0e21f6e97cf8ecf247c1a8fd2e8a992ae265314300b2455d5c" -"checksum raw-cpuid 6.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "30a9d219c32c9132f7be513c18be77c9881c7107d2ab5569d205a6a0f0e6dc7d" +"checksum raw-cpuid 7.0.3 (registry+https://github.com/rust-lang/crates.io-index)" = "b4a349ca83373cfa5d6dbb66fd76e58b2cca08da71a5f6400de0a0a6a9bceeaf" "checksum rawkey 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "33ec17a493dcb820725c002bc253f6f3ba4e4dc635e72c238540691b05e43897" "checksum rdrand 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "678054eb77286b51581ba43620cc911abf02758c91f93f479767aed0f90458b2" "checksum readkey 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)" = "d98db94bb4f3e926c8d8186547cd9366d958d753aff5801214d93d38214e8f0f" "checksum redox_syscall 0.1.56 (registry+https://github.com/rust-lang/crates.io-index)" = "2439c63f3f6139d1b57529d16bc3b8bb855230c8efcc5d3a896c8bea7c3b1e84" "checksum redox_users 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "4ecedbca3bf205f8d8f5c2b44d83cd0690e39ee84b951ed649e9f1841132b66d" -"checksum regex 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "88c3d9193984285d544df4a30c23a4e62ead42edf70a4452ceb76dac1ce05c26" +"checksum regex 1.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "dc220bd33bdce8f093101afe22a037b8eb0e5af33592e6a9caafff0d4cb81cbd" "checksum regex-automata 0.1.8 (registry+https://github.com/rust-lang/crates.io-index)" = "92b73c2a1770c255c240eaa4ee600df1704a38dc3feaa6e949e7fcd4f8dc09f9" -"checksum regex-syntax 0.6.11 (registry+https://github.com/rust-lang/crates.io-index)" = "b143cceb2ca5e56d5671988ef8b15615733e7ee16cd348e064333b251b89343f" +"checksum regex-syntax 0.6.12 (registry+https://github.com/rust-lang/crates.io-index)" = "11a7e20d1cce64ef2fed88b66d347f88bd9babb82845b2b858f3edbf59a4f716" "checksum remove_dir_all 0.5.2 (registry+https://github.com/rust-lang/crates.io-index)" = "4a83fa3702a688b9359eccba92d153ac33fd2e8462f9e0e3fdf155239ea7792e" "checksum render-tree 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "68ed587df09cfb7ce1bc6fe8f77e24db219f222c049326ccbfb948ec67e31664" "checksum result 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "194d8e591e405d1eecf28819740abed6d719d1a2db87fc0bcdedee9a26d55560" -"checksum roxmltree 0.7.0 
(registry+https://github.com/rust-lang/crates.io-index)" = "153c367ce9fb8ef7afe637ef92bd083ba0f88b03ef3fcf0287d40be05ae0a61c" +"checksum roxmltree 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)" = "b1a3193e568c6e262f817fd07af085c7f79241a947aedd3779d47eadc170e174" "checksum rusqlite 0.20.0 (registry+https://github.com/rust-lang/crates.io-index)" = "2a194373ef527035645a1bc21b10dc2125f73497e6e155771233eb187aedd051" -"checksum rust-argon2 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)" = "81ed8d04228b44a740c8d46ff872a28e50fff3d659f307ab4da2cc502e019ff3" +"checksum rust-argon2 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)" = "4ca4eaef519b494d1f2848fc602d18816fed808a981aedf4f1f00ceb7c9d32cf" "checksum rust-ini 0.13.0 (registry+https://github.com/rust-lang/crates.io-index)" = "3e52c148ef37f8c375d49d5a73aa70713125b7f19095948a923f80afdeb22ec2" -"checksum rustc-demangle 0.1.15 (registry+https://github.com/rust-lang/crates.io-index)" = "a7f4dccf6f4891ebcc0c39f9b6eb1a83b9bf5d747cb439ec6fba4f3b977038af" +"checksum rustc-demangle 0.1.16 (registry+https://github.com/rust-lang/crates.io-index)" = "4c691c0e608126e00913e33f0ccf3727d5fc84573623b8d65b2df340b5201783" "checksum rustc-serialize 0.3.24 (registry+https://github.com/rust-lang/crates.io-index)" = "dcf128d1287d2ea9d80910b5f1120d0b8eede3fbf1abe91c40d39ea7d51e6fda" "checksum rustc_version 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)" = "138e3e0acb6c9fb258b19b67cb8abd63c00679d2851805ea151465464fe9030a" -"checksum rustyline 5.0.2 (registry+https://github.com/rust-lang/crates.io-index)" = "f8ee0838a6594169a1c5f4bb9af0fe692cc99691941710a8cc6576395ede804e" -"checksum ryu 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "c92464b447c0ee8c4fb3824ecc8383b81717b9f1e74ba2e72540aef7b9f82997" -"checksum safemem 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "e133ccc4f4d1cd4f89cc8a7ff618287d56dc7f638b8e38fc32c5fdcadc339dd5" +"checksum rustyline 5.0.3 (git+https://github.com/kkawakam/rustyline.git)" = "" +"checksum ryu 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = "19d2271fa48eaf61e53cc88b4ad9adcbafa2d512c531e7fadb6dc11a4d3656c5" +"checksum safemem 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)" = "d2b08423011dae9a5ca23f07cf57dac3857f5c885d352b76f6d95f4aea9434d0" "checksum same-file 1.0.5 (registry+https://github.com/rust-lang/crates.io-index)" = "585e8ddcedc187886a30fa705c47985c3fa88d06624095856b36ca0b82ff4421" -"checksum schannel 0.1.15 (registry+https://github.com/rust-lang/crates.io-index)" = "f2f6abf258d99c3c1c5c2131d99d064e94b7b3dd5f416483057f308fea253339" -"checksum scopeguard 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "94258f53601af11e6a49f722422f6e3425c52b06245a5cf9bc09908b174f5e27" -"checksum scopeguard 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "b42e15e59b18a828bbf5c58ea01debb36b9b096346de35d941dcb89009f24a0d" +"checksum schannel 0.1.16 (registry+https://github.com/rust-lang/crates.io-index)" = "87f550b06b6cba9c8b8be3ee73f391990116bf527450d2556e9b9ce263b9a021" "checksum semver 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)" = "1d7eb9ef2c18661902cc47e535f9bc51b78acd254da71d375c2f6720d9a40403" "checksum semver-parser 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "388a1df253eca08550bef6c72392cfe7c30914bf41df5269b68cbd6ff8f570a3" "checksum serde 0.8.23 (registry+https://github.com/rust-lang/crates.io-index)" = 
"9dad3f759919b92c3068c696c15c3d17238234498bbdcc80f2c469606f948ac8" -"checksum serde 1.0.99 (registry+https://github.com/rust-lang/crates.io-index)" = "fec2851eb56d010dc9a21b89ca53ee75e6528bab60c11e89d38390904982da9f" +"checksum serde 1.0.101 (registry+https://github.com/rust-lang/crates.io-index)" = "9796c9b7ba2ffe7a9ce53c2287dfc48080f4b2b362fcc245a259b3a7201119dd" "checksum serde-hjson 0.8.2 (registry+https://github.com/rust-lang/crates.io-index)" = "0b833c5ad67d52ced5f5938b2980f32a9c1c5ef047f0b4fb3127e7a423c76153" "checksum serde-hjson 0.9.1 (registry+https://github.com/rust-lang/crates.io-index)" = "6a3a4e0ea8a88553209f6cc6cfe8724ecad22e1acf372793c27d995290fe74f8" "checksum serde-value 0.5.3 (registry+https://github.com/rust-lang/crates.io-index)" = "7a663f873dedc4eac1a559d4c6bc0d0b2c34dc5ac4702e105014b8281489e44f" "checksum serde_bytes 0.11.2 (registry+https://github.com/rust-lang/crates.io-index)" = "45af0182ff64abaeea290235eb67da3825a576c5d53e642c4d5b652e12e6effc" -"checksum serde_derive 1.0.98 (registry+https://github.com/rust-lang/crates.io-index)" = "01e69e1b8a631f245467ee275b8c757b818653c6d704cdbcaeb56b56767b529c" -"checksum serde_derive_internals 0.24.1 (registry+https://github.com/rust-lang/crates.io-index)" = "8a80c6c0b1ebbcea4ec2c7e9e2e9fa197a425d17f1afec8ba79fcd1352b18ffb" +"checksum serde_derive 1.0.101 (registry+https://github.com/rust-lang/crates.io-index)" = "4b133a43a1ecd55d4086bd5b4dc6c1751c68b1bfbeba7a5040442022c7e7c02e" "checksum serde_ini 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "eb236687e2bb073a7521c021949be944641e671b8505a94069ca37b656c81139" -"checksum serde_json 1.0.40 (registry+https://github.com/rust-lang/crates.io-index)" = "051c49229f282f7c6f3813f8286cc1e3323e8051823fce42c7ea80fe13521704" +"checksum serde_json 1.0.41 (registry+https://github.com/rust-lang/crates.io-index)" = "2f72eb2a68a7dc3f9a691bfda9305a1c017a6215e5a4545c258500d2099a37c2" "checksum serde_test 0.8.23 (registry+https://github.com/rust-lang/crates.io-index)" = "110b3dbdf8607ec493c22d5d947753282f3bae73c0f56d322af1e8c78e4c23d5" "checksum serde_urlencoded 0.6.1 (registry+https://github.com/rust-lang/crates.io-index)" = "9ec5d77e2d4c73717816afac02670d5c4f534ea95ed430442cad02e7a6e32c97" -"checksum serde_yaml 0.8.9 (registry+https://github.com/rust-lang/crates.io-index)" = "38b08a9a90e5260fe01c6480ec7c811606df6d3a660415808c3c3fa8ed95b582" +"checksum serde_yaml 0.8.11 (registry+https://github.com/rust-lang/crates.io-index)" = "691b17f19fc1ec9d94ec0b5864859290dff279dbd7b03f017afda54eb36c3c35" "checksum shell32-sys 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "9ee04b46101f57121c9da2b151988283b6beb79b34f5bb29a58ee48cb695122c" "checksum shellexpand 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "de7a5b5a9142fd278a10e0209b021a1b85849352e6951f4f914735c976737564" "checksum slab 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)" = "c111b5bd5695e56cffe5129854aa230b39c93a305372fdbb2668ca2394eea9f8" -"checksum sluice 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)" = "ec70d7c3b17c262d4a18f7291c6ce62bf47170915f3b795434d3c5c49a4e59b7" +"checksum sluice 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)" = "0a7d06dfb3e8743bc19e6de8a302277471d08077d68946b307280496dc5a3531" "checksum smallvec 0.6.10 (registry+https://github.com/rust-lang/crates.io-index)" = "ab606a9c5e214920bb66c458cd7be8ef094f813f20fe77a54cc7dbfff220d4b7" "checksum socket2 0.3.11 (registry+https://github.com/rust-lang/crates.io-index)" = 
"e8b74de517221a2cb01a53349cf54182acdc31a074727d3079068448c0676d85" "checksum sourcefile 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)" = "4bf77cb82ba8453b42b6ae1d692e4cdc92f9a47beaf89a847c8be83f4e328ad3" -"checksum stable_deref_trait 1.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "dba1a27d3efae4351c8051072d619e3ade2820635c3958d826bfea39d59b54c8" -"checksum stackvector 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)" = "1c4725650978235083241fab0fdc8e694c3de37821524e7534a1a9061d1068af" -"checksum static_assertions 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "b4f8de36da215253eb5f24020bfaa0646613b48bf7ebe36cdfa37c3b3b33b241" +"checksum static_assertions 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)" = "7f3eb36b47e512f8f1c9e3d10c2c1965bc992bd9cdb024fa581e2194501c83d3" "checksum strsim 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)" = "8ea5119cdb4c55b55d432abb513a0429384878c15dde60cc77b1c99de1a95a6a" +"checksum sublime_fuzzy 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)" = "97bd7ad698ea493a3a7f60c2ffa117c234f341e09f8cc2d39cef10cdde077acf" "checksum subprocess 0.1.18 (registry+https://github.com/rust-lang/crates.io-index)" = "28fc0f40f0c0da73339d347aa7d6d2b90341a95683a47722bc4eebed71ff3c00" "checksum surf 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)" = "018eed64aede455beb88505d50c5c64882bebbe0996d4b660c272e3d8bb6f883" -"checksum syn 0.15.43 (registry+https://github.com/rust-lang/crates.io-index)" = "ee06ea4b620ab59a2267c6b48be16244a3389f8bfa0986bdd15c35b890b00af3" -"checksum syn 1.0.4 (registry+https://github.com/rust-lang/crates.io-index)" = "c65d951ab12d976b61a41cf9ed4531fc19735c6e6d84a4bb1453711e762ec731" -"checksum synstructure 0.10.2 (registry+https://github.com/rust-lang/crates.io-index)" = "02353edf96d6e4dc81aea2d8490a7e9db177bf8acb0e951c24940bf866cb313f" +"checksum syn 1.0.5 (registry+https://github.com/rust-lang/crates.io-index)" = "66850e97125af79138385e9b88339cbcd037e3f28ceab8c5ad98e64f0f1f80bf" +"checksum synstructure 0.12.1 (registry+https://github.com/rust-lang/crates.io-index)" = "3f085a5855930c0441ca1288cf044ea4aecf4f43a91668abdb870b4ba546a203" "checksum syntect 3.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "e80b8831c5a543192ffc3727f01cf0e57579c6ac15558e3048bfb5708892167b" "checksum tempfile 3.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "7a6e24d9338a0a5be79593e2fa15a648add6138caa803e2d5bc782c371732ca9" "checksum term 0.5.2 (registry+https://github.com/rust-lang/crates.io-index)" = "edd106a334b7657c10b7c540a0106114feadeb4dc314513e97df481d5d966f42" @@ -3393,34 +3102,33 @@ dependencies = [ "checksum tokio-io 0.1.12 (registry+https://github.com/rust-lang/crates.io-index)" = "5090db468dad16e1a7a54c8c67280c5e4b544f3d3e018f0b913b400261f85926" "checksum toml 0.4.10 (registry+https://github.com/rust-lang/crates.io-index)" = "758664fc71a3a69038656bee8b6be6477d2a6c315a6b81f7081f591bffa4111f" "checksum toml 0.5.3 (registry+https://github.com/rust-lang/crates.io-index)" = "c7aabe75941d914b72bf3e5d3932ed92ce0664d49d8432305a8b547c37227724" -"checksum typenum 1.10.0 (registry+https://github.com/rust-lang/crates.io-index)" = "612d636f949607bdf9b123b4a6f6d966dedf3ff669f7f045890d3a4a73948169" -"checksum unicase 2.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "a84e5511b2a947f3ae965dcb29b13b7b1691b6e7332cf5dbc1744138d5acb7f6" +"checksum trash 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = 
"f2f24d31505f49e989b1ee2c03c323251f6763d5907d471b71192dac92e323f8" +"checksum typenum 1.11.2 (registry+https://github.com/rust-lang/crates.io-index)" = "6d2783fe2d6b8c1101136184eb41be8b1ad379e4657050b8aaff0c79ee7575f9" +"checksum unicase 2.5.1 (registry+https://github.com/rust-lang/crates.io-index)" = "2e2e6bd1e59e56598518beb94fd6db628ded570326f0a98c679a304bd9f00150" "checksum unicode-bidi 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)" = "49f2bd0c6468a8230e1db229cff8029217cf623c767ea5d60bfbd42729ea54d5" "checksum unicode-normalization 0.1.8 (registry+https://github.com/rust-lang/crates.io-index)" = "141339a08b982d942be2ca06ff8b076563cbe223d1befd5450716790d44e2426" "checksum unicode-segmentation 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "1967f4cdfc355b37fd76d2a954fb2ed3871034eb4f26d60537d88795cfc332a9" "checksum unicode-width 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)" = "7007dbd421b92cc6e28410fe7362e2e0a2503394908f417b68ec8d1c364c4e20" -"checksum unicode-xid 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "fc72304796d0818e357ead4e000d19c9c174ab23dc11093ac919054d20a6a7fc" "checksum unicode-xid 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "826e7639553986605ec5979c7dd957c7895e93eabed50ab2ffa7f6128a75097c" -"checksum unreachable 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "382810877fe448991dfc7f0dd6e3ae5d58088fd0ea5e35189655f84e6814fa56" "checksum uom 0.23.1 (registry+https://github.com/rust-lang/crates.io-index)" = "3ef5bbe8385736e498dbb0033361f764ab43a435192513861447b9f7714b3fec" "checksum uom 0.25.0 (registry+https://github.com/rust-lang/crates.io-index)" = "3198c29f199fa8a23d732f4aa21ddc4f4d0a257cb0c2a44afea30145ce2575c1" "checksum url 2.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "75b414f6c464c879d7f9babf951f23bc3743fb7313c081b2e6ca719067ea9d61" "checksum user32-sys 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "4ef4711d107b21b410a3a974b1204d9accc8b10dad75d8324b5d755de1617d47" "checksum utf8parse 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "8772a4ccbb4e89959023bc5b7cb8623a795caa7092d99f3aa9501b9484d4557d" -"checksum uuid 0.7.4 (registry+https://github.com/rust-lang/crates.io-index)" = "90dbc611eb48397705a6b0f6e917da23ae517e4d127123d2cf7674206627d32a" "checksum vcpkg 0.2.7 (registry+https://github.com/rust-lang/crates.io-index)" = "33dd455d0f96e90a75803cfeb7f948768c08d70a6de9a8d2362461935698bf95" "checksum vec_map 0.8.1 (registry+https://github.com/rust-lang/crates.io-index)" = "05c78687fb1a80548ae3250346c3db86a80a7cdd77bda190189f2d0a0987c81a" "checksum version_check 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)" = "914b1a6776c4c929a602fafd8bc742e06365d4bcbe48c30f9cca5824f70dc9dd" "checksum void 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)" = "6a02e4885ed3bc0f2de90ea6dd45ebcbb66dacffe03547fadbb0eeae2770887d" "checksum walkdir 2.2.9 (registry+https://github.com/rust-lang/crates.io-index)" = "9658c94fa8b940eab2250bd5a457f9c48b748420d71293b165c8cdbe2f55f71e" -"checksum wasm-bindgen 0.2.50 (registry+https://github.com/rust-lang/crates.io-index)" = "dcddca308b16cd93c2b67b126c688e5467e4ef2e28200dc7dfe4ae284f2faefc" -"checksum wasm-bindgen-backend 0.2.50 (registry+https://github.com/rust-lang/crates.io-index)" = "f805d9328b5fc7e5c6399960fd1889271b9b58ae17bdb2417472156cc9fafdd0" -"checksum wasm-bindgen-futures 0.3.25 (registry+https://github.com/rust-lang/crates.io-index)" = 
"73c25810ee684c909488c214f55abcbc560beb62146d352b9588519e73c2fed9" -"checksum wasm-bindgen-macro 0.2.50 (registry+https://github.com/rust-lang/crates.io-index)" = "3ff88201a482abfc63921621f6cb18eb1efd74f136b05e5841e7f8ca434539e9" -"checksum wasm-bindgen-macro-support 0.2.50 (registry+https://github.com/rust-lang/crates.io-index)" = "6a433d89ecdb9f77d46fcf00c8cf9f3467b7de9954d8710c175f61e2e245bb0e" -"checksum wasm-bindgen-shared 0.2.50 (registry+https://github.com/rust-lang/crates.io-index)" = "d41fc1bc3570cdf8d108c15e014045fd45a95bb5eb36605f96a90461fc34027d" -"checksum wasm-bindgen-webidl 0.2.50 (registry+https://github.com/rust-lang/crates.io-index)" = "be53d289bf2fa7645a089cfd5c7a34bf4fe94221f58cf86ee42a7b4bc854ff14" -"checksum web-sys 0.3.27 (registry+https://github.com/rust-lang/crates.io-index)" = "6435c477200ad486089a7a72c2bd6c9bdf9740bd7fff868806076218076d8c51" +"checksum wasi 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "b89c3ce4ce14bdc6fb6beaf9ec7928ca331de5df7e5ea278375642a2f478570d" +"checksum wasm-bindgen 0.2.51 (registry+https://github.com/rust-lang/crates.io-index)" = "cd34c5ba0d228317ce388e87724633c57edca3e7531feb4e25e35aaa07a656af" +"checksum wasm-bindgen-backend 0.2.51 (registry+https://github.com/rust-lang/crates.io-index)" = "927196b315c23eed2748442ba675a4c54a1a079d90d9bdc5ad16ce31cf90b15b" +"checksum wasm-bindgen-futures 0.3.27 (registry+https://github.com/rust-lang/crates.io-index)" = "83420b37346c311b9ed822af41ec2e82839bfe99867ec6c54e2da43b7538771c" +"checksum wasm-bindgen-macro 0.2.51 (registry+https://github.com/rust-lang/crates.io-index)" = "92c2442bf04d89792816650820c3fb407af8da987a9f10028d5317f5b04c2b4a" +"checksum wasm-bindgen-macro-support 0.2.51 (registry+https://github.com/rust-lang/crates.io-index)" = "9c075d27b7991c68ca0f77fe628c3513e64f8c477d422b859e03f28751b46fc5" +"checksum wasm-bindgen-shared 0.2.51 (registry+https://github.com/rust-lang/crates.io-index)" = "83d61fe986a7af038dd8b5ec660e5849cbd9f38e7492b9404cc48b2b4df731d1" +"checksum wasm-bindgen-webidl 0.2.51 (registry+https://github.com/rust-lang/crates.io-index)" = "9b979afb0535fe4749906a674082db1211de8aef466331d43232f63accb7c07c" +"checksum web-sys 0.3.28 (registry+https://github.com/rust-lang/crates.io-index)" = "c84440699cd02ca23bed6f045ffb1497bc18a3c2628bd13e2093186faaaacf6b" "checksum weedle 0.10.0 (registry+https://github.com/rust-lang/crates.io-index)" = "3bb43f70885151e629e2a19ce9e50bd730fd436cfd4b666894c9ce4de9141164" "checksum which 2.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = "b57acb10231b9493c8472b20cb57317d0679a49e0bdbee44b3b803a6473af164" "checksum widestring 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "effc0e4ff8085673ea7b9b2e3c73f6bd4d118810c9009ed8f1e16bd96c331db6" @@ -3430,11 +3138,11 @@ dependencies = [ "checksum winapi-i686-pc-windows-gnu 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" "checksum winapi-util 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "7168bab6e1daee33b4557efd0e95d5ca70a03706d39fa5f3fe7a236f584b03c9" "checksum winapi-x86_64-pc-windows-gnu 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" -"checksum wincolor 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = "561ed901ae465d6185fa7864d63fbd5720d0ef718366c9a4dc83cf6170d7e9ba" +"checksum wincolor 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)" = 
"96f5016b18804d24db43cebf3c77269e7569b8954a8464501c216cc5e070eaa9" "checksum x11 2.18.1 (registry+https://github.com/rust-lang/crates.io-index)" = "39697e3123f715483d311b5826e254b6f3cfebdd83cf7ef3358f579c3d68e235" "checksum x11-clipboard 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "89bd49c06c9eb5d98e6ba6536cf64ac9f7ee3a009b2f53996d405b3944f6bcea" "checksum xcb 0.8.2 (registry+https://github.com/rust-lang/crates.io-index)" = "5e917a3f24142e9ff8be2414e36c649d47d6cc2ba81f16201cdef96e533e02de" "checksum xdg 2.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "d089681aa106a86fade1b0128fb5daf07d5867a509ab036d99988dec80429a57" "checksum xml-rs 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)" = "541b12c998c5b56aa2b4e6f18f03664eef9a4fd0a246a55594efae6cc2d964b5" -"checksum xmlparser 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)" = "ecec95f00fb0ff019153e64ea520f87d1409769db3e8f4db3ea588638a3e1cee" +"checksum xmlparser 0.10.0 (registry+https://github.com/rust-lang/crates.io-index)" = "8110496c5bcc0d966b0b2da38d5a791aa139eeb0b80e7840a7463c2b806921eb" "checksum yaml-rust 0.4.3 (registry+https://github.com/rust-lang/crates.io-index)" = "65923dd1784f44da1d2c3dbbc5e822045628c590ba72123e1c73d3c230c4434d" diff --git a/Cargo.toml b/Cargo.toml index dee6503e50..97b02b450c 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "nu" -version = "0.2.0" +version = "0.4.1" authors = ["Yehuda Katz ", "Jonathan Turner ", "Andrés N. Robalino "] description = "A shell for the GitHub era" license = "MIT" @@ -8,83 +8,94 @@ edition = "2018" readme = "README.md" default-run = "nu" repository = "https://github.com/nushell/nushell" -homepage = "http://nushell.sh" +homepage = "https://www.nushell.sh" documentation = "https://book.nushell.sh" # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html [dependencies] -rustyline = "5.0.2" -chrono = { version = "0.4.7", features = ["serde"] } +rustyline = "5.0.4" +chrono = { version = "0.4.9", features = ["serde"] } derive-new = "0.5.8" prettytable-rs = "0.8.0" itertools = "0.8.0" -ansi_term = "0.12.0" +ansi_term = "0.12.1" nom = "5.0.0" dunce = "1.0.0" -indexmap = { version = "1.1.0", features = ["serde-1"] } +indexmap = { version = "1.2.0", features = ["serde-1"] } chrono-humanize = "0.0.11" byte-unit = "3.0.1" base64 = "0.10.1" futures-preview = { version = "=0.3.0-alpha.18", features = ["compat", "io-compat"] } -futures-async-stream = "=0.1.0-alpha.5" +async-stream = "0.1.1" futures_codec = "0.2.5" num-traits = "0.2.8" term = "0.5.2" bytes = "0.4.12" log = "0.4.8" pretty_env_logger = "0.3.1" -serde = { version = "1.0.99", features = ["derive"] } +serde = { version = "1.0.100", features = ["derive"] } bson = { version = "0.14.0", features = ["decimal128"] } serde_json = "1.0.40" serde-hjson = "0.9.1" serde_yaml = "0.8" serde_bytes = "0.11.2" getset = "0.0.8" -language-reporting = "0.3.1" +language-reporting = "0.4.0" app_dirs = "1.2.1" csv = "1.1" toml = "0.5.3" clap = "2.33.0" -git2 = { version = "0.10.0", default_features = false } +git2 = { version = "0.10.1", default_features = false } dirs = "2.0.2" glob = "0.3.0" ctrlc = "3.1.3" -ptree = "0.2" surf = "1.0.2" url = "2.1.0" roxmltree = "0.7.0" -nom5_locate = "0.1.1" -enum-utils = "0.1.1" +nom_locate = "1.0.0" +nom-tracable = "0.4.0" unicode-xid = "0.2.0" serde_ini = "0.2.0" subprocess = "0.1.18" -mime = "0.3.13" -regex = "1.2.1" +mime = "0.3.14" pretty-hex = "0.1.0" -neso = { version = "0.5.0", optional = 
true } hex = "0.3.2" -crossterm = "0.10.2" tempfile = "3.1.0" -image = { version = "0.22.1", default_features = false, features = ["png_codec", "jpeg"] } semver = "0.9.0" -uuid = {version = "0.7.4", features = [ "v4", "serde" ]} -syntect = "3.2.0" -onig_sys = "=69.1.0" -heim = "0.0.7" which = "2.0.1" -battery = "0.7.4" textwrap = {version = "0.11.0", features = ["term_size"]} +shellexpand = "1.0.0" +futures-timer = "0.4.0" +pin-utils = "0.1.0-alpha.4" +num-bigint = { version = "0.2.3", features = ["serde"] } +bigdecimal = { version = "0.1.0", features = ["serde"] } +natural = "0.3.0" +serde_urlencoded = "0.6.1" +sublime_fuzzy = "0.5" +trash = "1.0.0" +regex = "1" + +neso = { version = "0.5.0", optional = true } +crossterm = { version = "0.10.2", optional = true } +syntect = {version = "3.2.0", optional = true } +onig_sys = {version = "=69.1.0", optional = true } +heim = {version = "0.0.8", optional = true } +battery = {version = "0.7.4", optional = true } rawkey = {version = "0.1.2", optional = true } clipboard = {version = "0.5", optional = true } -shellexpand = "1.0.0" -futures-timer = "0.3.0" -pin-utils = "0.1.0-alpha.4" -num-bigint = { version = "0.2.2", features = ["serde"] } -bigdecimal = { version = "0.1.0", features = ["serde"] } +ptree = {version = "0.2" } +image = { version = "0.22.2", default_features = false, features = ["png_codec", "jpeg"], optional = true } [features] +default = ["textview", "sys", "ps"] raw-key = ["rawkey", "neso"] +textview = ["syntect", "onig_sys", "crossterm"] +binaryview = ["image", "crossterm"] +sys = ["heim", "battery"] +ps = ["heim"] +# trace = ["nom-tracable/trace"] +all = ["raw-key", "textview", "binaryview", "sys", "ps", "clipboard"] [dependencies.rusqlite] version = "0.20.0" @@ -93,6 +104,10 @@ features = ["bundled", "blob"] [dev-dependencies] pretty_assertions = "0.6.1" +[build-dependencies] +toml = "0.5.3" +serde = { version = "1.0.101", features = ["derive"] } + [lib] name = "nu" path = "src/lib.rs" @@ -105,6 +120,10 @@ path = "src/plugins/inc.rs" name = "nu_plugin_sum" path = "src/plugins/sum.rs" +[[bin]] +name = "nu_plugin_average" +path = "src/plugins/average.rs" + [[bin]] name = "nu_plugin_embed" path = "src/plugins/embed.rs" @@ -117,6 +136,10 @@ path = "src/plugins/add.rs" name = "nu_plugin_edit" path = "src/plugins/edit.rs" +[[bin]] +name = "nu_plugin_read" +path = "src/plugins/read.rs" + [[bin]] name = "nu_plugin_str" path = "src/plugins/str.rs" @@ -125,21 +148,40 @@ path = "src/plugins/str.rs" name = "nu_plugin_skip" path = "src/plugins/skip.rs" +[[bin]] +name = "nu_plugin_match" +path = "src/plugins/match.rs" +required-features = ["regex"] + [[bin]] name = "nu_plugin_sys" path = "src/plugins/sys.rs" +required-features = ["sys"] + +[[bin]] +name = "nu_plugin_ps" +path = "src/plugins/ps.rs" +required-features = ["ps"] [[bin]] name = "nu_plugin_tree" path = "src/plugins/tree.rs" +required-features = ["tree"] [[bin]] name = "nu_plugin_binaryview" path = "src/plugins/binaryview.rs" +required-features = ["binaryview"] [[bin]] name = "nu_plugin_textview" path = "src/plugins/textview.rs" +required-features = ["textview"] + +[[bin]] +name = "nu_plugin_docker" +path = "src/plugins/docker.rs" +required-features = ["docker"] [[bin]] name = "nu" diff --git a/README.md b/README.md index d2243e1193..5e482bc29e 100644 --- a/README.md +++ b/README.md @@ -1,25 +1,36 @@ [![Crates.io](https://img.shields.io/crates/v/nu.svg)](https://crates.io/crates/nu) -[![Build 
Status](https://dev.azure.com/nushell/nushell/_apis/build/status/nushell.nushell?branchName=master)](https://dev.azure.com/nushell/nushell/_build/latest?definitionId=2&branchName=master) +[![Build Status](https://dev.azure.com/nushell/nushell/_apis/build/status/nushell.nushell?branchName=master)](https://dev.azure.com/nushell/nushell/_build/latest?definitionId=2&branchName=master) [![Discord](https://img.shields.io/discord/601130461678272522.svg?logo=discord)](https://discord.gg/NtAbbGn) +[![The Changelog #363](https://img.shields.io/badge/The%20Changelog-%23363-61c192.svg)](https://changelog.com/podcast/363) + - # Nu Shell -A modern shell for the GitHub era +A modern shell for the GitHub era. -![Example of nushell](images/nushell-autocomplete4.gif "Example of nushell") +![Example of nushell](images/nushell-autocomplete.gif "Example of nushell") # Status -This project has reached a minimum-viable product level of quality. While contributors dogfood it as their daily driver, it may be instable for some commands. Future releases will work fill out missing features and improve stability. Its design is also subject to change as it matures. +This project has reached a minimum-viable product level of quality. While contributors dogfood it as their daily driver, it may be unstable for some commands. Future releases will work to fill out missing features and improve stability. Its design is also subject to change as it matures. -Nu comes with a set of built-in commands (listed below). If a command is unknown, the command will shell-out and execute it (using cmd on Windows or bash on Linux and MacOS), correctly passing through stdin, stdout and stderr, so things like your daily git workflows and even `vim` will work just fine. +Nu comes with a set of built-in commands (listed below). If a command is unknown, the command will shell-out and execute it (using cmd on Windows or bash on Linux and macOS), correctly passing through stdin, stdout, and stderr, so things like your daily git workflows and even `vim` will work just fine. -There is also a [book](https://book.nushell.sh) about Nu, currently in progress. +# Learning more + +There are a few good resources to learn about Nu. There is a [book](https://book.nushell.sh) about Nu that is currently in progress. The book focuses on using Nu and its core concepts. + +If you're a developer who would like to contribute to Nu, we're also working on a [book for developers](https://github.com/nushell/contributor-book/tree/master/en) to help you get started. There are also [good first issues](https://github.com/nushell/nushell/issues?q=is%3Aopen+is%3Aissue+label%3A%22good+first+issue%22) to help you dive in. + +We also have an active [Discord](https://discord.gg/NtAbbGn) and [Twitter](https://twitter.com/nu_shell) if you'd like to come and chat with us. + +Try it in Gitpod. + +[![Open in Gitpod](https://gitpod.io/button/open-in-gitpod.svg)](https://gitpod.io/#https://github.com/nushell/nushell) # Installation -## Local +## Local Up-to-date installation instructions can be found in the [installation chapter of the book](https://book.nushell.sh/en/installation). @@ -27,31 +38,26 @@ To build Nu, you will need to use the **nightly** version of the compiler. 
Required dependencies: -* libssl (only needed on Linux) - * on Debian/Ubuntu: `apt install libssl-dev` +* pkg-config and libssl (only needed on Linux) + * on Debian/Ubuntu: `apt install pkg-config libssl-dev` Optional dependencies: * To use Nu with all possible optional features enabled, you'll also need the following: * on Linux (on Debian/Ubuntu): `apt install libxcb-composite0-dev libx11-dev` -To install Nu via cargo: +To install Nu via cargo (make sure you have installed [rustup](https://rustup.rs/) and the beta compiler via `rustup install beta`): ``` -cargo +nightly install nu +cargo +beta install nu ``` -You can also install Nu with all the bells and whistles: +You can also install Nu with all the bells and whistles (be sure to have installed the [dependencies](https://book.nushell.sh/en/installation#dependencies) for your platform): ``` -cargo +nightly install nu --features raw-key,clipboard +cargo +beta install nu --all-features ``` -The following optional features are currently supported: - -* **raw-key** - direct keyboard input, which creates a smoother experience in viewing text and binaries -* **clipboard** - integration with the native clipboard via the `clip` command - ## Docker If you want to pull a pre-built container, you can browse tags for the [nushell organization](https://quay.io/organization/nushell) @@ -70,13 +76,13 @@ To build the base image: ```bash $ docker build -f docker/Dockerfile.nu-base -t nushell/nu-base . -``` +``` And then to build the smaller container (using a Multistage build): ```bash $ docker build -f docker/Dockerfile -t nushell/nu . -``` +``` Either way, you can run either container as follows: @@ -86,35 +92,44 @@ $ docker run -it nushell/nu /> exit ``` -The second container is a bit smaller, if size is important to you. +The second container is a bit smaller if the size is important to you. + +## Packaging status + +[![Packaging status](https://repology.org/badge/vertical-allrepos/nushell.svg)](https://repology.org/project/nushell/versions) + +### Fedora + +[COPR repo](https://copr.fedorainfracloud.org/coprs/atim/nushell/): `sudo dnf copr enable atim/nushell -y && sudo dnf install nushell -y` # Philosophy -Nu draws inspiration from projects like PowerShell, functional programming languages, and modern cli tools. Rather than thinking of files and services as raw streams of text, Nu looks at each input as something with structure. For example, when you list the contents of a directory, what you get back is a list of objects, where each object represents an item in that directory. These values can be piped through a series of steps, in a series of commands called a 'pipeline'. +Nu draws inspiration from projects like PowerShell, functional programming languages, and modern CLI tools. Rather than thinking of files and services as raw streams of text, Nu looks at each input as something with structure. For example, when you list the contents of a directory, what you get back is a table of rows, where each row represents an item in that directory. These values can be piped through a series of steps, in a series of commands called a 'pipeline'. ## Pipelines -In Unix, it's common to pipe between commands to split up a sophisticated command over multiple steps. Nu takes this a step further and builds heavily on the idea of _pipelines_. Just as the Unix philosophy, Nu allows commands to output from stdout and read from stdin. Additionally, commands can output structured data (you can think of this as a third kind of stream). 
Commands that work in the pipeline fit into one of three categories +In Unix, it's common to pipe between commands to split up a sophisticated command over multiple steps. Nu takes this a step further and builds heavily on the idea of _pipelines_. Just as the Unix philosophy, Nu allows commands to output from stdout and read from stdin. Additionally, commands can output structured data (you can think of this as a third kind of stream). Commands that work in the pipeline fit into one of three categories: * Commands that produce a stream (eg, `ls`) * Commands that filter a stream (eg, `where type == "Directory"`) -* Commands that consumes the output of the pipeline (eg, `autoview`) +* Commands that consume the output of the pipeline (eg, `autoview`) Commands are separated by the pipe symbol (`|`) to denote a pipeline flowing left to right. ``` /home/jonathan/Source/nushell(master)> ls | where type == "Directory" | autoview ---------+-----------+----------+--------+--------------+---------------- - name | type | readonly | size | accessed | modified ---------+-----------+----------+--------+--------------+---------------- - target | Directory | | 4.1 KB | 19 hours ago | 19 hours ago - images | Directory | | 4.1 KB | 2 weeks ago | a week ago - tests | Directory | | 4.1 KB | 2 weeks ago | 18 minutes ago - docs | Directory | | 4.1 KB | a week ago | a week ago - .git | Directory | | 4.1 KB | 2 weeks ago | 25 minutes ago - src | Directory | | 4.1 KB | 2 weeks ago | 25 minutes ago - .cargo | Directory | | 4.1 KB | 2 weeks ago | 2 weeks ago ---------+-----------+----------+--------+--------------+---------------- +━━━━┯━━━━━━━━━━━┯━━━━━━━━━━━┯━━━━━━━━━━┯━━━━━━━━┯━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━ + # │ name │ type │ readonly │ size │ accessed │ modified +────┼───────────┼───────────┼──────────┼────────┼──────────────┼──────────────── + 0 │ .azure │ Directory │ │ 4.1 KB │ 2 months ago │ a day ago + 1 │ target │ Directory │ │ 4.1 KB │ 3 days ago │ 3 days ago + 2 │ images │ Directory │ │ 4.1 KB │ 2 months ago │ 2 weeks ago + 3 │ tests │ Directory │ │ 4.1 KB │ 2 months ago │ 37 minutes ago + 4 │ tmp │ Directory │ │ 4.1 KB │ 2 weeks ago │ 2 weeks ago + 5 │ src │ Directory │ │ 4.1 KB │ 2 months ago │ 37 minutes ago + 6 │ assets │ Directory │ │ 4.1 KB │ a month ago │ a month ago + 7 │ docs │ Directory │ │ 4.1 KB │ 2 months ago │ 2 months ago +━━━━┷━━━━━━━━━━━┷━━━━━━━━━━━┷━━━━━━━━━━┷━━━━━━━━┷━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━ ``` Because most of the time you'll want to see the output of a pipeline, `autoview` is assumed. We could have also written the above: @@ -126,15 +141,16 @@ Because most of the time you'll want to see the output of a pipeline, `autoview` Being able to use the same commands and compose them differently is an important philosophy in Nu. For example, we could use the built-in `ps` command as well to get a list of the running processes, using the same `where` as above. 
```text -C:\Code\nushell(master)> ps | where cpu > 0 ------------------- +-----+-------+-------+---------- - name | cmd | cpu | pid | status ------------------- +-----+-------+-------+---------- - msedge.exe | - | 0.77 | 26472 | Runnable - nu.exe | - | 7.83 | 15473 | Runnable - SearchIndexer.exe | - | 82.17 | 23476 | Runnable - BlueJeans.exe | - | 4.54 | 10000 | Runnable --------------------+-----+-------+-------+---------- +/home/jonathan/Source/nushell(master)> ps | where cpu > 0 +━━━┯━━━━━━━┯━━━━━━━━━━━━━━━━━┯━━━━━━━━━━┯━━━━━━━━━━ + # │ pid │ name │ status │ cpu +───┼───────┼─────────────────┼──────────┼────────── + 0 │ 992 │ chrome │ Sleeping │ 6.988768 + 1 │ 4240 │ chrome │ Sleeping │ 5.645982 + 2 │ 13973 │ qemu-system-x86 │ Sleeping │ 4.996551 + 3 │ 15746 │ nu │ Sleeping │ 84.59905 +━━━┷━━━━━━━┷━━━━━━━━━━━━━━━━━┷━━━━━━━━━━┷━━━━━━━━━━ + ``` ## Opening files @@ -143,36 +159,36 @@ Nu can load file and URL contents as raw text or as structured data (if it recog ``` /home/jonathan/Source/nushell(master)> open Cargo.toml ------------------+------------------+----------------- - dependencies | dev-dependencies | package ------------------+------------------+----------------- - [object Object] | [object Object] | [object Object] ------------------+------------------+----------------- +━━━━━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━━━ + bin │ dependencies │ dev-dependencies +──────────────────┼────────────────┼────────────────── + [table: 12 rows] │ [table: 1 row] │ [table: 1 row] +━━━━━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━━━ ``` We can pipeline this into a command that gets the contents of one of the columns: ``` /home/jonathan/Source/nushell(master)> open Cargo.toml | get package --------------+----------------------------+---------+---------+------+--------- - authors | description | edition | license | name | version --------------+----------------------------+---------+---------+------+--------- - [list List] | A shell for the GitHub era | 2018 | MIT | nu | 0.2.0 --------------+----------------------------+---------+---------+------+--------- +━━━━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━━━━━━━━━━━━━┯━━━━━━━━━┯━━━━━━━━━┯━━━━━━┯━━━━━━━━━ + authors │ description │ edition │ license │ name │ version +─────────────────┼────────────────────────────┼─────────┼─────────┼──────┼───────── + [table: 3 rows] │ A shell for the GitHub era │ 2018 │ MIT │ nu │ 0.4.0 +━━━━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━━━━━━━━━━━━━┷━━━━━━━━━┷━━━━━━━━━┷━━━━━━┷━━━━━━━━━ ``` Finally, we can use commands outside of Nu once we have the data we want: ``` /home/jonathan/Source/nushell(master)> open Cargo.toml | get package.version | echo $it -0.2.0 +0.4.0 ``` Here we use the variable `$it` to refer to the value being piped to the external command. ## Shells -By default, Nu will work inside of a single directory and allow you to navigate around your filesystem. Sometimes, you'll want to work in multiple directories at the same time. For this, Nu offers a way of adding additional working directories that you can jump between. +Nu will work inside of a single directory and allow you to navigate around your filesystem by default. Nu also offers a way of adding additional working directories that you can jump between, allowing you to work in multiple directories at the same time. To do so, use the `enter` command, which will allow you create a new "shell" and enter it at the specified path. 
You can toggle between this new shell and the original shell with the `p` (for previous) and `n` (for next), allowing you to navigate around a ring buffer of shells. Once you're done with a shell, you can `exit` it and remove it from the ring buffer. @@ -180,11 +196,11 @@ Finally, to get a list of all the current shells, you can use the `shells` comma ## Plugins -Nu supports plugins that offer additional functionality to the shell and follow the same object model that built-in commands use. This allows you to extend nu for your needs. +Nu supports plugins that offer additional functionality to the shell and follow the same structured data model that built-in commands use. This allows you to extend nu for your needs. There are a few examples in the `plugins` directory. -Plugins are binaries that are available in your path and follow a "nu_plugin_*" naming convention. These binaries interact with nu via a simple JSON-RPC protocol where the command identifies itself and passes along its configuration, which then makes it available for use. If the plugin is a filter, data streams to it one element at a time, and it can stream data back in return via stdin/stdout. If the plugin is a sink, it is given the full vector of final data and is given free reign over stdin/stdout to use as it pleases. +Plugins are binaries that are available in your path and follow a `nu_plugin_*` naming convention. These binaries interact with nu via a simple JSON-RPC protocol where the command identifies itself and passes along its configuration, which then makes it available for use. If the plugin is a filter, data streams to it one element at a time, and it can stream data back in return via stdin/stdout. If the plugin is a sink, it is given the full vector of final data and is given free reign over stdin/stdout to use as it pleases. # Goals @@ -196,7 +212,7 @@ Nu adheres closely to a set of goals that make up its design philosophy. As feat * Nu's workflow and tools should have the usability in day-to-day experience of using a shell in 2019 (and beyond). -* Nu views data as both structured and unstructured. It is an object shell like PowerShell. +* Nu views data as both structured and unstructured. It is a structured shell like PowerShell. * Finally, Nu views data functionally. Rather than using mutation, pipelines act as a means to load, change, and save data without mutable state. @@ -206,56 +222,63 @@ Nu adheres closely to a set of goals that make up its design philosophy. As feat | ------------- | ------------- | | cd path | Change to a new path | | cp source path | Copy files | +| date (--utc) | Get the current datetime | +| fetch url | Fetch contents from a url and retrieve data as a table if possible | +| help | Display help information about commands | | ls (path) | View the contents of the current or given path | | mkdir path | Make directories, creates intermediary directories as required. | | mv source target | Move files or directories. | -| date (--utc) | Get the current datetime | +| open filename | Load a file into a cell, convert to table if possible (avoid by appending '--raw') | +| post url body (--user ) (--password ) | Post content to a url and retrieve data as a table if possible | | ps | View current processes | | sys | View information about the current system | | which filename | Finds a program file. 
| -| open {filename or url} | Load a file into a cell, convert to table if possible (avoid by appending '--raw') | -| post url body (--user ) (--password ) | Post content to a url and retrieve data as a table if possible | | rm {file or directory} | Remove a file, (for removing directory append '--recursive') | +| version | Display Nu version | + +## Shell commands +| command | description | +| ------- | ----------- | | exit (--now) | Exit the current shell (or all shells) | | enter (path) | Create a new shell and begin at this path | | p | Go to previous shell | | n | Go to next shell | | shells | Display the list of current shells | -| help | Display help information about commands | -| version | Display Nu version | ## Filters on tables (structured data) | command | description | | ------------- | ------------- | -| pick ...columns | Down-select table to only these columns | -| reject ...columns | Remove the given columns from the table | +| add column-or-column-path value | Add a new column to the table | +| append row-data | Append a row to the end of the table | +| count | Show the total number of rows | +| edit column-or-column-path value | Edit an existing column to have a new value | +| embed column | Creates a new table of one column with the given name, and places the current table inside of it | +| first amount | Show only the first number of rows | | get column-or-column-path | Open column and get data from the corresponding cells | -| sort-by ...columns | Sort by the given columns | -| where condition | Filter table to match the condition | -| inc (field) | Increment a value or version. Optional use the field of a table | -| add field value | Add a new field to the table | -| embed field | Embeds a new field to the table | -| sum | Sum a column of values | -| edit field value | Edit an existing field to have a new value | +| group-by column | Creates a new table with the data from the table rows grouped by the column given | +| inc (column-or-column-path) | Increment a value or version. Optionally use the column of a table | +| last amount | Show only the last number of rows | +| nth row-number | Return only the selected row | +| pick ...columns | Down-select table to only these columns | +| pivot --header-row | Pivot the tables, making columns into rows and vice versa | +| prepend row-data | Prepend a row to the beginning of the table | +| reject ...columns | Remove the given columns from the table | | reverse | Reverses the table. | | skip amount | Skip a number of rows | | skip-while condition | Skips rows while the condition matches. | -| first amount | Show only the first number of rows | -| last amount | Show only the last number of rows | -| nth row-number | Return only the selected row | -| str (field) | Apply string function. Optional use the field of a table | +| sort-by ...columns | Sort by the given columns | +| str (column) | Apply string function. 
Optionally use the column of a table | +| sum | Sum a column of values | | tags | Read the tags (metadata) for values | -| from-array | Expand an array/list into rows | -| to-array | Collapse rows into a single list | -| to-json | Convert table into .json text | -| to-toml | Convert table into .toml text | -| to-yaml | Convert table into .yaml text | -| to-bson | Convert table into .bson text | -| to-csv | Convert table into .csv text | | to-bson | Convert table into .bson binary data | -| to-tsv | Convert table into .tsv text | +| to-csv | Convert table into .csv text | +| to-json | Convert table into .json text | | to-sqlite | Convert table to sqlite .db binary data | -| reverse | Reverse the rows of a table | +| to-toml | Convert table into .toml text | +| to-tsv | Convert table into .tsv text | +| to-url | Convert table to a urlencoded string | +| to-yaml | Convert table into .yaml text | +| where condition | Filter table to match the condition | ## Filters on text (unstructured data) | command | description | @@ -265,13 +288,16 @@ Nu adheres closely to a set of goals that make up its design philosophy. As feat | from-ini | Parse text as .ini and create table | | from-json | Parse text as .json and create table | | from-sqlite | Parse binary data as sqlite .db and create table | +| from-ssv --minimum-spaces | Parse text as space-separated values and create table | | from-toml | Parse text as .toml and create table | | from-tsv | Parse text as .tsv and create table | +| from-url | Parse urlencoded string and create a table | | from-xml | Parse text as .xml and create a table | | from-yaml | Parse text as a .yaml/.yml and create a table | | lines | Split single string into rows, one per line | +| read pattern | Convert text to a table by matching the given pattern | | size | Gather word count statistics on the text | -| split-column sep ...fields | Split row contents across multiple columns via the separator | +| split-column sep ...column-names | Split row contents across multiple columns via the separator, optionally give the columns names | | split-row sep | Split row contents over multiple rows via the separator | | trim | Trim leading and following whitespace from text data | | {external-command} $it | Run external command with given arguments, replacing $it with each row text | @@ -280,13 +306,12 @@ Nu adheres closely to a set of goals that make up its design philosophy. 
As feat | command | description | | ------------- | ------------- | | autoview | View the contents of the pipeline as a table or list | -| binaryview | Autoview of binary data | -| clip | Copy the contents of the pipeline to the copy/paste buffer | +| binaryview | Autoview of binary data (optional feature) | +| clip | Copy the contents of the pipeline to the copy/paste buffer (optional feature) | | save filename | Save the contents of the pipeline to a file | | table | View the contents of the pipeline as a table | | textview | Autoview of text data | -| tree | View the contents of the pipeline as a tree | -| vtable | View the contents of the pipeline as a vertical (rotated) table | +| tree | View the contents of the pipeline as a tree (optional feature) | # License diff --git a/build.rs b/build.rs new file mode 100644 index 0000000000..44a55f9573 --- /dev/null +++ b/build.rs @@ -0,0 +1,39 @@ +use serde::Deserialize; +use std::collections::HashMap; +use std::collections::HashSet; +use std::env; +use std::path::Path; + +#[derive(Deserialize)] +struct Feature { + #[allow(unused)] + description: String, + enabled: bool, +} + +fn main() -> Result<(), Box<dyn std::error::Error>> { + let input = env::var("CARGO_MANIFEST_DIR").unwrap(); + let all_on = env::var("NUSHELL_ENABLE_ALL_FLAGS").is_ok(); + let flags: HashSet<String> = env::var("NUSHELL_ENABLE_FLAGS") + .map(|s| s.split(",").map(|s| s.to_string()).collect()) + .unwrap_or_else(|_| HashSet::new()); + + if all_on && !flags.is_empty() { + println!( + "cargo:warning={}", + "Both NUSHELL_ENABLE_ALL_FLAGS and NUSHELL_ENABLE_FLAGS were set. You don't need both." + ); + } + + let path = Path::new(&input).join("features.toml"); + + let toml: HashMap<String, Feature> = toml::from_str(&std::fs::read_to_string(path)?)?; + + for (key, value) in toml.iter() { + if value.enabled == true || all_on || flags.contains(key) { + println!("cargo:rustc-cfg={}", key); + } + } + + Ok(()) +} diff --git a/debian/changelog b/debian/changelog new file mode 100644 index 0000000000..d6f8273939 --- /dev/null +++ b/debian/changelog @@ -0,0 +1,5 @@ +nu (0.2.0-1) unstable; urgency=low + + * Initial release + + -- Jan Koprowski Wed, 04 Sep 2019 21:38:44 +0200 diff --git a/debian/compat b/debian/compat new file mode 100644 index 0000000000..f599e28b8a --- /dev/null +++ b/debian/compat @@ -0,0 +1 @@ +10 diff --git a/debian/control b/debian/control new file mode 100644 index 0000000000..50c156c8da --- /dev/null +++ b/debian/control @@ -0,0 +1,18 @@ +Source: nu +Section: shells +Priority: optional +Maintainer: Jan Koprowski +Build-Depends: debhelper (>= 10) +Standards-Version: 4.1.2 +Homepage: https://github.com/nushell/nushell +Vcs-Git: https://github.com/nushell/nushell.git +Vcs-Browser: https://github.com/nushell/nushell + +Package: nu +Architecture: any +Depends: ${shlibs:Depends}, ${misc:Depends} +Description: A modern shell for the GitHub era + The goal of this project is to take the Unix + philosophy of shells, where pipes connect simple + commands together, and bring it to the modern + style of development.
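A note on the feature-flag mechanism introduced by the build.rs above: at build time it reads a `features.toml` from the crate root and emits `cargo:rustc-cfg=<flag>` for every entry that is enabled (or forced on through `NUSHELL_ENABLE_ALL_FLAGS` / `NUSHELL_ENABLE_FLAGS`). The sketch below is a minimal, self-contained illustration of how such a file deserializes into the `Feature` map and how an emitted flag gates code with `#[cfg(...)]`, assuming `serde` (with derive) and `toml` as dependencies, which the crate already uses. The `hints` flag name and the inline TOML are hypothetical examples, not entries from the repository's actual features.toml.

```rust
// Minimal sketch of the features.toml shape that build.rs expects,
// with a hypothetical `hints` flag for illustration.
use serde::Deserialize;
use std::collections::HashMap;

#[derive(Deserialize)]
struct Feature {
    #[allow(unused)]
    description: String,
    enabled: bool,
}

// Code gated on a cfg flag that build.rs would emit via `cargo:rustc-cfg=hints`.
#[cfg(hints)]
fn hints_enabled() -> bool {
    true
}

#[cfg(not(hints))]
fn hints_enabled() -> bool {
    false
}

fn main() -> Result<(), Box<dyn std::error::Error>> {
    // Stand-in for reading features.toml from CARGO_MANIFEST_DIR.
    let sample = r#"
[hints]
description = "Show command hints while typing (hypothetical flag)"
enabled = false
"#;

    let features: HashMap<String, Feature> = toml::from_str(sample)?;
    for (name, feature) in &features {
        // build.rs prints `cargo:rustc-cfg=<name>` for each enabled flag,
        // which is what compiles the #[cfg(...)] branches above in or out.
        println!("{} enabled in features.toml: {}", name, feature.enabled);
    }
    println!("compiled with the `hints` cfg: {}", hints_enabled());
    Ok(())
}
```

Note that these build-script flags are separate from the Cargo `[features]` table in Cargo.toml: the cfgs emitted here only toggle conditionally compiled code, while Cargo features control optional dependencies and the plugin binaries' `required-features`.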
diff --git a/debian/copyright b/debian/copyright new file mode 100644 index 0000000000..81ce9e5e34 --- /dev/null +++ b/debian/copyright @@ -0,0 +1,32 @@ +Format: https://www.debian.org/doc/packaging-manuals/copyright-format/1.0/ +Upstream-Name: nu +Source: https://github.com/nushell/nushell + +Files: * +Copyright: 2019 Yehuda Katz + 2019 Jonathan Turner +License: MIT + +Files: debian/* +Copyright: 2019 Yehuda Katz + 2019 Jonathan Turner +License: MIT + +License: MIT + Permission is hereby granted, free of charge, to any person obtaining a + copy of this software and associated documentation files (the "Software"), + to deal in the Software without restriction, including without limitation + the rights to use, copy, modify, merge, publish, distribute, sublicense, + and/or sell copies of the Software, and to permit persons to whom the + Software is furnished to do so, subject to the following conditions: + . + The above copyright notice and this permission notice shall be included + in all copies or substantial portions of the Software. + . + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS + OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF + MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. + IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY + CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, + TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE + SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/debian/install b/debian/install new file mode 100644 index 0000000000..e9ebfc1232 --- /dev/null +++ b/debian/install @@ -0,0 +1,11 @@ +target/release/nu usr/bin +target/release/nu_plugin_binaryview usr/bin +target/release/nu_plugin_edit usr/bin +target/release/nu_plugin_inc usr/bin +target/release/nu_plugin_skip usr/bin +target/release/nu_plugin_str usr/bin +target/release/nu_plugin_sum usr/bin +target/release/nu_plugin_sys usr/bin +target/release/nu_plugin_textview usr/bin +target/release/nu_plugin_tree usr/bin +target/release/nu_plugin_docker usr/bin diff --git a/debian/postinst b/debian/postinst new file mode 100644 index 0000000000..861d76811d --- /dev/null +++ b/debian/postinst @@ -0,0 +1,8 @@ +#! /bin/bash + +if [ "$1" = configure ] && which add-shell >/dev/null +then + add-shell /usr/bin/nu +fi + +exit 0 diff --git a/debian/postrm b/debian/postrm new file mode 100644 index 0000000000..1e4655c7be --- /dev/null +++ b/debian/postrm @@ -0,0 +1,17 @@ +#!/bin/sh + +set -e + +case "$1" in + upgrade|failed-upgrade|abort-install|abort-upgrade) + ;; + remove|purge|disappear) + if which remove-shell >/dev/null && [ -f /etc/shells ]; then + remove-shell /usr/bin/nu + fi + ;; + *) + echo "postrm called with unknown argument \`$1'" >&2 + exit 1 + ;; +esac diff --git a/debian/rules b/debian/rules new file mode 100755 index 0000000000..e1c367c123 --- /dev/null +++ b/debian/rules @@ -0,0 +1,25 @@ +#!/usr/bin/make -f +# See debhelper(7) (uncomment to enable) +# output every command that modifies files on the build system. 
+#export DH_VERBOSE = 1 + + +# see FEATURE AREAS in dpkg-buildflags(1) +#export DEB_BUILD_MAINT_OPTIONS = hardening=+all + +# see ENVIRONMENT in dpkg-buildflags(1) +# package maintainers to append CFLAGS +#export DEB_CFLAGS_MAINT_APPEND = -Wall -pedantic +# package maintainers to append LDFLAGS +#export DEB_LDFLAGS_MAINT_APPEND = -Wl,--as-needed + + +%: + dh $@ + + +# dh_make generated override targets +# This is example for Cmake (See https://bugs.debian.org/641051 ) +#override_dh_auto_configure: +# dh_auto_configure -- # -DCMAKE_LIBRARY_PATH=$(DEB_HOST_MULTIARCH) + diff --git a/debian/source/format b/debian/source/format new file mode 100644 index 0000000000..163aaf8d82 --- /dev/null +++ b/debian/source/format @@ -0,0 +1 @@ +3.0 (quilt) diff --git a/docker/Dockerfile b/docker/Dockerfile index d8bc40f657..ffb1e5377d 100644 --- a/docker/Dockerfile +++ b/docker/Dockerfile @@ -1,5 +1,9 @@ -ARG FROMTAG=latest +ARG FROMTAG=latest FROM quay.io/nushell/nu-base:${FROMTAG} as base -FROM rust:1.37-slim +FROM ubuntu:18.04 COPY --from=base /usr/local/bin/nu /usr/local/bin/nu +ENV DEBIAN_FRONTEND noninteractive +RUN apt-get update && apt-get install -y libssl-dev \ + pkg-config ENTRYPOINT ["nu"] +CMD ["-l", "info"] diff --git a/docker/Dockerfile.nu-base b/docker/Dockerfile.nu-base index b322efb5b2..1a9e83a11e 100644 --- a/docker/Dockerfile.nu-base +++ b/docker/Dockerfile.nu-base @@ -1,4 +1,4 @@ -FROM rust:1.37-slim +FROM ubuntu:18.04 # docker build -f docker/Dockerfile.nu-base -t nushell/nu-base . # docker run -it nushell/nu-base @@ -6,13 +6,20 @@ FROM rust:1.37-slim ENV DEBIAN_FRONTEND noninteractive RUN apt-get update && apt-get install -y libssl-dev \ libxcb-composite0-dev \ - libx11-dev \ - pkg-config - -RUN USER=root cargo new --bin /code + pkg-config \ + curl +ARG RELEASE=false WORKDIR /code -ADD . /code -RUN cargo build --release && cargo run --release -RUN cp target/release/nu /usr/local/bin +COPY ./rust-toolchain ./rust-toolchain +RUN curl https://sh.rustup.rs -sSf | sh -s -- -y --no-modify-path --default-toolchain `cat rust-toolchain` +ENV PATH=/root/.cargo/bin:$PATH +COPY . 
/code +RUN echo "##vso[task.prependpath]/root/.cargo/bin" && \ + rustc -Vv && \ + if $RELEASE; then cargo build --release; \ + cp target/release/nu /usr/local/bin; \ + else cargo build; \ + cp target/debug/nu /usr/local/bin; fi; ENTRYPOINT ["nu"] +CMD ["-l", "info"] diff --git a/docker/Package.Dockerfile b/docker/Package.Dockerfile new file mode 100644 index 0000000000..a9040d2900 --- /dev/null +++ b/docker/Package.Dockerfile @@ -0,0 +1,7 @@ +ARG base +FROM ${base} + +ARG artifact +COPY ${artifact} /bin/ + +ENTRYPOINT ["/bin/nu"] \ No newline at end of file diff --git a/docker/Package.glibc-busybox.Dockerfile b/docker/Package.glibc-busybox.Dockerfile new file mode 100644 index 0000000000..6191b5f04c --- /dev/null +++ b/docker/Package.glibc-busybox.Dockerfile @@ -0,0 +1,15 @@ +ARG base +FROM debian:stable-slim AS patch +FROM ${base} + +ARG artifact +COPY ${artifact} /bin/ + +COPY --from=patch \ + /lib/x86_64-linux-gnu/libz.so.1 \ + /lib/x86_64-linux-gnu/libdl.so.2 \ + /lib/x86_64-linux-gnu/librt.so.1 \ + /lib/x86_64-linux-gnu/libgcc_s.so.1 \ + /lib/x86_64-linux-gnu/ + +ENTRYPOINT ["/bin/nu"] \ No newline at end of file diff --git a/docker/Package.glibc-distroless.Dockerfile b/docker/Package.glibc-distroless.Dockerfile new file mode 100644 index 0000000000..42768fc08c --- /dev/null +++ b/docker/Package.glibc-distroless.Dockerfile @@ -0,0 +1,12 @@ +ARG base +FROM debian:stable-slim AS patch +FROM ${base} + +ARG artifact +COPY ${artifact} /bin/ + +COPY --from=patch \ + /lib/x86_64-linux-gnu/libz.so.1 \ + /lib/x86_64-linux-gnu/ + +ENTRYPOINT ["/bin/nu"] \ No newline at end of file diff --git a/docker/docker-compose.package.yml b/docker/docker-compose.package.yml new file mode 100644 index 0000000000..9be36544eb --- /dev/null +++ b/docker/docker-compose.package.yml @@ -0,0 +1,11 @@ +version: '3' + +services: + nushell: + image: ${REGISTRY}/nu:${TAG} + build: + context: .. + dockerfile: docker/Package${PATCH}.Dockerfile + args: + base: ${BASE_IMAGE} + artifact: ${NU_BINS} diff --git a/docker/packaging/Dockerfile.ubuntu-bionic b/docker/packaging/Dockerfile.ubuntu-bionic new file mode 100644 index 0000000000..144f7b421e --- /dev/null +++ b/docker/packaging/Dockerfile.ubuntu-bionic @@ -0,0 +1,17 @@ +# docker build -f docker/packaging/Dockerfile.ubuntu-bionic . + +ARG FROMTAG=latest +FROM quay.io/nushell/nu-base:${FROMTAG} + +RUN apt-get update && apt-get install -y \ + devscripts \ + debhelper + +COPY debian /code/debian + +RUN rustc -Vv && cargo build --release && \ + cp README.md debian/README.Debian && \ + debuild -b -us -uc -i && \ + dpkg -i ../nu_0.2.0-1_amd64.deb && \ + chsh -s /usr/bin/nu && \ + echo 'ls | get name | echo $it' | /usr/bin/nu \ No newline at end of file diff --git a/docker/packaging/README.md b/docker/packaging/README.md new file mode 100644 index 0000000000..e825c2780f --- /dev/null +++ b/docker/packaging/README.md @@ -0,0 +1,55 @@ +# Packaging + +This directory contains docker images used for creating packages for different distribution. + +## How to use this docker files? + +Start with: + +```bash +$ docker build -f docker/packaging/Dockerfile.ubuntu-bionic -t nushell/package:ubuntu-bionic . +``` + +after building the image please run container: + +```bash +$ docker run -td --rm --name nushell_package_ubuntu_bionic nushell/package:ubuntu-bionic +``` + +and copy deb package from inside: + +```bash +$ docker cp nushell_package_ubuntu_bionic:/nu_0.2.0-1_amd64.deb . 
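+# (optional) quick sanity check of the package metadata on the host (assumes dpkg-deb is installed locally)
+$ dpkg-deb --info nu_0.2.0-1_amd64.deb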
+``` + +or shell inside, and test install: + +```bash +$ docker exec -it nushell_package_ubuntu_bionic bash +$ dpkg -i /nu_0.2.0-1_amd64.deb + +(Reading database ... 25656 files and directories currently installed.) +Preparing to unpack /nu_0.2.0-1_amd64.deb ... +Unpacking nu (0.2.0-1) over (0.2.0-1) ... +Setting up nu (0.2.0-1) ... +``` + +When you are finished, exit and stop the container. It will be removed since we +used `--rm`. + +```bash +$ docker stop nushell_package_ubuntu_bionic +``` + +## What should be done + +* We should run sbuild command to create chroot and then install dpkg. +For two reasons. First: we want to use the same tools as Ubuntu package builders +to handle the cornercases. Second: we want to test dpkg requirements. +https://github.com/nushell/nushell/issues/681 + +* File debian/changelog file should be generated based on git history. +https://github.com/nushell/nushell/issues/682 + +* Building package and nu version should be parametrized. +https://github.com/nushell/nushell/issues/683 \ No newline at end of file diff --git a/docs/commands/README.md b/docs/commands/README.md new file mode 100644 index 0000000000..68ef658cae --- /dev/null +++ b/docs/commands/README.md @@ -0,0 +1,25 @@ +# How do I get started? + +Pick any command from the checklist and write a comment acknowledging you started work. + +# Instructions for documenting a Nu command of your choosing + +Name the file after the command, like so: + +`command.md` + +Example: If you want to add documentation for the Nu command `enter`, create a file named `enter.md`, write documentation, save it at `/docs/commands/[your_command_picked].md` as and create your pull request. + +# What kind of documentation should I write? + +Anything you want that you believe it *best* documents the command and the way you would like to see it. Here are some of our ideas of documentation we would *love* to see (feel free to add yours): + +* Examples of using the command (max creativity welcomed!) +* Description of the command. +* Command usage. + +# Anything else? + +Of course! (These are drafts) so feel free to leave feedback and suggestions in the same file. + +Happy Documenting. diff --git a/docs/commands/add.md b/docs/commands/add.md new file mode 100644 index 0000000000..f3f080859b --- /dev/null +++ b/docs/commands/add.md @@ -0,0 +1,28 @@ +# add + +This command adds a column to any table output. The first parameter takes the heading, the second parameter takes the value for all the rows. 
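+## Usage
+
+A sketch of the general form, using the placeholder notation from the other command docs:
+
+```shell
+> [input-command] | add [heading] [value]
+```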
+ +## Examples + +```shell +> ls | add is_on_a_computer yes_obviously +━━━┯━━━━━━━━━━━━━━━━━━━━━━━━━━━━┯━━━━━━┯━━━━━━━━━━┯━━━━━━━━┯━━━━━━━━━━━┯━━━━━━━━━━━┯━━━━━━━━━━━━━━━━━━ + # │ name │ type │ readonly │ size │ accessed │ modified │ is_on_a_computer +───┼────────────────────────────┼──────┼──────────┼────────┼───────────┼───────────┼────────────────── + 0 │ zeusiscrazy.txt │ File │ │ 556 B │ a day ago │ a day ago │ yes_obviously + 1 │ coww.txt │ File │ │ 24 B │ a day ago │ a day ago │ yes_obviously + 2 │ randomweirdstuff.txt │ File │ │ 197 B │ a day ago │ a day ago │ yes_obviously + 3 │ abaracadabra.txt │ File │ │ 401 B │ a day ago │ a day ago │ yes_obviously + 4 │ youshouldeatmorecereal.txt │ File │ │ 768 B │ a day ago │ a day ago │ yes_obviously +━━━┷━━━━━━━━━━━━━━━━━━━━━━━━━━━━┷━━━━━━┷━━━━━━━━━━┷━━━━━━━━┷━━━━━━━━━━━┷━━━━━━━━━━━┷━━━━━━━━━━━━━━━━━━ +``` + +```shell +> shells | add os linux_on_this_machine +━━━┯━━━┯━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━━━━━━━━ + # │ │ name │ path │ os +───┼───┼────────────┼────────────────────────────────┼─────────────────────── + 0 │ X │ filesystem │ /home/shaurya/stuff/expr/stuff │ linux_on_this_machine + 1 │ │ filesystem │ / │ linux_on_this_machine +━━━┷━━━┷━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━━━━━━━━ +``` diff --git a/docs/commands/average.md b/docs/commands/average.md new file mode 100644 index 0000000000..d4095e518f --- /dev/null +++ b/docs/commands/average.md @@ -0,0 +1,45 @@ +# average +This command allows you to calculate the average of values in a column. + +## Examples +To get the average of the file sizes in a directory, simply pipe the size column from the ls command to the average command. + +```shell +> ls | get size | average +━━━━━━━━━ + +━━━━━━━━━ +2282.727272727273 +━━━━━━━━━ +``` + +```shell +> pwd | split-row / | size | get chars | average +━━━━━━━━━ + +━━━━━━━━━ +5.250000000000000 +━━━━━━━━━ +``` + +Note that average only works for integer and byte values. If the shell doesn't recognize the values in a column as one of those types, it will return an error. +One way to solve this is to convert each row to an integer when possible and then pipe the result to `average` + +```shell +> open tests/fixtures/formats/caco3_plastics.csv | get tariff_item | average +error: Unrecognized type in stream: Primitive(String("2509000000")) +- shell:1:0 +1 | open tests/fixtures/formats/caco3_plastics.csv | get tariff_item | average + | ^^^^ source +``` + +```shell +> open tests/fixtures/formats/caco3_plastics.csv | get tariff_item | str --to-int | average +━━━━━━━━━━━━━━━━━━━ + +─────────────────── + 3239404444.000000 +━━━━━━━━━━━━━━━━━━━ +``` + + diff --git a/docs/commands/cd.md b/docs/commands/cd.md new file mode 100644 index 0000000000..2e5d933f47 --- /dev/null +++ b/docs/commands/cd.md @@ -0,0 +1,33 @@ +# cd + +If you didn't already know, the `cd` command is very simple. It stands for 'change directory' and it does exactly that. It changes the current directory to the one specified. If no directory is specified, it takes you to the home directory. Additionally, using `cd ..` takes you to the parent directory. + +## Examples + +```shell +/home/username> cd Desktop +/home/username/Desktop> now your current directory has been changed +``` + +```shell +/home/username/Desktop/nested/folders> cd .. +/home/username/Desktop/nested> cd .. +/home/username/Desktop> cd ../Documents/school_related +/home/username/Documents/school_related> cd ../../.. 
+/home/> +``` + +```shell +/home/username/Desktop/super/duper/crazy/nested/folders> cd +/home/username> cd ../../usr +/usr> cd +/home/username> +``` + +Using `cd -` will take you to the previous directory: + +```shell +/home/username/Desktop/super/duper/crazy/nested/folders> cd +/home/username> cd - +/home/username/Desktop/super/duper/crazy/nested/folders> cd +``` diff --git a/docs/commands/date.md b/docs/commands/date.md new file mode 100644 index 0000000000..4263fd7e37 --- /dev/null +++ b/docs/commands/date.md @@ -0,0 +1,34 @@ +# date + +Use `date` to get the current date and time. Defaults to local timezone but you can get it in UTC too. + +## Flags + + --utc + Returns the current date and time in UTC + + --local + Returns the current date and time in your local timezone + +## Examples + +```shell +> date +━━━━━━┯━━━━━━━┯━━━━━┯━━━━━━┯━━━━━━━━┯━━━━━━━━┯━━━━━━━━━━ + year │ month │ day │ hour │ minute │ second │ timezone +──────┼───────┼─────┼──────┼────────┼────────┼────────── + 2019 │ 9 │ 30 │ 21 │ 52 │ 30 │ -03:00 +━━━━━━┷━━━━━━━┷━━━━━┷━━━━━━┷━━━━━━━━┷━━━━━━━━┷━━━━━━━━━━ +> date --utc +━━━━━━┯━━━━━━━┯━━━━━┯━━━━━━┯━━━━━━━━┯━━━━━━━━┯━━━━━━━━━━ + year │ month │ day │ hour │ minute │ second │ timezone +──────┼───────┼─────┼──────┼────────┼────────┼────────── + 2019 │ 10 │ 1 │ 0 │ 52 │ 32 │ UTC +━━━━━━┷━━━━━━━┷━━━━━┷━━━━━━┷━━━━━━━━┷━━━━━━━━┷━━━━━━━━━━ +> date --local +━━━━━━┯━━━━━━━┯━━━━━┯━━━━━━┯━━━━━━━━┯━━━━━━━━┯━━━━━━━━━━ + year │ month │ day │ hour │ minute │ second │ timezone +──────┼───────┼─────┼──────┼────────┼────────┼────────── + 2019 │ 9 │ 30 │ 21 │ 52 │ 34 │ -03:00 +━━━━━━┷━━━━━━━┷━━━━━┷━━━━━━┷━━━━━━━━┷━━━━━━━━┷━━━━━━━━━━ +``` diff --git a/docs/commands/echo.md b/docs/commands/echo.md new file mode 100644 index 0000000000..d6ca3774c0 --- /dev/null +++ b/docs/commands/echo.md @@ -0,0 +1,12 @@ +# echo + +Use `echo` to repeat arguments back to the user + +## Examples + +```shell +> echo Hello world +Hello world +> echo "Hello, world!" +Hello, world! +``` \ No newline at end of file diff --git a/docs/commands/edit.md b/docs/commands/edit.md new file mode 100644 index 0000000000..5cfeeb55fe --- /dev/null +++ b/docs/commands/edit.md @@ -0,0 +1,45 @@ +# edit + +Edits an existing column on a table. First parameter is the column to edit and the second parameter is the value to put. 
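+## Usage
+
+General form (a sketch; placeholders follow the notation used in the other command docs):
+
+```shell
+> [input-command] | edit [column] [new-value]
+```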
+ +## Examples + +```shell +> ls +━━━┯━━━━━━━━━━━━━━━━━━━━━━━━━━━━┯━━━━━━┯━━━━━━━━━━┯━━━━━━━━┯━━━━━━━━━━━┯━━━━━━━━━━━ + # │ name │ type │ readonly │ size │ accessed │ modified +───┼────────────────────────────┼──────┼──────────┼────────┼───────────┼─────────── + 0 │ zeusiscrazy.txt │ File │ │ 556 B │ a day ago │ a day ago + 1 │ coww.txt │ File │ │ 24 B │ a day ago │ a day ago + 2 │ randomweirdstuff.txt │ File │ │ 197 B │ a day ago │ a day ago + 3 │ abaracadabra.txt │ File │ │ 401 B │ a day ago │ a day ago + 4 │ youshouldeatmorecereal.txt │ File │ │ 768 B │ a day ago │ a day ago +━━━┷━━━━━━━━━━━━━━━━━━━━━━━━━━━━┷━━━━━━┷━━━━━━━━━━┷━━━━━━━━┷━━━━━━━━━━━┷━━━━━━━━━━━ +> ls | edit modified neverrrr +━━━┯━━━━━━━━━━━━━━━━━━━━━━━━━━━━┯━━━━━━┯━━━━━━━━━━┯━━━━━━━━┯━━━━━━━━━━━┯━━━━━━━━━━ + # │ name │ type │ readonly │ size │ accessed │ modified +───┼────────────────────────────┼──────┼──────────┼────────┼───────────┼────────── + 0 │ zeusiscrazy.txt │ File │ │ 556 B │ a day ago │ neverrrr + 1 │ coww.txt │ File │ │ 24 B │ a day ago │ neverrrr + 2 │ randomweirdstuff.txt │ File │ │ 197 B │ a day ago │ neverrrr + 3 │ abaracadabra.txt │ File │ │ 401 B │ a day ago │ neverrrr + 4 │ youshouldeatmorecereal.txt │ File │ │ 768 B │ a day ago │ neverrrr +━━━┷━━━━━━━━━━━━━━━━━━━━━━━━━━━━┷━━━━━━┷━━━━━━━━━━┷━━━━━━━━┷━━━━━━━━━━━┷━━━━━━━━━━ +``` + +```shell +> shells +━━━┯━━━┯━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ + # │ │ name │ path +───┼───┼────────────┼──────────────────────────────── + 0 │ X │ filesystem │ /home/username/stuff/expr/stuff + 1 │ │ filesystem │ / +━━━┷━━━┷━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ +> shells | edit " " X | edit path / +━━━┯━━━┯━━━━━━━━━━━━┯━━━━━━ + # │ │ name │ path +───┼───┼────────────┼────── + 0 │ X │ filesystem │ / + 1 │ X │ filesystem │ / +━━━┷━━━┷━━━━━━━━━━━━┷━━━━━━ +``` diff --git a/docs/commands/enter.md b/docs/commands/enter.md new file mode 100644 index 0000000000..426fe0ec4b --- /dev/null +++ b/docs/commands/enter.md @@ -0,0 +1,39 @@ +# enter + +This command creates a new shell and begin at this path. + +## Examples + +```shell +/home/foobar> cat user.json +{ + "Name": "Peter", + "Age": 30, + "Telephone": 88204828, + "Country": "Singapore" +} +/home/foobar> enter user.json +/> ls +━━━━━━━┯━━━━━┯━━━━━━━━━━━┯━━━━━━━━━━━ + Name │ Age │ Telephone │ Country +───────┼─────┼───────────┼─────────── + Peter │ 30 │ 88204828 │ Singapore +━━━━━━━┷━━━━━┷━━━━━━━━━━━┷━━━━━━━━━━━ +/> exit +/home/foobar> +``` + +It also provides the ability to work with multiple directories at the same time. This command will allow you to create a new "shell" and enter it at the specified path. You can toggle between this new shell and the original shell with the `p` (for previous) and `n` (for next), allowing you to navigate around a ring buffer of shells. Once you're done with a shell, you can `exit` it and remove it from the ring buffer. 
+ +```shell +/> enter /tmp +/tmp> enter /usr +/usr> enter /bin +/bin> enter /opt +/opt> p +/bin> p +/usr> p +/tmp> p +/> n +/tmp> +``` diff --git a/docs/commands/env.md b/docs/commands/env.md new file mode 100644 index 0000000000..5dd08fac51 --- /dev/null +++ b/docs/commands/env.md @@ -0,0 +1,27 @@ +# env + +The `env` command prints to terminal the environment of nushell + +This includes +- cwd : the path to the current working the directory (`cwd`), +- home : the path to the home directory +- config : the path to the config file for nushell +- history : the path to the nushell command history +- temp : the path to the temp file +- vars : descriptor variable for the table + +`env` does not take any arguments, and ignores any arguments given. + + +## Examples - + + +```shell +/home/username/mynushell/docs/commands(master)> env +━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┯━━━━━━┯━━━━━━━━━━━━━━━━ + cwd │ home │ config │ history │ temp │ vars +────────────────────────────────────────┼────────────────┼───────────────────────────────────────┼────────────────────────────────────────────┼──────┼──────────────── + /home/username/mynushell/docs/commands │ /home/username │ /home/username/.config/nu/config.toml │ /home/username/.local/share/nu/history.txt │ /tmp │ [table: 1 row] +━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┷━━━━━━┷━━━━━━━━━━━━━━━━ +``` + diff --git a/docs/commands/exit.md b/docs/commands/exit.md new file mode 100644 index 0000000000..0238204f28 --- /dev/null +++ b/docs/commands/exit.md @@ -0,0 +1,30 @@ +# exit + +Exits the nu shell. If you have multiple nu shells, use `exit --now` to exit all of them. 
+ +## Examples + +```shell +> exit +``` + +``` +/home/username/stuff/books> shells +---+---+------------+---------------------------- + # | | name | path +---+---+------------+---------------------------- + 0 | | filesystem | /home/username/stuff/notes + 1 | | filesystem | /home/username/stuff/videos + 2 | X | filesystem | /home/username/stuff/books +---+---+------------+---------------------------- +/home/username/stuff/books> exit +/home/username/stuff/videos> shells +---+---+------------+---------------------------- + # | | name | path +---+---+------------+---------------------------- + 0 | | filesystem | /home/username/stuff/notes + 1 | X | filesystem | /home/username/stuff/videos +---+---+------------+---------------------------- +/home/username/stuff/videos> exit --now +exits both the shells +``` diff --git a/docs/commands/fetch.md b/docs/commands/fetch.md new file mode 100644 index 0000000000..8d81e96044 --- /dev/null +++ b/docs/commands/fetch.md @@ -0,0 +1,32 @@ +# fetch + +This command loads from a URL into a cell, convert it to table if possible (avoid by appending `--raw` flag) + +## Examples + +```shell +> fetch http://headers.jsontest.com +━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┯━━━━━━━━┯━━━━━━━━━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━━━━━━━━━━ + X-Cloud-Trace-Context │ Accept │ Host │ Content-Length │ user-agent +───────────────────────────────────────────────────────┼────────┼──────────────────────┼────────────────┼───────────────────────── + aeee1a8abf08820f6fe19d114dc3bb87/16772233176633589121 │ */* │ headers.jsontest.com │ 0 │ curl/7.54.0 isahc/0.7.1 +━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┷━━━━━━━━┷━━━━━━━━━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━━━━━━━━━━ +> fetch http://headers.jsontest.com --raw +{ + "X-Cloud-Trace-Context": "aeee1a8abf08820f6fe19d114dc3bb87/16772233176633589121", + "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3", + "Upgrade-Insecure-Requests": "1", + "User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/77.0.3865.90 Safari/537.36", + "Host": "headers.jsontest.com", + "Accept-Language": "en-GB,en-US;q=0.9,en;q=0.8" +} +``` + +```shell +> fetch https://www.jonathanturner.org/feed.xml +━━━━━━━━━━━━━━━━ + rss +──────────────── + [table: 1 row] +━━━━━━━━━━━━━━━━ +``` \ No newline at end of file diff --git a/docs/commands/first.md b/docs/commands/first.md new file mode 100644 index 0000000000..d295c8fd53 --- /dev/null +++ b/docs/commands/first.md @@ -0,0 +1,28 @@ +# first + +Use `first` to retrieve the first "n" rows of a table. `first` has a required amount parameter that indicates how many rows you would like returned. If more than one row is returned, an index column will be included showing the row number. 
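+## Usage
+
+General form (a sketch; `amount` is the required row count described above):
+
+```shell
+> [input-command] | first [amount]
+```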
+ +## Examples + +```shell +> ps | first 1 +━━━━━━━┯━━━━━━━━━━━━━━┯━━━━━━━━━┯━━━━━━━━━━━━━━━━━━━ + pid │ name │ status │ cpu +───────┼──────────────┼─────────┼─────────────────── + 60358 │ nu_plugin_ps │ Running │ 5.399802999999999 +━━━━━━━┷━━━━━━━━━━━━━━┷━━━━━━━━━┷━━━━━━━━━━━━━━━━━━━ +``` + +```shell +> ps | first 5 +━━━┯━━━━━━━┯━━━━━━━━━━━━━━┯━━━━━━━━━┯━━━━━━━━━━━━━━━━━━━ + # │ pid │ name │ status │ cpu +───┼───────┼──────────────┼─────────┼─────────────────── + 0 │ 60754 │ nu_plugin_ps │ Running │ 4.024156000000000 + 1 │ 60107 │ quicklookd │ Running │ 0.000000000000000 + 2 │ 59356 │ nu │ Running │ 0.000000000000000 + 3 │ 59216 │ zsh │ Running │ 0.000000000000000 + 4 │ 59162 │ vim │ Running │ 0.000000000000000 +━━━┷━━━━━━━┷━━━━━━━━━━━━━━┷━━━━━━━━━┷━━━━━━━━━━━━━━━━━━━ +``` + diff --git a/docs/commands/from-csv.md b/docs/commands/from-csv.md new file mode 100644 index 0000000000..86d309d86b --- /dev/null +++ b/docs/commands/from-csv.md @@ -0,0 +1,47 @@ +# from-csv + +Converts csv data into table. Use this when nushell cannot dertermine the input file extension. + +## Example +Let's say we have the following file : +```shell +> cat pets.txt +animal, name, age +cat, Tom, 7 +dog, Alfred, 10 +chameleon, Linda, 1 +``` + +`pets.txt` is actually a .csv file but it has the .txt extension, `open` is not able to convert it into a table : + +```shell +> open pets.txt +animal, name, age +cat, Tom, 7 +dog, Alfred, 10 +chameleon, Linda, 1 +``` + +To get a table from `pets.txt` we need to use the `from-csv` command : + +```shell +> open pets.txt | from-csv +━━━┯━━━━━━━━━━━┯━━━━━━━━━┯━━━━━━ + # │ animal │ name │ age +───┼───────────┼─────────┼────── + 0 │ cat │ Tom │ 7 + 1 │ dog │ Alfred │ 10 + 2 │ chameleon │ Linda │ 1 +━━━┷━━━━━━━━━━━┷━━━━━━━━━┷━━━━━━ +``` + +To ignore the csv headers use `--headerless` : +```shell +━━━┯━━━━━━━━━━━┯━━━━━━━━━┯━━━━━━━━━ + # │ Column1 │ Column2 │ Column3 +───┼───────────┼─────────┼───────── + 0 │ dog │ Alfred │ 10 + 1 │ chameleon │ Linda │ 1 +━━━┷━━━━━━━━━━━┷━━━━━━━━━┷━━━━━━━━━ +``` + diff --git a/docs/commands/from-toml.md b/docs/commands/from-toml.md new file mode 100644 index 0000000000..d3f3364c78 --- /dev/null +++ b/docs/commands/from-toml.md @@ -0,0 +1,23 @@ +# from-toml +Converts toml data into table. Use this when nushell cannot dertermine the input file extension. + +## Example +Let's say we have the following Rust .lock file : +```shell +> open Cargo.lock +# This file is automatically @generated by Cargo. +# It is not intended for manual editing. [[package]] name = "adler32" version = "1.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" +... +``` + +The "Cargo.lock" file is actually a .toml file, but the file extension isn't .toml. That's okay, we can use the `from-toml` command : + + +```shell +> open Cargo.lock | from-toml +━━━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━━━━ + metadata │ package +────────────────┼─────────────────── + [table: 1 row] │ [table: 154 rows] +━━━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━━━━ +``` \ No newline at end of file diff --git a/docs/commands/help.md b/docs/commands/help.md new file mode 100644 index 0000000000..a232910c72 --- /dev/null +++ b/docs/commands/help.md @@ -0,0 +1,47 @@ +# help + +Use `help` for more information on a command. +Use `help commands` to list all availble commands. +Use `help ` to display help about a particular command. + +## Examples + +```shell +> help +Welcome to Nushell. + +Here are some tips to help you get started. 
+ * help commands - list all available commands + * help - display help about a particular command + +You can also learn more at https://book.nushell.sh +``` + +```shell +> help commands +━━━━┯━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ + # │ name │ description +────┼──────────────┼──────────────────────────────────────────────────────────────────────────────────────── + 0 │ add │ Add a new field to the table. + 1 │ autoview │ View the contents of the pipeline as a table or list. + 2 │ cd │ Change to a new path. + 3 │ config │ Configuration management. + 4 │ cp │ Copy files. + 5 │ date │ Get the current datetime. +... + 70 │ trim │ Trim leading and following whitespace from text data. + 71 │ version │ Display Nu version + 72 │ where │ Filter table to match the condition. + 73 │ which │ Finds a program file. +━━━━┷━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ +``` + +```shell +> help cd +Change to a new path. + +Usage: + > cd (directory) +``` + + diff --git a/docs/commands/inc.md b/docs/commands/inc.md new file mode 100644 index 0000000000..c6dcb8d806 --- /dev/null +++ b/docs/commands/inc.md @@ -0,0 +1,31 @@ +# inc + +This command increments the value of variable by one. + +## Examples + +```shell +> open rustfmt.toml +--------- + edition +--------- + 2018 +--------- +> open rustfmt.toml | inc edition +--------- + edition +--------- + 2019 +--------- +``` + +```shell +> open Cargo.toml | get package.version +0.1.3 +> open Cargo.toml | inc package.version --major | get package.version +1.0.0 +> open Cargo.toml | inc package.version --minor | get package.version +0.2.0 +> open Cargo.toml | inc package.version --patch | get package.version +0.1.4 +``` \ No newline at end of file diff --git a/docs/commands/last.md b/docs/commands/last.md new file mode 100644 index 0000000000..bc7a55f12b --- /dev/null +++ b/docs/commands/last.md @@ -0,0 +1,29 @@ +# last + +Use `last` to retrieve the last "n" rows of a table. `last` has a required amount parameter that indicates how many rows you would like returned. If more than one row is returned, an index column will be included showing the row number. `last` does not alter the order of the rows of the table. + +## Examples + +```shell +> ps | last 1 +━━━━━┯━━━━━━━━━━━━━┯━━━━━━━━━┯━━━━━━━━━━━━━━━━━━━ + pid │ name │ status │ cpu +─────┼─────────────┼─────────┼─────────────────── + 121 │ loginwindow │ Running │ 0.000000000000000 +━━━━━┷━━━━━━━━━━━━━┷━━━━━━━━━┷━━━━━━━━━━━━━━━━━━━ +``` + +```shell +> ps | last 5 +━━━┯━━━━━┯━━━━━━━━━━━━━━━━┯━━━━━━━━━┯━━━━━━━━━━━━━━━━━━━ + # │ pid │ name │ status │ cpu +───┼─────┼────────────────┼─────────┼─────────────────── + 0 │ 360 │ CommCenter │ Running │ 0.000000000000000 + 1 │ 358 │ distnoted │ Running │ 0.000000000000000 + 2 │ 356 │ UserEventAgent │ Running │ 0.000000000000000 + 3 │ 354 │ cfprefsd │ Running │ 0.000000000000000 + 4 │ 121 │ loginwindow │ Running │ 0.000000000000000 +━━━┷━━━━━┷━━━━━━━━━━━━━━━━┷━━━━━━━━━┷━━━━━━━━━━━━━━━━━━━ +``` + + diff --git a/docs/commands/lines.md b/docs/commands/lines.md new file mode 100644 index 0000000000..7153900e3a --- /dev/null +++ b/docs/commands/lines.md @@ -0,0 +1,28 @@ +# lines +This command takes a string from a pipeline as input, and returns a table where each line of the input string is a row in the table. Empty lines are ignored. This command is capable of feeding other commands, such as `nth`, with its output. 
+ +## Usage +```shell +> [input-command] | lines +``` + +## Examples +Basic usage: +```shell +> printf "Hello\nWorld!\nLove, nushell." | lines +━━━┯━━━━━━━━━━━━━━━━ + # │ value +───┼──────────────── + 0 │ Hello + 1 │ World! + 2 │ Love, nushell. +━━━┷━━━━━━━━━━━━━━━━ +``` + +One useful application is piping the contents of file into `lines`. This example extracts a certain line from a given file. +```shell +> cat lines.md | lines | nth 6 +## Examples +``` + +Similarly to this example, `lines` can be used to extract certain portions of or apply transformations to data returned by any program which returns a string. diff --git a/docs/commands/nth.md b/docs/commands/nth.md new file mode 100644 index 0000000000..0c8ce57f0c --- /dev/null +++ b/docs/commands/nth.md @@ -0,0 +1,31 @@ +# nth + +This command returns the nth row of a table, starting from 0. +If the number given is less than 0 or more than the number of rows, nothing is returned. + +## Usage +```shell +> [input-command] | nth [row-number] +``` + +## Examples +```shell +> ls +━━━┯━━━━━━━━━━━━┯━━━━━━━━━━━┯━━━━━━━━━━┯━━━━━━━━┯━━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━━ + # │ name │ type │ readonly │ size │ accessed │ modified +───┼────────────┼───────────┼──────────┼────────┼───────────────┼─────────────── + 0 │ Cargo.toml │ File │ │ 239 B │ 2 minutes ago │ 2 minutes ago + 1 │ .git │ Directory │ │ 4.1 KB │ 2 minutes ago │ 2 minutes ago + 2 │ .gitignore │ File │ │ 19 B │ 2 minutes ago │ 2 minutes ago + 3 │ src │ Directory │ │ 4.1 KB │ 2 minutes ago │ 2 minutes ago +━━━┷━━━━━━━━━━━━┷━━━━━━━━━━━┷━━━━━━━━━━┷━━━━━━━━┷━━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━━ + +> ls | nth 0 +━━━━━━━━━━━━┯━━━━━━┯━━━━━━━━━━┯━━━━━━━━┯━━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━━ + name │ type │ readonly │ size │ accessed │ modified +────────────┼──────┼──────────┼────────┼───────────────┼─────────────── + Cargo.toml │ File │ │ 239 B │ 2 minutes ago │ 2 minutes ago +━━━━━━━━━━━━┷━━━━━━┷━━━━━━━━━━┷━━━━━━━━┷━━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━━ + +> ls | nth 5 +``` \ No newline at end of file diff --git a/docs/commands/open.md b/docs/commands/open.md new file mode 100644 index 0000000000..61b5d1748b --- /dev/null +++ b/docs/commands/open.md @@ -0,0 +1,95 @@ +# open + +Loads a file into a cell, convert it to table if possible (avoid by appending `--raw` flag) + +## Example + +```shell +> cat user.yaml +- Name: Peter + Age: 30 + Telephone: 88204828 + Country: Singapore +- Name: Michael + Age: 42 + Telephone: 44002010 + Country: Spain +- Name: Will + Age: 50 + Telephone: 99521080 + Country: Germany +> open user.yaml +━━━┯━━━━━━━━━┯━━━━━┯━━━━━━━━━━━┯━━━━━━━━━━━ + # │ Name │ Age │ Telephone │ Country +───┼─────────┼─────┼───────────┼─────────── + 0 │ Peter │ 30 │ 88204828 │ Singapore + 1 │ Michael │ 42 │ 44002010 │ Spain + 2 │ Will │ 50 │ 99521080 │ Germany +━━━┷━━━━━━━━━┷━━━━━┷━━━━━━━━━━━┷━━━━━━━━━━━ +> open user.yaml --raw +- Name: Peter + Age: 30 + Telephone: 88204828 + Country: Singapore +- Name: Michael + Age: 42 + Telephone: 44002010 + Country: Spain +- Name: Will + Age: 50 + Telephone: 99521080 + Country: Germany +``` + +```shell +> cat user.json +[ + { + "Name": "Peter", + "Age": 30, + "Telephone": 88204828, + "Country": "Singapore" + }, + { + "Name": "Michael", + "Age": 42, + "Telephone": 44002010, + "Country": "Spain" + }, + { + "Name": "Will", + "Age": 50, + "Telephone": 99521080, + "Country": "Germany" + } +] +> open user.json +━━━┯━━━━━━━━━┯━━━━━┯━━━━━━━━━━━┯━━━━━━━━━━━ + # │ Name │ Age │ Telephone │ Country +───┼─────────┼─────┼───────────┼─────────── + 0 │ Peter │ 30 │ 88204828 │ Singapore + 1 │ 
Michael │ 42 │ 44002010 │ Spain + 2 │ Will │ 50 │ 99521080 │ Germany +━━━┷━━━━━━━━━┷━━━━━┷━━━━━━━━━━━┷━━━━━━━━━━━ +> open user.json --raw +[ + { + "Name": "Peter", + "Age": 30, + "Telephone": 88204828, + "Country": "Singapore" + }, + { + "Name": "Michael", + "Age": 42, + "Telephone": 44002010, + "Country": "Spain" + }, + { + "Name": "Will", + "Age": 50, + "Telephone": 99521080, + "Country": "Germany" + } +] +``` \ No newline at end of file diff --git a/docs/commands/reverse.md b/docs/commands/reverse.md new file mode 100644 index 0000000000..546f251568 --- /dev/null +++ b/docs/commands/reverse.md @@ -0,0 +1,51 @@ +# reverse + +This command reverses the order of the elements in a sorted table. + +## Examples + +```shell +> ls | sort-by name +━━━┯━━━━━━━━━━━━━━━━━━━━━━━━━━━━┯━━━━━━┯━━━━━━━━━━┯━━━━━━━━┯━━━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━ + # │ name │ type │ readonly │ size │ accessed │ modified +───┼────────────────────────────┼──────┼──────────┼────────┼────────────────┼──────────────── + 0 │ abaracadabra.txt │ File │ │ 401 B │ 23 minutes ago │ 16 minutes ago + 1 │ coww.txt │ File │ │ 24 B │ 22 minutes ago │ 17 minutes ago + 2 │ randomweirdstuff.txt │ File │ │ 197 B │ 21 minutes ago │ 18 minutes ago + 3 │ youshouldeatmorecereal.txt │ File │ │ 768 B │ 30 seconds ago │ now + 4 │ zeusiscrazy.txt │ File │ │ 556 B │ 22 minutes ago │ 18 minutes ago +━━━┷━━━━━━━━━━━━━━━━━━━━━━━━━━━━┷━━━━━━┷━━━━━━━━━━┷━━━━━━━━┷━━━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━ +> ls | sort-by name | reverse +━━━┯━━━━━━━━━━━━━━━━━━━━━━━━━━━━┯━━━━━━┯━━━━━━━━━━┯━━━━━━━━┯━━━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━ + # │ name │ type │ readonly │ size │ accessed │ modified +───┼────────────────────────────┼──────┼──────────┼────────┼────────────────┼──────────────── + 0 │ zeusiscrazy.txt │ File │ │ 556 B │ 22 minutes ago │ 19 minutes ago + 1 │ youshouldeatmorecereal.txt │ File │ │ 768 B │ 39 seconds ago │ 18 seconds ago + 2 │ randomweirdstuff.txt │ File │ │ 197 B │ 21 minutes ago │ 18 minutes ago + 3 │ coww.txt │ File │ │ 24 B │ 22 minutes ago │ 18 minutes ago + 4 │ abaracadabra.txt │ File │ │ 401 B │ 23 minutes ago │ 16 minutes ago +━━━┷━━━━━━━━━━━━━━━━━━━━━━━━━━━━┷━━━━━━┷━━━━━━━━━━┷━━━━━━━━┷━━━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━ +``` + +```shell +> ls | sort-by size +━━━┯━━━━━━━━━━━━━━━━━━━━━━━━━━━━┯━━━━━━┯━━━━━━━━━━┯━━━━━━━━┯━━━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━ + # │ name │ type │ readonly │ size │ accessed │ modified +───┼────────────────────────────┼──────┼──────────┼────────┼────────────────┼──────────────── + 0 │ coww.txt │ File │ │ 24 B │ 22 minutes ago │ 18 minutes ago + 1 │ randomweirdstuff.txt │ File │ │ 197 B │ 21 minutes ago │ 18 minutes ago + 2 │ abaracadabra.txt │ File │ │ 401 B │ 23 minutes ago │ 16 minutes ago + 3 │ zeusiscrazy.txt │ File │ │ 556 B │ 22 minutes ago │ 19 minutes ago + 4 │ youshouldeatmorecereal.txt │ File │ │ 768 B │ a minute ago │ 26 seconds ago +━━━┷━━━━━━━━━━━━━━━━━━━━━━━━━━━━┷━━━━━━┷━━━━━━━━━━┷━━━━━━━━┷━━━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━ +> ls | sort-by size | reverse +━━━┯━━━━━━━━━━━━━━━━━━━━━━━━━━━━┯━━━━━━┯━━━━━━━━━━┯━━━━━━━━┯━━━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━ + # │ name │ type │ readonly │ size │ accessed │ modified +───┼────────────────────────────┼──────┼──────────┼────────┼────────────────┼──────────────── + 0 │ youshouldeatmorecereal.txt │ File │ │ 768 B │ a minute ago │ 32 seconds ago + 1 │ zeusiscrazy.txt │ File │ │ 556 B │ 22 minutes ago │ 19 minutes ago + 2 │ abaracadabra.txt │ File │ │ 401 B │ 23 minutes ago │ 16 minutes ago + 3 │ randomweirdstuff.txt │ File │ │ 197 B │ 21 minutes ago │ 18 minutes ago + 4 │ 
coww.txt │ File │ │ 24 B │ 22 minutes ago │ 18 minutes ago +━━━┷━━━━━━━━━━━━━━━━━━━━━━━━━━━━┷━━━━━━┷━━━━━━━━━━┷━━━━━━━━┷━━━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━ +``` diff --git a/docs/commands/shells.md b/docs/commands/shells.md new file mode 100644 index 0000000000..b9fde457b3 --- /dev/null +++ b/docs/commands/shells.md @@ -0,0 +1,26 @@ +# shells + +Lists all the active nu shells with a number/index, a name and the path. Also marks the current nu shell. + +## Examples + +``` +> shells +---+---+------------+--------------- + # | | name | path +---+---+------------+--------------- + 0 | | filesystem | /usr + 1 | | filesystem | /home + 2 | X | filesystem | /home/username +---+---+------------+--------------- +``` + +``` +/> shells +---+---+-------------------------------------------------+------------------------------------ + # | | name | path +---+---+-------------------------------------------------+------------------------------------ + 0 | | filesystem | /Users/username/Code/nushell + 1 | X | {/Users/username/Code/nushell/Cargo.toml} | / +---+---+-------------------------------------------------+------------------------------------ +``` diff --git a/docs/commands/sort-by.md b/docs/commands/sort-by.md new file mode 100644 index 0000000000..1f0f3da9ed --- /dev/null +++ b/docs/commands/sort-by.md @@ -0,0 +1,56 @@ + +# env + +The `sort-by` command sorts the table being displayed in the terminal by a chosen column(s). + +`sort-by` takes multiple arguments (being the names of columns) sorting by each argument in order. + + +## Examples - + +```shell +/home/example> ls | sort-by size +━━━┯━━━━━━┯━━━━━━┯━━━━━━━━━━┯━━━━━━━━┯━━━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━ + # │ name │ type │ readonly │ size │ accessed │ modified +───┼──────┼──────┼──────────┼────────┼────────────────┼──────────────── + 0 │ az │ File │ │ 18 B │ 4 minutes ago │ 4 minutes ago + 1 │ a │ File │ │ 18 B │ 4 minutes ago │ 38 minutes ago + 2 │ ad │ File │ │ 18 B │ 4 minutes ago │ 4 minutes ago + 3 │ ac │ File │ │ 18 B │ 4 minutes ago │ 4 minutes ago + 4 │ ab │ File │ │ 18 B │ 4 minutes ago │ 4 minutes ago + 5 │ c │ File │ │ 102 B │ 35 minutes ago │ 35 minutes ago + 6 │ d │ File │ │ 189 B │ 35 minutes ago │ 34 minutes ago + 7 │ b │ File │ │ 349 B │ 35 minutes ago │ 35 minutes ago +━━━┷━━━━━━┷━━━━━━┷━━━━━━━━━━┷━━━━━━━━┷━━━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━ +``` + +```shell +/home/example> ls | sort-by size name +━━━┯━━━━━━┯━━━━━━┯━━━━━━━━━━┯━━━━━━━━┯━━━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━ + # │ name │ type │ readonly │ size │ accessed │ modified +───┼──────┼──────┼──────────┼────────┼────────────────┼──────────────── + 0 │ a │ File │ │ 18 B │ 4 minutes ago │ 39 minutes ago + 1 │ ab │ File │ │ 18 B │ 4 minutes ago │ 4 minutes ago + 2 │ ac │ File │ │ 18 B │ 4 minutes ago │ 4 minutes ago + 3 │ ad │ File │ │ 18 B │ 4 minutes ago │ 4 minutes ago + 4 │ az │ File │ │ 18 B │ 4 minutes ago │ 4 minutes ago + 5 │ c │ File │ │ 102 B │ 36 minutes ago │ 35 minutes ago + 6 │ d │ File │ │ 189 B │ 35 minutes ago │ 35 minutes ago + 7 │ b │ File │ │ 349 B │ 36 minutes ago │ 36 minutes ago +``` + +``` +/home/example> ls | sort-by accessed +━━━┯━━━━━━┯━━━━━━┯━━━━━━━━━━┯━━━━━━━━┯━━━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━ + # │ name │ type │ readonly │ size │ accessed │ modified +───┼──────┼──────┼──────────┼────────┼────────────────┼──────────────── + 0 │ b │ File │ │ 349 B │ 37 minutes ago │ 37 minutes ago + 1 │ c │ File │ │ 102 B │ 37 minutes ago │ 37 minutes ago + 2 │ d │ File │ │ 189 B │ 37 minutes ago │ 36 minutes ago + 3 │ a │ File │ │ 18 B │ 6 minutes ago │ 40 minutes ago + 4 │ 
ab │ File │ │ 18 B │ 6 minutes ago │ 6 minutes ago + 5 │ ac │ File │ │ 18 B │ 6 minutes ago │ 6 minutes ago + 6 │ ad │ File │ │ 18 B │ 5 minutes ago │ 5 minutes ago + 7 │ az │ File │ │ 18 B │ 5 minutes ago │ 5 minutes ago +━━━┷━━━━━━┷━━━━━━┷━━━━━━━━━━┷━━━━━━━━┷━━━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━ +``` \ No newline at end of file diff --git a/docs/commands/sum.md b/docs/commands/sum.md new file mode 100644 index 0000000000..7482ca0c54 --- /dev/null +++ b/docs/commands/sum.md @@ -0,0 +1,44 @@ +# sum +This command allows you to calculate the sum of values in a column. + +## Examples +To get the sum of the file sizes in a directory, simply pipe the size column from the ls command to the sum command. + +```shell +> ls | get size | sum +━━━━━━━━━ + value +━━━━━━━━━ + 51.0 MB +━━━━━━━━━ +``` + +To get the sum of the characters that make up your present working directory. +```shell +> pwd | split-row / | size | get chars | sum +━━━━━━━━━ + +━━━━━━━━━ +21 +━━━━━━━━━ +``` + +Note that sum only works for integer and byte values. If the shell doesn't recognize the values in a column as one of those types, it will return an error. +One way to solve this is to convert each row to an integer when possible and then pipe the result to `sum` + +```shell +> open tests/fixtures/formats/caco3_plastics.csv | get tariff_item | sum +error: Unrecognized type in stream: Primitive(String("2509000000")) +- shell:1:0 +1 | open tests/fixtures/formats/caco3_plastics.csv | get tariff_item | sum + | ^^^^ source +``` + +```shell +> open tests/fixtures/formats/caco3_plastics.csv | get tariff_item | str --to-int | sum +━━━━━━━━━━━━━ + +───────────── + 29154639996 +━━━━━━━━━━━━━ +``` diff --git a/docs/commands/sys.md b/docs/commands/sys.md new file mode 100644 index 0000000000..b21a0ef219 --- /dev/null +++ b/docs/commands/sys.md @@ -0,0 +1,32 @@ +# sys + +This command gives information about the system where nu is running on. 
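+Each column in the output holds a nested table, so you can drill down to a single value with `get`. A small sketch, following the nested-path pattern shown in the `inc` docs (`get package.version`):
+
+```shell
+> sys | get host.name
+```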
+ +## Examples + +```shell +> sys +━━━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━ + host │ cpu │ disks │ mem │ net │ battery +────────────────┼────────────────┼─────────────────┼────────────────┼──────────────────┼──────────────── + [table: 1 row] │ [table: 1 row] │ [table: 3 rows] │ [table: 1 row] │ [table: 18 rows] │ [table: 1 row] +━━━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━ +> sys | get host +━━━━━━━━┯━━━━━━━━━┯━━━━━━━━━━━━━━┯━━━━━━━━┯━━━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━━━ + name │ release │ hostname │ arch │ uptime │ users +────────┼─────────┼──────────────┼────────┼────────────────┼────────────────── + Darwin │ 18.7.0 │ C02Y437GJGH6 │ x86_64 │ [table: 1 row] │ [table: 17 rows] +━━━━━━━━┷━━━━━━━━━┷━━━━━━━━━━━━━━┷━━━━━━━━┷━━━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━━━ +> sys | get cpu +━━━━━━━┯━━━━━━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━━━━ + cores │ current ghz │ min ghz │ max ghz +───────┼───────────────────┼───────────────────┼─────────────────── + 12 │ 2.600000000000000 │ 2.600000000000000 │ 2.600000000000000 +━━━━━━━┷━━━━━━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━━━━ +> sys | get mem +━━━━━━━━━┯━━━━━━━━━━┯━━━━━━━━━━━━┯━━━━━━━━━━━ + total │ free │ swap total │ swap free +─────────┼──────────┼────────────┼─────────── + 34.4 GB │ 545.0 MB │ 2.1 GB │ 723.0 MB +━━━━━━━━━┷━━━━━━━━━━┷━━━━━━━━━━━━┷━━━━━━━━━━━ +``` \ No newline at end of file diff --git a/docs/commands/to-csv.md b/docs/commands/to-csv.md new file mode 100644 index 0000000000..2be6390fa8 --- /dev/null +++ b/docs/commands/to-csv.md @@ -0,0 +1,80 @@ +# to-csv + +Converts table data into csv text. + +## Example + +```shell +> shells +━━━┯━━━┯━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━━━━━━━━━ + # │ │ name │ path +───┼───┼────────────┼──────────────────────── + 0 │ X │ filesystem │ /home/shaurya + 1 │ │ filesystem │ /home/shaurya/Pictures + 2 │ │ filesystem │ /home/shaurya/Desktop +━━━┷━━━┷━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━━━━━━━━━ +> shells | to-csv + ,name,path +X,filesystem,/home/shaurya + ,filesystem,/home/shaurya/Pictures + ,filesystem,/home/shaurya/Desktop +``` + +```shell +> open caco3_plastics.csv +━━━┯━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━┯━━━━━━━━━━━━━┯━━━━━━━━━━━━━━┯━━━━━━━━━━┯━━━━━━━━━━━━┯━━━━━━━━━━━━┯━━━━━━━━━━━━┯━━━━━━━━━━━┯━━━━━━━━━━━┯━━━━━━━━━━━━━━ + # │ importer │ shipper │ tariff_item │ name │ origin │ shipped_at │ arrived_at │ net_weight │ fob_price │ cif_price │ cif_per_net_ + │ │ │ │ │ │ │ │ │ │ │ weight +───┼──────────────┼──────────────┼─────────────┼──────────────┼──────────┼────────────┼────────────┼────────────┼───────────┼───────────┼────────────── + 0 │ PLASTICOS │ S A REVERTE │ 2509000000 │ CARBONATO DE │ SPAIN │ 18/03/2016 │ 17/04/2016 │ 81,000.00 │ 14,417.58 │ 18,252.34 │ 0.23 + │ RIVAL CIA │ │ │ CALCIO TIPO │ │ │ │ │ │ │ + │ LTDA │ │ │ CALCIPORE │ │ │ │ │ │ │ + │ │ │ │ 160 T AL │ │ │ │ │ │ │ + 1 │ MEXICHEM │ OMYA ANDINA │ 2836500000 │ CARBONATO │ COLOMBIA │ 07/07/2016 │ 10/07/2016 │ 26,000.00 │ 7,072.00 │ 8,127.18 │ 0.31 + │ ECUADOR S.A. 
│ S A │ │ │ │ │ │ │ │ │ + 2 │ PLASTIAZUAY │ SA REVERTE │ 2836500000 │ CARBONATO DE │ SPAIN │ 27/07/2016 │ 09/08/2016 │ 81,000.00 │ 8,100.00 │ 11,474.55 │ 0.14 + │ SA │ │ │ CALCIO │ │ │ │ │ │ │ + 3 │ PLASTICOS │ AND │ 2836500000 │ CALCIUM │ TURKEY │ 04/10/2016 │ 11/11/2016 │ 100,000.00 │ 17,500.00 │ 22,533.75 │ 0.23 + │ RIVAL CIA │ ENDUSTRIYEL │ │ CARBONATE │ │ │ │ │ │ │ + │ LTDA │ HAMMADDELER │ │ ANADOLU │ │ │ │ │ │ │ + │ │ DIS TCARET │ │ ANDCARB CT-1 │ │ │ │ │ │ │ + │ │ LTD.STI. │ │ │ │ │ │ │ │ │ + 4 │ QUIMICA │ SA REVERTE │ 2836500000 │ CARBONATO DE │ SPAIN │ 24/06/2016 │ 12/07/2016 │ 27,000.00 │ 3,258.90 │ 5,585.00 │ 0.21 + │ COMERCIAL │ │ │ CALCIO │ │ │ │ │ │ │ + │ QUIMICIAL │ │ │ │ │ │ │ │ │ │ + │ CIA. LTDA. │ │ │ │ │ │ │ │ │ │ + 5 │ PICA │ OMYA ANDINA │ 3824909999 │ CARBONATO DE │ COLOMBIA │ 01/01/1900 │ 18/01/2016 │ 66,500.00 │ 12,635.00 │ 18,670.52 │ 0.28 + │ PLASTICOS │ S.A │ │ CALCIO │ │ │ │ │ │ │ + │ INDUSTRIALES │ │ │ │ │ │ │ │ │ │ + │ C.A. │ │ │ │ │ │ │ │ │ │ + 6 │ PLASTIQUIM │ OMYA ANDINA │ 3824909999 │ CARBONATO DE │ COLOMBIA │ 01/01/1900 │ 25/10/2016 │ 33,000.00 │ 6,270.00 │ 9,999.00 │ 0.30 + │ S.A. │ S.A NIT │ │ CALCIO │ │ │ │ │ │ │ + │ │ 830.027.386- │ │ RECUBIERTO │ │ │ │ │ │ │ + │ │ 6 │ │ CON ACIDO │ │ │ │ │ │ │ + │ │ │ │ ESTEARICO │ │ │ │ │ │ │ + │ │ │ │ OMYA CARB 1T │ │ │ │ │ │ │ + │ │ │ │ CG BBS 1000 │ │ │ │ │ │ │ + 7 │ QUIMICOS │ SIBELCO │ 3824909999 │ CARBONATO DE │ COLOMBIA │ 01/11/2016 │ 03/11/2016 │ 52,000.00 │ 8,944.00 │ 13,039.05 │ 0.25 + │ ANDINOS │ COLOMBIA SAS │ │ CALCIO │ │ │ │ │ │ │ + │ QUIMANDI │ │ │ RECUBIERTO │ │ │ │ │ │ │ + │ S.A. │ │ │ │ │ │ │ │ │ │ + 8 │ TIGRE │ OMYA ANDINA │ 3824909999 │ CARBONATO DE │ COLOMBIA │ 01/01/1900 │ 28/10/2016 │ 66,000.00 │ 11,748.00 │ 18,216.00 │ 0.28 + │ ECUADOR S.A. │ S.A NIT │ │ CALCIO │ │ │ │ │ │ │ + │ ECUATIGRE │ 830.027.386- │ │ RECUBIERTO │ │ │ │ │ │ │ + │ │ 6 │ │ CON ACIDO │ │ │ │ │ │ │ + │ │ │ │ ESTEARICO │ │ │ │ │ │ │ + │ │ │ │ OMYACARB 1T │ │ │ │ │ │ │ + │ │ │ │ CG BPA 25 NO │ │ │ │ │ │ │ +━━━┷━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━┷━━━━━━━━━━━━━┷━━━━━━━━━━━━━━┷━━━━━━━━━━┷━━━━━━━━━━━━┷━━━━━━━━━━━━┷━━━━━━━━━━━━┷━━━━━━━━━━━┷━━━━━━━━━━━┷━━━━━━━━━━━━━━ +> open caco3_plastics.csv | to-csv +importer,shipper,tariff_item,name,origin,shipped_at,arrived_at,net_weight,fob_price,cif_price,cif_per_net_weight +PLASTICOS RIVAL CIA LTDA,S A REVERTE,2509000000,CARBONATO DE CALCIO TIPO CALCIPORE 160 T AL,SPAIN,18/03/2016,17/04/2016,"81,000.00","14,417.58","18,252.34",0.23 +MEXICHEM ECUADOR S.A.,OMYA ANDINA S A,2836500000,CARBONATO,COLOMBIA,07/07/2016,10/07/2016,"26,000.00","7,072.00","8,127.18",0.31 +PLASTIAZUAY SA,SA REVERTE,2836500000,CARBONATO DE CALCIO,SPAIN,27/07/2016,09/08/2016,"81,000.00","8,100.00","11,474.55",0.14 +PLASTICOS RIVAL CIA LTDA,AND ENDUSTRIYEL HAMMADDELER DIS TCARET LTD.STI.,2836500000,CALCIUM CARBONATE ANADOLU ANDCARB CT-1,TURKEY,04/10/2016,11/11/2016,"100,000.00","17,500.00","22,533.75",0.23 +QUIMICA COMERCIAL QUIMICIAL CIA. 
LTDA.,SA REVERTE,2836500000,CARBONATO DE CALCIO,SPAIN,24/06/2016,12/07/2016,"27,000.00","3,258.90","5,585.00",0.21 +PICA PLASTICOS INDUSTRIALES C.A.,OMYA ANDINA S.A,3824909999,CARBONATO DE CALCIO,COLOMBIA,01/01/1900,18/01/2016,"66,500.00","12,635.00","18,670.52",0.28 +PLASTIQUIM S.A.,OMYA ANDINA S.A NIT 830.027.386-6,3824909999,CARBONATO DE CALCIO RECUBIERTO CON ACIDO ESTEARICO OMYA CARB 1T CG BBS 1000,COLOMBIA,01/01/1900,25/10/2016,"33,000.00","6,270.00","9,999.00",0.30 +QUIMICOS ANDINOS QUIMANDI S.A.,SIBELCO COLOMBIA SAS,3824909999,CARBONATO DE CALCIO RECUBIERTO,COLOMBIA,01/11/2016,03/11/2016,"52,000.00","8,944.00","13,039.05",0.25 +TIGRE ECUADOR S.A. ECUATIGRE,OMYA ANDINA S.A NIT 830.027.386-6,3824909999,CARBONATO DE CALCIO RECUBIERTO CON ACIDO ESTEARICO OMYACARB 1T CG BPA 25 NO,COLOMBIA,01/01/1900,28/10/2016,"66,000.00","11,748.00","18,216.00",0.28 +``` diff --git a/docs/commands/to-json.md b/docs/commands/to-json.md new file mode 100644 index 0000000000..eaf1cdb26a --- /dev/null +++ b/docs/commands/to-json.md @@ -0,0 +1,40 @@ +# to-json + +Converts table data into json text. + +## Example + +```shell +> shells +━━━┯━━━┯━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━━━━━━━━━ + # │ │ name │ path +───┼───┼────────────┼──────────────────────── + 0 │ X │ filesystem │ /home/shaurya + 1 │ │ filesystem │ /home/shaurya/Pictures + 2 │ │ filesystem │ /home/shaurya/Desktop +━━━┷━━━┷━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━━━━━━━━━ +> shells | to-json +[{" ":"X","name":"filesystem","path":"/home/shaurya"},{" ":" ","name":"filesystem","path":"/home/shaurya/Pictures"},{" ":" ","name":"filesystem","path":"/home/shaurya/Desktop"}] +``` + +```shell +> open sgml_description.json +━━━━━━━━━━━━━━━━ + glossary +──────────────── + [table: 1 row] +━━━━━━━━━━━━━━━━ +> open sgml_description.json | to-json +{"glossary":{"title":"example glossary","GlossDiv":{"title":"S","GlossList":{"GlossEntry":{"ID":"SGML","SortAs":"SGML","GlossTerm":"Standard Generalized Markup Language","Acronym":"SGML","Abbrev":"ISO 8879:1986","Height":10,"GlossDef":{"para":"A meta-markup language, used to create markup languages such as DocBook.","GlossSeeAlso":["GML","XML"]},"Sections":[101,102],"GlossSee":"markup"}}}}} +``` +We can also convert formats ! +```shell +> open jonathan.xml +━━━━━━━━━━━━━━━━ + rss +──────────────── + [table: 1 row] +━━━━━━━━━━━━━━━━ +> open jonathan.xml | to-json +{"rss":[{"channel":[{"title":["Jonathan Turner"]},{"link":["http://www.jonathanturner.org"]},{"link":[]},{"item":[{"title":["Creating crossplatform Rust terminal apps"]},{"description":["
\"Pikachu …\"\n\nLook Mom, Pikachu running in Windows CMD!\n\nPart of the adventure is not seeing the way ahead and going anyway.
\n"]},{"pubDate":["Mon, 05 Oct 2015 00:00:00 +0000"]},{"link":["http://www.jonathanturner.org/2015/10/off-to-new-adventures.html"]},{"guid":["http://www.jonathanturner.org/2015/10/off-to-new-adventures.html"]}]}]}]} +``` diff --git a/docs/commands/to-toml.md b/docs/commands/to-toml.md new file mode 100644 index 0000000000..a026520696 --- /dev/null +++ b/docs/commands/to-toml.md @@ -0,0 +1,112 @@ +# to-toml + +Converts table data into toml text. + +## Example + +```shell +> shells +━━━┯━━━┯━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━━━━━━━━━ + # │ │ name │ path +───┼───┼────────────┼──────────────────────── + 0 │ X │ filesystem │ /home/shaurya + 1 │ │ filesystem │ /home/shaurya/Pictures + 2 │ │ filesystem │ /home/shaurya/Desktop +━━━┷━━━┷━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━━━━━━━━━ +> shells | to-toml +[[]] +" " = "X" +name = "filesystem" +path = "/home/shaurya" + +[[]] +" " = " " +name = "filesystem" +path = "/home/shaurya/Pictures" + +[[]] +" " = " " +name = "filesystem" +path = "/home/shaurya/Desktop" + +``` + +```shell +> open cargo_sample.toml +━━━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━ + dependencies │ dev-dependencies │ package +────────────────┼──────────────────┼──────────────── + [table: 1 row] │ [table: 1 row] │ [table: 1 row] +━━━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━ +> open cargo_sample.toml | to-toml +[dependencies] +ansi_term = "0.11.0" +app_dirs = "1.2.1" +byte-unit = "2.1.0" +bytes = "0.4.12" +chrono-humanize = "0.0.11" +chrono-tz = "0.5.1" +clap = "2.33.0" +conch-parser = "0.1.1" +derive-new = "0.5.6" +dunce = "1.0.0" +futures-sink-preview = "0.3.0-alpha.16" +futures_codec = "0.2.2" +getset = "0.0.7" +git2 = "0.8.0" +itertools = "0.8.0" +lalrpop-util = "0.17.0" +language-reporting = "0.3.0" +log = "0.4.6" +logos = "0.10.0-rc2" +logos-derive = "0.10.0-rc2" +nom = "5.0.0-beta1" +ordered-float = "1.0.2" +pretty_env_logger = "0.3.0" +prettyprint = "0.6.0" +prettytable-rs = "0.8.0" +regex = "1.1.6" +rustyline = "4.1.0" +serde = "1.0.91" +serde_derive = "1.0.91" +serde_json = "1.0.39" +subprocess = "0.1.18" +sysinfo = "0.8.4" +term = "0.5.2" +tokio-fs = "0.1.6" +toml = "0.5.1" +toml-query = "0.9.0" + +[dependencies.chrono] +features = ["serde"] +version = "0.4.6" + +[dependencies.cursive] +default-features = false +features = ["pancurses-backend"] +version = "0.12.0" + +[dependencies.futures-preview] +features = ["compat", "io-compat"] +version = "0.3.0-alpha.16" + +[dependencies.indexmap] +features = ["serde-1"] +version = "1.0.2" + +[dependencies.pancurses] +features = ["win32a"] +version = "0.16" + +[dev-dependencies] +pretty_assertions = "0.6.1" + +[package] +authors = ["Yehuda Katz "] +description = "A shell for the GitHub era" +edition = "2018" +license = "ISC" +name = "nu" +version = "0.1.1" + +``` diff --git a/docs/commands/to-tsv.md b/docs/commands/to-tsv.md new file mode 100644 index 0000000000..b9e5f97d4f --- /dev/null +++ b/docs/commands/to-tsv.md @@ -0,0 +1,80 @@ +# to-tsv + +Converts table data into tsv text. 
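+## Usage
+
+General form (a sketch in the notation of the other command docs):
+
+```shell
+> [input-command] | to-tsv
+```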
+ +## Example + +```shell +> shells +━━━┯━━━┯━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━━━━━━━━━ + # │ │ name │ path +───┼───┼────────────┼──────────────────────── + 0 │ X │ filesystem │ /home/shaurya + 1 │ │ filesystem │ /home/shaurya/Pictures + 2 │ │ filesystem │ /home/shaurya/Desktop +━━━┷━━━┷━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━━━━━━━━━ +> shells |to-tsv + name path +X filesystem /home/shaurya + +``` + +```shell +> open caco3_plastics.tsv +━━━┯━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━┯━━━━━━━━━━━━━┯━━━━━━━━━━━━━━┯━━━━━━━━━━┯━━━━━━━━━━━━┯━━━━━━━━━━━━┯━━━━━━━━━━━━┯━━━━━━━━━━━┯━━━━━━━━━━━┯━━━━━━━━━━━━━━ + # │ importer │ shipper │ tariff_item │ name │ origin │ shipped_at │ arrived_at │ net_weight │ fob_price │ cif_price │ cif_per_net_ + │ │ │ │ │ │ │ │ │ │ │ weight +───┼──────────────┼──────────────┼─────────────┼──────────────┼──────────┼────────────┼────────────┼────────────┼───────────┼───────────┼────────────── + 0 │ PLASTICOS │ S A REVERTE │ 2509000000 │ CARBONATO DE │ SPAIN │ 18/03/2016 │ 17/04/2016 │ 81,000.00 │ 14,417.58 │ 18,252.34 │ 0.23 + │ RIVAL CIA │ │ │ CALCIO TIPO │ │ │ │ │ │ │ + │ LTDA │ │ │ CALCIPORE │ │ │ │ │ │ │ + │ │ │ │ 160 T AL │ │ │ │ │ │ │ + 1 │ MEXICHEM │ OMYA ANDINA │ 2836500000 │ CARBONATO │ COLOMBIA │ 07/07/2016 │ 10/07/2016 │ 26,000.00 │ 7,072.00 │ 8,127.18 │ 0.31 + │ ECUADOR S.A. │ S A │ │ │ │ │ │ │ │ │ + 2 │ PLASTIAZUAY │ SA REVERTE │ 2836500000 │ CARBONATO DE │ SPAIN │ 27/07/2016 │ 09/08/2016 │ 81,000.00 │ 8,100.00 │ 11,474.55 │ 0.14 + │ SA │ │ │ CALCIO │ │ │ │ │ │ │ + 3 │ PLASTICOS │ AND │ 2836500000 │ CALCIUM │ TURKEY │ 04/10/2016 │ 11/11/2016 │ 100,000.00 │ 17,500.00 │ 22,533.75 │ 0.23 + │ RIVAL CIA │ ENDUSTRIYEL │ │ CARBONATE │ │ │ │ │ │ │ + │ LTDA │ HAMMADDELER │ │ ANADOLU │ │ │ │ │ │ │ + │ │ DIS TCARET │ │ ANDCARB CT-1 │ │ │ │ │ │ │ + │ │ LTD.STI. │ │ │ │ │ │ │ │ │ + 4 │ QUIMICA │ SA REVERTE │ 2836500000 │ CARBONATO DE │ SPAIN │ 24/06/2016 │ 12/07/2016 │ 27,000.00 │ 3,258.90 │ 5,585.00 │ 0.21 + │ COMERCIAL │ │ │ CALCIO │ │ │ │ │ │ │ + │ QUIMICIAL │ │ │ │ │ │ │ │ │ │ + │ CIA. LTDA. │ │ │ │ │ │ │ │ │ │ + 5 │ PICA │ OMYA ANDINA │ 3824909999 │ CARBONATO DE │ COLOMBIA │ 01/01/1900 │ 18/01/2016 │ 66,500.00 │ 12,635.00 │ 18,670.52 │ 0.28 + │ PLASTICOS │ S.A │ │ CALCIO │ │ │ │ │ │ │ + │ INDUSTRIALES │ │ │ │ │ │ │ │ │ │ + │ C.A. │ │ │ │ │ │ │ │ │ │ + 6 │ PLASTIQUIM │ OMYA ANDINA │ 3824909999 │ CARBONATO DE │ COLOMBIA │ 01/01/1900 │ 25/10/2016 │ 33,000.00 │ 6,270.00 │ 9,999.00 │ 0.30 + │ S.A. │ S.A NIT │ │ CALCIO │ │ │ │ │ │ │ + │ │ 830.027.386- │ │ RECUBIERTO │ │ │ │ │ │ │ + │ │ 6 │ │ CON ACIDO │ │ │ │ │ │ │ + │ │ │ │ ESTEARICO │ │ │ │ │ │ │ + │ │ │ │ OMYA CARB 1T │ │ │ │ │ │ │ + │ │ │ │ CG BBS 1000 │ │ │ │ │ │ │ + 7 │ QUIMICOS │ SIBELCO │ 3824909999 │ CARBONATO DE │ COLOMBIA │ 01/11/2016 │ 03/11/2016 │ 52,000.00 │ 8,944.00 │ 13,039.05 │ 0.25 + │ ANDINOS │ COLOMBIA SAS │ │ CALCIO │ │ │ │ │ │ │ + │ QUIMANDI │ │ │ RECUBIERTO │ │ │ │ │ │ │ + │ S.A. │ │ │ │ │ │ │ │ │ │ + 8 │ TIGRE │ OMYA ANDINA │ 3824909999 │ CARBONATO DE │ COLOMBIA │ 01/01/1900 │ 28/10/2016 │ 66,000.00 │ 11,748.00 │ 18,216.00 │ 0.28 + │ ECUADOR S.A. 
│ S.A NIT │ │ CALCIO │ │ │ │ │ │ │ + │ ECUATIGRE │ 830.027.386- │ │ RECUBIERTO │ │ │ │ │ │ │ + │ │ 6 │ │ CON ACIDO │ │ │ │ │ │ │ + │ │ │ │ ESTEARICO │ │ │ │ │ │ │ + │ │ │ │ OMYACARB 1T │ │ │ │ │ │ │ + │ │ │ │ CG BPA 25 NO │ │ │ │ │ │ │ +━━━┷━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━┷━━━━━━━━━━━━━┷━━━━━━━━━━━━━━┷━━━━━━━━━━┷━━━━━━━━━━━━┷━━━━━━━━━━━━┷━━━━━━━━━━━━┷━━━━━━━━━━━┷━━━━━━━━━━━┷━━━━━━━━━━━━━━ +> open caco3_plastics.tsv | to-tsv +importer shipper tariff_item name origin shipped_at arrived_at net_weight fob_price cif_price cif_per_net_weight +PLASTICOS RIVAL CIA LTDA S A REVERTE 2509000000 CARBONATO DE CALCIO TIPO CALCIPORE 160 T AL SPAIN 18/03/2016 17/04/2016 81,000.00 14,417.58 18,252.34 0.23 +MEXICHEM ECUADOR S.A. OMYA ANDINA S A 2836500000 CARBONATO COLOMBIA 07/07/2016 10/07/2016 26,000.00 7,072.00 8,127.18 0.31 +PLASTIAZUAY SA SA REVERTE 2836500000 CARBONATO DE CALCIO SPAIN 27/07/2016 09/08/2016 81,000.00 8,100.00 11,474.55 0.14 +PLASTICOS RIVAL CIA LTDA AND ENDUSTRIYEL HAMMADDELER DIS TCARET LTD.STI. 2836500000 CALCIUM CARBONATE ANADOLU ANDCARB CT-1 TURKEY 04/10/2016 11/11/2016 100,000.00 17,500.00 22,533.75 0.23 +QUIMICA COMERCIAL QUIMICIAL CIA. LTDA. SA REVERTE 2836500000 CARBONATO DE CALCIO SPAIN 24/06/2016 12/07/2016 27,000.00 3,258.90 5,585.00 0.21 +PICA PLASTICOS INDUSTRIALES C.A. OMYA ANDINA S.A 3824909999 CARBONATO DE CALCIO COLOMBIA 01/01/1900 18/01/2016 66,500.00 12,635.00 18,670.52 0.28 +PLASTIQUIM S.A. OMYA ANDINA S.A NIT 830.027.386-6 3824909999 CARBONATO DE CALCIO RECUBIERTO CON ACIDO ESTEARICO OMYA CARB 1T CG BBS 1000 COLOMBIA 01/01/1900 25/10/2016 33,000.00 6,270.00 9,999.00 0.30 +QUIMICOS ANDINOS QUIMANDI S.A. SIBELCO COLOMBIA SAS 3824909999 CARBONATO DE CALCIO RECUBIERTO COLOMBIA 01/11/2016 03/11/2016 52,000.00 8,944.00 13,039.05 0.25 +TIGRE ECUADOR S.A. ECUATIGRE OMYA ANDINA S.A NIT 830.027.386-6 3824909999 CARBONATO DE CALCIO RECUBIERTO CON ACIDO ESTEARICO OMYACARB 1T CG BPA 25 NO COLOMBIA 01/01/1900 28/10/2016 66,000.00 11,748.00 18,216.00 0.28 + +``` diff --git a/docs/commands/to-url.md b/docs/commands/to-url.md new file mode 100644 index 0000000000..ad11133760 --- /dev/null +++ b/docs/commands/to-url.md @@ -0,0 +1,35 @@ +# to-url + +Converts table data into url-formatted text. + +## Example + +```shell +> shells +━━━┯━━━┯━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━━━━━━━━━ + # │ │ name │ path +───┼───┼────────────┼──────────────────────── + 0 │ X │ filesystem │ /home/shaurya + 1 │ │ filesystem │ /home/shaurya/Pictures + 2 │ │ filesystem │ /home/shaurya/Desktop +━━━┷━━━┷━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━━━━━━━━━ +> shells | to-url +━━━┯━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ + # │ value +───┼─────────────────────────────────────────────────────── + 0 │ +=X&name=filesystem&path=%2Fhome%2Fshaurya + 1 │ +=+&name=filesystem&path=%2Fhome%2Fshaurya%2FPictures + 2 │ +=+&name=filesystem&path=%2Fhome%2Fshaurya%2FDesktop +━━━┷━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ +``` + +```shell +> open sample.url +━━━━━━━━━━┯━━━━━━━━┯━━━━━━┯━━━━━━━━ + bread │ cheese │ meat │ fat +──────────┼────────┼──────┼──────── + baguette │ comté │ ham │ butter +━━━━━━━━━━┷━━━━━━━━┷━━━━━━┷━━━━━━━━ +> open sample.url | to-url +bread=baguette&cheese=comt%C3%A9&meat=ham&fat=butter +``` diff --git a/docs/commands/to-yaml.md b/docs/commands/to-yaml.md new file mode 100644 index 0000000000..b2be3768ef --- /dev/null +++ b/docs/commands/to-yaml.md @@ -0,0 +1,60 @@ +# to-yaml + +Converts table data into yaml text. 
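+Like the other `to-*` commands, it can be combined with `open` to convert between formats. A sketch, reusing a fixture referenced in the `to-json` docs:
+
+```shell
+> open sgml_description.json | to-yaml
+```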
+ +## Example + +```shell +> shells +━━━┯━━━┯━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━━━━━━━━━ + # │ │ name │ path +───┼───┼────────────┼──────────────────────── + 0 │ X │ filesystem │ /home/shaurya + 1 │ │ filesystem │ /home/shaurya/Pictures + 2 │ │ filesystem │ /home/shaurya/Desktop +━━━┷━━━┷━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━━━━━━━━━ +> shells | to-yaml +--- +- " ": X + name: filesystem + path: /home/shaurya +- " ": " " + name: filesystem + path: /home/shaurya/Pictures +- " ": " " + name: filesystem + path: /home/shaurya/Desktop +``` + +```shell +> open appveyor.yml +━━━━━━━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━━┯━━━━━━━┯━━━━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━━ + image │ environment │ install │ build │ test_script │ cache +────────────────────┼────────────────┼─────────────────┼───────┼─────────────────┼───────────────── + Visual Studio 2017 │ [table: 1 row] │ [table: 5 rows] │ │ [table: 2 rows] │ [table: 2 rows] +━━━━━━━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━━┷━━━━━━━┷━━━━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━━ +> open appveyor.yml | to-yaml +--- +image: Visual Studio 2017 +environment: + global: + PROJECT_NAME: nushell + RUST_BACKTRACE: 1 + matrix: + - TARGET: x86_64-pc-windows-msvc + CHANNEL: nightly + BITS: 64 +install: + - "set PATH=C:\\msys64\\mingw%BITS%\\bin;C:\\msys64\\usr\\bin;%PATH%" + - "curl -sSf -o rustup-init.exe https://win.rustup.rs" + - rustup-init.exe -y --default-host %TARGET% --default-toolchain %CHANNEL%-%TARGET% + - "set PATH=%PATH%;C:\\Users\\appveyor\\.cargo\\bin" + - "call \"C:\\Program Files (x86)\\Microsoft Visual Studio\\2017\\Community\\VC\\Auxiliary\\Build\\vcvars64.bat\"" +build: false +test_script: + - cargo build --verbose + - cargo test --all --verbose +cache: + - target -> Cargo.lock + - "C:\\Users\\appveyor\\.cargo\\registry -> Cargo.lock" +``` diff --git a/docs/commands/trim.md b/docs/commands/trim.md new file mode 100644 index 0000000000..5f01a688f7 --- /dev/null +++ b/docs/commands/trim.md @@ -0,0 +1,12 @@ +# trim + +Trim leading and following whitespace from text data + +## Example + +```shell +> echo " Hello world" + Hello world +> echo " Hello world" | trim +Hello world +``` \ No newline at end of file diff --git a/docs/commands/version.md b/docs/commands/version.md new file mode 100644 index 0000000000..d0b8828990 --- /dev/null +++ b/docs/commands/version.md @@ -0,0 +1,14 @@ +# version + +Outputs the nushell version. + +## Examples + +```shell +> version +━━━━━━━━━ + version +───────── + 0.3.0 +━━━━━━━━━ +``` diff --git a/docs/commands/where.md b/docs/commands/where.md new file mode 100644 index 0000000000..be962726ee --- /dev/null +++ b/docs/commands/where.md @@ -0,0 +1,34 @@ +# where + +This command filters the content of a table based on a condition passed as a parameter, which must be a boolean expression making use of any of the table columns. Other commands such as `ls` are capable of feeding `where` with their output through pipelines. 
+ +## Usage +```shell +> [input-command] | where [condition] +``` + +## Examples + +```shell +> ls | where size > 4kb +----+----------------+------+----------+----------+----------------+---------------- + # | name | type | readonly | size | accessed | modified +----+----------------+------+----------+----------+----------------+---------------- + 0 | IMG_1291.jpg | File | | 115.5 KB | a month ago | 4 months ago + 1 | README.md | File | | 11.1 KB | 2 days ago | 2 days ago + 2 | IMG_1291.png | File | | 589.0 KB | a month ago | a month ago + 3 | IMG_1381.jpg | File | | 81.0 KB | a month ago | 4 months ago + 4 | butterfly.jpeg | File | | 4.2 KB | a month ago | a month ago + 5 | Cargo.lock | File | | 199.6 KB | 22 minutes ago | 22 minutes ago +``` + +```shell +> ps | where cpu > 10 +---+-------+----------+-------+----------------------------- + # | pid | status | cpu | name +---+-------+----------+-------+----------------------------- + 0 | 1992 | Sleeping | 44.52 | /usr/bin/gnome-shell + 1 | 1069 | Sleeping | 16.15 | + 2 | 24116 | Sleeping | 13.70 | /opt/google/chrome/chrome + 3 | 21976 | Sleeping | 12.67 | /usr/share/discord/Discord +``` diff --git a/docs/docker.md b/docs/docker.md new file mode 100644 index 0000000000..6484cdfd8e --- /dev/null +++ b/docs/docker.md @@ -0,0 +1,124 @@ +# Docker Guide + +| tag | base image | plugins | package manager | libs & bins | size | +| ------------------ | -------------------- | ------- | --------------- | ---------------------------------------------------------------- | ----------- | +| `latest`, `debian` | `debian:latest` | yes | apt | **a lot**, including _glibc_ | ~(48+62) MB | +| `slim` | `debian:stable-slim` | yes | apt | all `nu:debian` image but exclude [this list][.slimify-excludes] | ~(26+62) MB | +| `alpine` | `alpine:latest` | yes | apk | all `nu:musl-busybox` image + libcrypto, libssl, libtls, libz | ~(3+61) MB | +| `musl-busybox` | `busybox:musl` | no | — | GNU utils + _musl_ | ~(1+16) MB | +| `glibc-busybox` | `busybox:glibc` | no | — | GNU utils + _glibc_ | ~(3+17) MB | +| `musl-distroless` | `distroless/static` | no | — | see [here][distroless/base] | ~(2+16) MB | +| `glibc-distroless` | `distroless/cc` | no | — | `distroless/static` with _glibc_ | ~(17+17) MB | +| `glibc` | `scratch` | no | — | **only `nu` binary-executable** which depend on glibc runtime | ~17 MB | +| `musl` | `scratch` | no | — | **only `nu` binary-executable** statically linked to musl | ~16 MB | + +[.slimify-excludes]: https://github.com/debuerreotype/debuerreotype/blob/master/scripts/.slimify-excludes +[distroless/base]: https://github.com/GoogleContainerTools/distroless/blob/master/base/README.md + +## Image Variants + +### `nu:` +This is the defacto image. If you are unsure about what your needs are, you probably want to use this one. It is designed to be used both as a throw away container (mount your source code and start the container to start your app), as well as the base to build other images off of. + +
example
+
+Let's say you create a plugin in Rust.
+- create a Dockerfile in your project root
+```dockerfile
+FROM nu:0.2
+
+COPY /target/debug/nu_plugin_cowsay /bin/
+ENTRYPOINT ["nu"]
+```
+- build your project first, then build the image and run it via docker (the image tag below is arbitrary)
+```console
+cargo build
+docker build -t nu-with-cowsay .
+docker run -it nu-with-cowsay
+```
+
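+For the other use case mentioned above (a throwaway container with your sources mounted into it), a minimal sketch could look like the following; the mount path and the local `nu` image name are illustrative:
+```console
+# start an interactive nu session with the current directory mounted at /work
+docker run -it --rm -v "$(pwd):/work" -w /work nu
+```
+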
+
+### `nu:<version>-slim`
+This image does not include the common packages found in the default tag and only contains the minimal packages needed to run `nu`. Unless you are working in an environment where only the `nu` image will be deployed and you have space constraints, it's highly recommended to use the alpine image if you aim for a small image size. Only use this image if you really need **both** `glibc` and a small image size.
+
+### `nu:<version>-alpine`
+This image is based on the popular [Alpine Linux project](https://alpinelinux.org/), available in [the alpine official image][alpine]. Alpine Linux is much smaller than most distribution base images (~5MB), and thus leads to much slimmer images in general.
+
+This variant is highly recommended when the final image size needs to be as small as possible. The main caveat to note is that it uses `musl` libc instead of `glibc` and friends, so certain software might run into issues depending on the depth of their libc requirements. However, most software doesn't have an issue with this, so this variant is usually a very safe choice. See [this Hacker News comment thread](https://news.ycombinator.com/item?id=10782897) for more discussion of the issues that might arise and some pro/con comparisons of using Alpine-based images.
+
+To minimize image size, it's uncommon for additional related tools (such as `git` or `bash`) to be included in Alpine-based images. Using this image as a base, add the things you need in your own Dockerfile (see the [alpine image description][alpine] for examples of how to install packages if you are unfamiliar).
+
+### `nu:<version>-<libc-variant>`
+This image is based on [`scratch`](https://hub.docker.com/_/scratch), which doesn't add an extra layer. This variant can be handy in a project that uses multiple programming languages, where you would otherwise need a lot of tools in the image. By using it in a [multi-stage build][], you can slim down the docker image that needs to be pulled.
+
+[multi-stage build]: https://docs.docker.com/develop/develop-images/multistage-build/
+
+
example + +- using `glibc` variant +```dockerfile +FROM nu:0.2-glibc as shell +FROM node:slim + +# Build your plugins + +COPY --from=shell /bin/nu /bin/ +# Something else +ENTRYPOINT ["nu"] +``` + +- using `musl` variant +```dockerfile +FROM nu:musl as shell +FROM go:alpine + +# Build your plugins + +COPY --from=shell /bin/nu /bin/ +# Something else +ENTRYPOINT ["nu"] +``` +
+
+### `nu:<version>-<libc-variant>-distroless`
+This image is based on [Distroless](https://github.com/GoogleContainerTools/distroless), which contains only your application and its runtime dependencies. These images do not contain package managers, shells, or any other programs you would expect to find in a standard Linux distribution, except for nushell itself. All distroless variants always contain:
+- ca-certificates
+- A /etc/passwd entry for a root user
+- A /tmp directory
+- tzdata
+
+The `glibc-distroless` variant additionally **adds**:
+- glibc
+- libssl
+- openssl
+
+> Most likely you want to use this in a CI/CD environment for plugins that can be statically compiled.
+
+
example + +```dockerfile +FROM nu:musl-distroless + +COPY target/x86_64-unknown-linux-musl/release/nu_plugin_* /bin/ +ENTRYPOINT ["nu"] +``` +
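+The statically linked plugin binaries copied above have to be produced beforehand; one way to do that (the target name is assumed to match the path used in the Dockerfile) is:
+```console
+rustup target add x86_64-unknown-linux-musl
+cargo build --release --target x86_64-unknown-linux-musl
+```
+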
+
+### `nu:<version>-<libc-variant>-busybox`
+This image is based on [BusyBox](https://www.busybox.net/), which is a very good ingredient for crafting space-efficient distributions. It combines tiny versions of many common UNIX utilities into a single small executable. It also provides replacements for most of the utilities you usually find in GNU fileutils, shellutils, etc. The utilities in BusyBox generally have fewer options than their full-featured GNU cousins; however, the options that are included provide the expected functionality and behave very much like their GNU counterparts. Basically, this image provides a fairly complete environment for any small or embedded system.
+
+> Use this only if you need common utilities like `tar`, `awk`, and many more, but don't want extra blobs such as the nushell plugins and others.
+
+
example + +```dockerfile +FROM nu:0.2-glibc-busybox + +ADD https://github.com/user/repo/releases/download/latest/nu_plugin_cowsay.tar.gz /tmp/ +RUN tar xzfv nu_plugin_cowsay.tar.gz -C /bin --strip=1 nu_plugin_cowsay + +ENTRYPOINT ["nu"] +``` +
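+As a quick sanity check that the BusyBox applets remain available next to `nu`, you could build the image from the Dockerfile above and invoke one of them directly (the image tag here is arbitrary):
+```console
+docker build -t nu-busybox-cowsay .
+docker run --rm --entrypoint busybox nu-busybox-cowsay awk --help
+```
+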
+ +[musl]: https://www.musl-libc.org/ +[alpine]: https://hub.docker.com/_/alpine/ \ No newline at end of file diff --git a/features.toml b/features.toml new file mode 100644 index 0000000000..f7cea6d9e9 --- /dev/null +++ b/features.toml @@ -0,0 +1,13 @@ +[hintsv1] + +description = "Adding hints based upon error states in the syntax highlighter" +enabled = false + +[coloring_in_tokens] + +description = "Move coloring into the TokensIterator so they can be atomic with the rest of the iterator" +reason = """ +This is laying the groundwork for merging coloring and parsing. It also makes token_nodes.atomic() naturally +work with coloring, which is pretty useful on its own. +""" +enabled = false \ No newline at end of file diff --git a/images/nushell-autocomplete.gif b/images/nushell-autocomplete.gif new file mode 100644 index 0000000000..87540af37f Binary files /dev/null and b/images/nushell-autocomplete.gif differ diff --git a/rust-toolchain b/rust-toolchain index e6ae9d2242..c3a3f37794 100644 --- a/rust-toolchain +++ b/rust-toolchain @@ -1 +1 @@ -nightly-2019-08-30 +beta-2019-09-25 diff --git a/src/cli.rs b/src/cli.rs index a5a2aebdf7..f46db10529 100644 --- a/src/cli.rs +++ b/src/cli.rs @@ -1,4 +1,3 @@ -use crate::commands::autoview; use crate::commands::classified::{ ClassifiedCommand, ClassifiedInputStream, ClassifiedPipeline, ExternalCommand, InternalCommand, StreamNext, @@ -7,22 +6,29 @@ use crate::commands::plugin::JsonRpc; use crate::commands::plugin::{PluginCommand, PluginSink}; use crate::commands::whole_stream_command; use crate::context::Context; +use crate::data::config; +use crate::data::Value; pub(crate) use crate::errors::ShellError; +use crate::fuzzysearch::{interactive_fuzzy_search, SelectionResult}; use crate::git::current_branch; -use crate::object::Value; use crate::parser::registry::Signature; -use crate::parser::{hir, CallNode, Pipeline, PipelineElement, TokenNode}; +use crate::parser::{ + hir, + hir::syntax_shape::{expand_syntax, PipelineShape}, + hir::{expand_external_tokens::expand_external_tokens, tokens_iterator::TokensIterator}, + TokenNode, +}; use crate::prelude::*; use log::{debug, trace}; -use regex::Regex; use rustyline::error::ReadlineError; use rustyline::{self, config::Configurer, config::EditMode, ColorMode, Config, Editor}; use std::env; use std::error::Error; use std::io::{BufRead, BufReader, Write}; use std::iter::Iterator; -use std::sync::atomic::{AtomicBool, Ordering}; +use std::path::PathBuf; +use std::sync::atomic::Ordering; #[derive(Debug)] pub enum MaybeOwned<'a, T> { @@ -60,6 +66,7 @@ fn load_plugin(path: &std::path::Path, context: &mut Context) -> Result<(), Shel let result = match reader.read_line(&mut input) { Ok(count) => { trace!("processing response ({} bytes)", count); + trace!("response: {}", input); let response = serde_json::from_str::>>(&input); match response { @@ -69,28 +76,39 @@ fn load_plugin(path: &std::path::Path, context: &mut Context) -> Result<(), Shel trace!("processing {:?}", params); - if params.is_filter { - let fname = fname.to_string(); - let name = params.name.clone(); - context.add_commands(vec![whole_stream_command(PluginCommand::new( - name, fname, params, - ))]); - Ok(()) + let name = params.name.clone(); + let fname = fname.to_string(); + + if let Some(_) = context.get_command(&name) { + trace!("plugin {:?} already loaded.", &name); } else { - let fname = fname.to_string(); - let name = params.name.clone(); - context.add_commands(vec![whole_stream_command(PluginSink::new( - name, fname, params, - ))]); - Ok(()) + if 
params.is_filter { + context.add_commands(vec![whole_stream_command( + PluginCommand::new(name, fname, params), + )]); + } else { + context.add_commands(vec![whole_stream_command(PluginSink::new( + name, fname, params, + ))]); + }; } + Ok(()) } Err(e) => Err(e), }, - Err(e) => Err(ShellError::string(format!("Error: {:?}", e))), + Err(e) => { + trace!("incompatible plugin {:?}", input); + Err(ShellError::untagged_runtime_error(format!( + "Error: {:?}", + e + ))) + } } } - Err(e) => Err(ShellError::string(format!("Error: {:?}", e))), + Err(e) => Err(ShellError::untagged_runtime_error(format!( + "Error: {:?}", + e + ))), }; let _ = child.wait(); @@ -98,38 +116,12 @@ fn load_plugin(path: &std::path::Path, context: &mut Context) -> Result<(), Shel result } -fn load_plugins_in_dir(path: &std::path::PathBuf, context: &mut Context) -> Result<(), ShellError> { - let re_bin = Regex::new(r"^nu_plugin_[A-Za-z_]+$")?; - let re_exe = Regex::new(r"^nu_plugin_[A-Za-z_]+\.(exe|bat)$")?; +fn search_paths() -> Vec { + let mut search_paths = Vec::new(); - trace!("Looking for plugins in {:?}", path); - - match std::fs::read_dir(path) { - Ok(p) => { - for entry in p { - let entry = entry?; - let filename = entry.file_name(); - let f_name = filename.to_string_lossy(); - - if re_bin.is_match(&f_name) || re_exe.is_match(&f_name) { - let mut load_path = path.clone(); - trace!("Found {:?}", f_name); - load_path.push(f_name.to_string()); - load_plugin(&load_path, context)?; - } - } - } - _ => {} - } - Ok(()) -} - -fn load_plugins(context: &mut Context) -> Result<(), ShellError> { match env::var_os("PATH") { Some(paths) => { - for path in env::split_paths(&paths) { - let _ = load_plugins_in_dir(&path, context); - } + search_paths = env::split_paths(&paths).collect::>(); } None => println!("PATH is not defined in the environment."), } @@ -140,7 +132,10 @@ fn load_plugins(context: &mut Context) -> Result<(), ShellError> { let mut path = std::path::PathBuf::from("."); path.push("target"); path.push("debug"); - let _ = load_plugins_in_dir(&path, context); + + if path.exists() { + search_paths.push(path); + } } #[cfg(not(debug_assertions))] @@ -150,12 +145,104 @@ fn load_plugins(context: &mut Context) -> Result<(), ShellError> { path.push("target"); path.push("release"); - let _ = load_plugins_in_dir(&path, context); + if path.exists() { + search_paths.push(path); + } + } + + // permit Nu finding and picking up development plugins + // if there are any first. 
+ search_paths.reverse(); + search_paths +} + +fn load_plugins(context: &mut Context) -> Result<(), ShellError> { + let opts = glob::MatchOptions { + case_sensitive: false, + require_literal_separator: false, + require_literal_leading_dot: false, + }; + + for path in search_paths() { + let mut pattern = path.to_path_buf(); + + pattern.push(std::path::Path::new("nu_plugin_[a-z]*")); + + match glob::glob_with(&pattern.to_string_lossy(), opts) { + Err(_) => {} + Ok(binaries) => { + for bin in binaries.filter_map(Result::ok) { + if !bin.is_file() { + continue; + } + + let bin_name = { + if let Some(name) = bin.file_name() { + match name.to_str() { + Some(raw) => raw, + None => continue, + } + } else { + continue; + } + }; + + let is_valid_name = { + #[cfg(windows)] + { + bin_name + .chars() + .all(|c| c.is_ascii_alphabetic() || c == '_' || c == '.') + } + + #[cfg(not(windows))] + { + bin_name + .chars() + .all(|c| c.is_ascii_alphabetic() || c == '_') + } + }; + + let is_executable = { + #[cfg(windows)] + { + bin_name.ends_with(".exe") || bin_name.ends_with(".bat") + } + + #[cfg(not(windows))] + { + true + } + }; + + if is_valid_name && is_executable { + trace!("Trying {:?}", bin.display()); + + // we are ok if this plugin load fails + let _ = load_plugin(&bin, context); + } + } + } + } } Ok(()) } +pub struct History; + +impl History { + pub fn path() -> PathBuf { + const FNAME: &str = "history.txt"; + config::user_data() + .map(|mut p| { + p.push(FNAME); + p + }) + .unwrap_or(PathBuf::from(FNAME)) + } +} + pub async fn cli() -> Result<(), Box> { let mut context = Context::basic()?; @@ -163,7 +250,7 @@ pub async fn cli() -> Result<(), Box> { use crate::commands::*; context.add_commands(vec![ - whole_stream_command(PS), + whole_stream_command(PWD), whole_stream_command(LS), whole_stream_command(CD), whole_stream_command(Size), @@ -171,15 +258,15 @@ pub async fn cli() -> Result<(), Box> { whole_stream_command(Next), whole_stream_command(Previous), whole_stream_command(Debug), - whole_stream_command(Lines), whole_stream_command(Shells), whole_stream_command(SplitColumn), whole_stream_command(SplitRow), whole_stream_command(Lines), whole_stream_command(Reject), whole_stream_command(Reverse), + whole_stream_command(Append), + whole_stream_command(Prepend), whole_stream_command(Trim), - whole_stream_command(ToArray), whole_stream_command(ToBSON), whole_stream_command(ToCSV), whole_stream_command(ToJSON), @@ -187,43 +274,50 @@ pub async fn cli() -> Result<(), Box> { whole_stream_command(ToDB), whole_stream_command(ToTOML), whole_stream_command(ToTSV), + whole_stream_command(ToURL), whole_stream_command(ToYAML), whole_stream_command(SortBy), + whole_stream_command(GroupBy), whole_stream_command(Tags), + whole_stream_command(Count), whole_stream_command(First), whole_stream_command(Last), - whole_stream_command(FromArray), - whole_stream_command(FromArray), + whole_stream_command(Env), whole_stream_command(FromCSV), whole_stream_command(FromTSV), + whole_stream_command(FromSSV), whole_stream_command(FromINI), whole_stream_command(FromBSON), whole_stream_command(FromJSON), whole_stream_command(FromDB), whole_stream_command(FromSQLite), whole_stream_command(FromTOML), + whole_stream_command(FromURL), whole_stream_command(FromXML), whole_stream_command(FromYAML), whole_stream_command(FromYML), whole_stream_command(Pick), whole_stream_command(Get), per_item_command(Remove), + per_item_command(Fetch), per_item_command(Open), per_item_command(Post), per_item_command(Where), + per_item_command(Echo), 
whole_stream_command(Config), whole_stream_command(SkipWhile), per_item_command(Enter), per_item_command(Help), + per_item_command(History), whole_stream_command(Exit), whole_stream_command(Autoview), + whole_stream_command(Pivot), per_item_command(Cpy), whole_stream_command(Date), per_item_command(Mkdir), per_item_command(Move), whole_stream_command(Save), whole_stream_command(Table), - whole_stream_command(VTable), whole_stream_command(Version), whole_stream_command(Which), ]); @@ -235,6 +329,7 @@ pub async fn cli() -> Result<(), Box> { )]); } } + let _ = load_plugins(&mut context); let config = Config::builder().color_mode(ColorMode::Forced).build(); @@ -246,28 +341,25 @@ pub async fn cli() -> Result<(), Box> { } // we are ok if history does not exist - let _ = rl.load_history("history.txt"); + let _ = rl.load_history(&History::path()); - let ctrl_c = Arc::new(AtomicBool::new(false)); - let cc = ctrl_c.clone(); + let cc = context.ctrl_c.clone(); ctrlc::set_handler(move || { cc.store(true, Ordering::SeqCst); }) .expect("Error setting Ctrl-C handler"); let mut ctrlcbreak = false; loop { - if ctrl_c.load(Ordering::SeqCst) { - ctrl_c.store(false, Ordering::SeqCst); + if context.ctrl_c.load(Ordering::SeqCst) { + context.ctrl_c.store(false, Ordering::SeqCst); continue; } let cwd = context.shell_manager.path(); - rl.set_helper(Some(crate::shell::Helper::new( - context.shell_manager.clone(), - ))); + rl.set_helper(Some(crate::shell::Helper::new(context.clone()))); - let edit_mode = crate::object::config::config(Span::unknown())? + let edit_mode = config::config(Tag::unknown())? .get("edit_mode") .map(|s| match s.as_string().unwrap().as_ref() { "vi" => EditMode::Vi, @@ -278,22 +370,70 @@ pub async fn cli() -> Result<(), Box> { rl.set_edit_mode(edit_mode); - let readline = rl.readline(&format!( + // Register Ctrl-r for history fuzzy search + // rustyline doesn't support custom commands, so we override Ctrl-D (EOF) + // https://github.com/nushell/nushell/issues/689 + #[cfg(all(not(windows), feature = "crossterm"))] + rl.bind_sequence(rustyline::KeyPress::Ctrl('R'), rustyline::Cmd::EndOfFile); + // Redefine Ctrl-D to same command as Ctrl-C + rl.bind_sequence(rustyline::KeyPress::Ctrl('D'), rustyline::Cmd::Interrupt); + + let prompt = &format!( "{}{}> ", cwd, match current_branch() { Some(s) => format!("({})", s), None => "".to_string(), } - )); + ); + let mut initial_command = Some(String::new()); + let mut readline = Err(ReadlineError::Eof); + while let Some(ref cmd) = initial_command { + readline = rl.readline_with_initial(prompt, (&cmd, "")); + if let Err(ReadlineError::Eof) = &readline { + // Fuzzy search in history + let lines = rl.history().iter().rev().map(|s| s.as_str()).collect(); + let selection = interactive_fuzzy_search(&lines, 5); // Clears last line with prompt + match selection { + SelectionResult::Selected(line) => { + println!("{}{}", &prompt, &line); // TODO: colorize prompt + readline = Ok(line.clone()); + initial_command = None; + } + SelectionResult::Edit(line) => { + initial_command = Some(line); + } + SelectionResult::NoSelection => { + readline = Ok("".to_string()); + initial_command = None; + } + } + } else { + initial_command = None; + } + } match process_line(readline, &mut context).await { LineResult::Success(line) => { rl.add_history_entry(line.clone()); + let _ = rl.save_history(&History::path()); } LineResult::CtrlC => { + let config_ctrlc_exit = config::config(Tag::unknown())? 
+ .get("ctrlc_exit") + .map(|s| match s.as_string().unwrap().as_ref() { + "true" => true, + _ => false, + }) + .unwrap_or(false); // default behavior is to allow CTRL-C spamming similar to other shells + + if !config_ctrlc_exit { + continue; + } + if ctrlcbreak { + let _ = rl.save_history(&History::path()); std::process::exit(0); } else { context.with_host(|host| host.stdout("CTRL-C pressed (again to quit)")); @@ -302,21 +442,12 @@ pub async fn cli() -> Result<(), Box> { } } - LineResult::Error(mut line, err) => { + LineResult::Error(line, err) => { rl.add_history_entry(line.clone()); - let diag = err.to_diagnostic(); + let _ = rl.save_history(&History::path()); + context.with_host(|host| { - let writer = host.err_termcolor(); - line.push_str(" "); - let files = crate::parser::Files::new(line); - let _ = std::panic::catch_unwind(move || { - let _ = language_reporting::emit( - &mut writer.lock(), - &files, - &diag, - &language_reporting::DefaultConfig, - ); - }); + print_err(err, host, &Text::from(line)); }) } @@ -328,11 +459,19 @@ pub async fn cli() -> Result<(), Box> { } // we are ok if we can not save history - let _ = rl.save_history("history.txt"); + let _ = rl.save_history(&History::path()); Ok(()) } +fn chomp_newline(s: &str) -> &str { + if s.ends_with('\n') { + &s[..s.len() - 1] + } else { + s + } +} + enum LineResult { Success(String), Error(String, ShellError), @@ -345,9 +484,11 @@ async fn process_line(readline: Result, ctx: &mut Context Ok(line) if line.trim() == "" => LineResult::Success(line.clone()), Ok(line) => { + let line = chomp_newline(line); + let result = match crate::parser::parse(&line) { Err(err) => { - return LineResult::Error(line.clone(), err); + return LineResult::Error(line.to_string(), err); } Ok(val) => val, @@ -358,7 +499,7 @@ async fn process_line(readline: Result, ctx: &mut Context let mut pipeline = match classify_pipeline(&result, ctx, &Text::from(line)) { Ok(pipeline) => pipeline, - Err(err) => return LineResult::Error(line.clone(), err), + Err(err) => return LineResult::Error(line.to_string(), err), }; match pipeline.commands.last() { @@ -366,8 +507,8 @@ async fn process_line(readline: Result, ctx: &mut Context _ => pipeline .commands .push(ClassifiedCommand::Internal(InternalCommand { - command: whole_stream_command(autoview::Autoview), - name_span: Span::unknown(), + name: "autoview".to_string(), + name_tag: Tag::unknown(), args: hir::Call::new( Box::new(hir::Expression::synthetic_string("autoview")), None, @@ -379,6 +520,44 @@ async fn process_line(readline: Result, ctx: &mut Context let mut input = ClassifiedInputStream::new(); let mut iter = pipeline.commands.into_iter().peekable(); + let mut is_first_command = true; + + // Check the config to see if we need to update the path + // TODO: make sure config is cached so we don't path this load every call + let config = crate::data::config::read(Tag::unknown(), &None).unwrap(); + if config.contains_key("path") { + // Override the path with what they give us from config + let value = config.get("path"); + + match value { + Some(value) => match value { + Tagged { + item: Value::Table(table), + .. 
+ } => { + let mut paths = vec![]; + for val in table { + let path_str = val.as_string(); + match path_str { + Err(_) => {} + Ok(path_str) => { + paths.push(PathBuf::from(path_str)); + } + } + } + let path_os_string = std::env::join_paths(&paths); + match path_os_string { + Ok(path_os_string) => { + std::env::set_var("PATH", path_os_string); + } + Err(_) => {} + } + } + _ => {} + }, + None => {} + } + } loop { let item: Option = iter.next(); @@ -387,16 +566,24 @@ async fn process_line(readline: Result, ctx: &mut Context input = match (item, next) { (None, _) => break, + (Some(ClassifiedCommand::Dynamic(_)), _) + | (_, Some(ClassifiedCommand::Dynamic(_))) => { + return LineResult::Error( + line.to_string(), + ShellError::unimplemented("Dynamic commands"), + ) + } + (Some(ClassifiedCommand::Expr(_)), _) => { return LineResult::Error( - line.clone(), + line.to_string(), ShellError::unimplemented("Expression-only commands"), ) } (_, Some(ClassifiedCommand::Expr(_))) => { return LineResult::Error( - line.clone(), + line.to_string(), ShellError::unimplemented("Expression-only commands"), ) } @@ -404,22 +591,46 @@ async fn process_line(readline: Result, ctx: &mut Context ( Some(ClassifiedCommand::Internal(left)), Some(ClassifiedCommand::External(_)), - ) => match left.run(ctx, input, Text::from(line)).await { + ) => match left.run(ctx, input, Text::from(line), is_first_command) { Ok(val) => ClassifiedInputStream::from_input_stream(val), - Err(err) => return LineResult::Error(line.clone(), err), + Err(err) => return LineResult::Error(line.to_string(), err), }, (Some(ClassifiedCommand::Internal(left)), Some(_)) => { - match left.run(ctx, input, Text::from(line)).await { + match left.run(ctx, input, Text::from(line), is_first_command) { Ok(val) => ClassifiedInputStream::from_input_stream(val), - Err(err) => return LineResult::Error(line.clone(), err), + Err(err) => return LineResult::Error(line.to_string(), err), } } (Some(ClassifiedCommand::Internal(left)), None) => { - match left.run(ctx, input, Text::from(line)).await { - Ok(val) => ClassifiedInputStream::from_input_stream(val), - Err(err) => return LineResult::Error(line.clone(), err), + match left.run(ctx, input, Text::from(line), is_first_command) { + Ok(val) => { + use futures::stream::TryStreamExt; + + let mut output_stream: OutputStream = val.into(); + loop { + match output_stream.try_next().await { + Ok(Some(ReturnSuccess::Value(Tagged { + item: Value::Error(e), + .. 
+ }))) => { + return LineResult::Error(line.to_string(), e); + } + Ok(Some(_item)) => { + if ctx.ctrl_c.load(Ordering::SeqCst) { + break; + } + } + _ => { + break; + } + } + } + + return LineResult::Success(line.to_string()); + } + Err(err) => return LineResult::Error(line.to_string(), err), } } @@ -428,32 +639,31 @@ async fn process_line(readline: Result, ctx: &mut Context Some(ClassifiedCommand::External(_)), ) => match left.run(ctx, input, StreamNext::External).await { Ok(val) => val, - Err(err) => return LineResult::Error(line.clone(), err), + Err(err) => return LineResult::Error(line.to_string(), err), }, (Some(ClassifiedCommand::External(left)), Some(_)) => { match left.run(ctx, input, StreamNext::Internal).await { Ok(val) => val, - Err(err) => return LineResult::Error(line.clone(), err), + Err(err) => return LineResult::Error(line.to_string(), err), } } (Some(ClassifiedCommand::External(left)), None) => { match left.run(ctx, input, StreamNext::Last).await { Ok(val) => val, - Err(err) => return LineResult::Error(line.clone(), err), + Err(err) => return LineResult::Error(line.to_string(), err), } } - } + }; + + is_first_command = false; } - LineResult::Success(line.clone()) + LineResult::Success(line.to_string()) } Err(ReadlineError::Interrupted) => LineResult::CtrlC, - Err(ReadlineError::Eof) => { - println!("CTRL-D"); - LineResult::Break - } + Err(ReadlineError::Eof) => LineResult::Break, Err(err) => { println!("Error: {:?}", err); LineResult::Break @@ -466,98 +676,52 @@ fn classify_pipeline( context: &Context, source: &Text, ) -> Result { - let pipeline = pipeline.as_pipeline()?; + let mut pipeline_list = vec![pipeline.clone()]; + let mut iterator = TokensIterator::all(&mut pipeline_list, pipeline.span()); - let Pipeline { parts, .. } = pipeline; - - let commands: Result, ShellError> = parts - .iter() - .map(|item| classify_command(&item, context, &source)) - .collect(); - - Ok(ClassifiedPipeline { - commands: commands?, - }) -} - -fn classify_command( - command: &PipelineElement, - context: &Context, - source: &Text, -) -> Result { - let call = command.call(); - - match call { - // If the command starts with `^`, treat it as an external command no matter what - call if call.head().is_external() => { - let name_span = call.head().expect_external(); - let name = name_span.slice(source); - - Ok(external_command(call, source, name.tagged(name_span))) - } - - // Otherwise, if the command is a bare word, we'll need to triage it - call if call.head().is_bare() => { - let head = call.head(); - let name = head.source(source); - - match context.has_command(name) { - // if the command is in the registry, it's an internal command - true => { - let command = context.get_command(name); - let config = command.signature(); - - trace!(target: "nu::build_pipeline", "classifying {:?}", config); - - let args: hir::Call = config.parse_args(call, &context, source)?; - - trace!(target: "nu::build_pipeline", "args :: {}", args.debug(source)); - - Ok(ClassifiedCommand::Internal(InternalCommand { - command, - name_span: head.span().clone(), - args, - })) - } - - // otherwise, it's an external command - false => Ok(external_command(call, source, name.tagged(head.span()))), - } - } - - // If the command is something else (like a number or a variable), that is currently unsupported. - // We might support `$somevar` as a curried command in the future. 
- call => Err(ShellError::invalid_command(call.head().span())), - } + expand_syntax( + &PipelineShape, + &mut iterator, + &context.expand_context(source, pipeline.span()), + ) } // Classify this command as an external command, which doesn't give special meaning // to nu syntactic constructs, and passes all arguments to the external command as // strings. -fn external_command( - call: &Tagged, +pub(crate) fn external_command( + tokens: &mut TokensIterator, source: &Text, name: Tagged<&str>, -) -> ClassifiedCommand { - let arg_list_strings: Vec> = match call.children() { - Some(args) => args +) -> Result { + let arg_list_strings = expand_external_tokens(tokens, source)?; + + Ok(ClassifiedCommand::External(ExternalCommand { + name: name.to_string(), + name_tag: name.tag(), + args: arg_list_strings .iter() - .filter_map(|i| match i { - TokenNode::Whitespace(_) => None, - other => Some(Tagged::from_simple_spanned_item( - other.as_external_arg(source), - other.span(), - )), + .map(|x| Tagged { + tag: x.span.into(), + item: x.item.clone(), }) .collect(), - None => vec![], - }; - - let (name, tag) = name.into_parts(); - - ClassifiedCommand::External(ExternalCommand { - name: name.to_string(), - name_span: tag.span, - args: arg_list_strings, - }) + })) +} + +pub fn print_err(err: ShellError, host: &dyn Host, source: &Text) { + let diag = err.to_diagnostic(); + + let writer = host.err_termcolor(); + let mut source = source.to_string(); + source.push_str(" "); + let files = crate::parser::Files::new(source); + let _ = std::panic::catch_unwind(move || { + let _ = language_reporting::emit( + &mut writer.lock(), + &files, + &diag, + &language_reporting::DefaultConfig, + ); + }); } diff --git a/src/commands.rs b/src/commands.rs index 0da8cadbd4..ba69d1e822 100644 --- a/src/commands.rs +++ b/src/commands.rs @@ -1,6 +1,7 @@ #[macro_use] pub(crate) mod macros; +pub(crate) mod append; pub(crate) mod args; pub(crate) mod autoview; pub(crate) mod cd; @@ -8,24 +9,31 @@ pub(crate) mod classified; pub(crate) mod clip; pub(crate) mod command; pub(crate) mod config; +pub(crate) mod count; pub(crate) mod cp; pub(crate) mod date; pub(crate) mod debug; +pub(crate) mod echo; pub(crate) mod enter; +pub(crate) mod env; pub(crate) mod exit; +pub(crate) mod fetch; pub(crate) mod first; -pub(crate) mod from_array; pub(crate) mod from_bson; pub(crate) mod from_csv; pub(crate) mod from_ini; pub(crate) mod from_json; pub(crate) mod from_sqlite; +pub(crate) mod from_ssv; pub(crate) mod from_toml; pub(crate) mod from_tsv; +pub(crate) mod from_url; pub(crate) mod from_xml; pub(crate) mod from_yaml; pub(crate) mod get; +pub(crate) mod group_by; pub(crate) mod help; +pub(crate) mod history; pub(crate) mod last; pub(crate) mod lines; pub(crate) mod ls; @@ -35,10 +43,12 @@ pub(crate) mod next; pub(crate) mod nth; pub(crate) mod open; pub(crate) mod pick; +pub(crate) mod pivot; pub(crate) mod plugin; pub(crate) mod post; +pub(crate) mod prepend; pub(crate) mod prev; -pub(crate) mod ps; +pub(crate) mod pwd; pub(crate) mod reject; pub(crate) mod reverse; pub(crate) mod rm; @@ -51,17 +61,16 @@ pub(crate) mod split_column; pub(crate) mod split_row; pub(crate) mod table; pub(crate) mod tags; -pub(crate) mod to_array; pub(crate) mod to_bson; pub(crate) mod to_csv; pub(crate) mod to_json; pub(crate) mod to_sqlite; pub(crate) mod to_toml; pub(crate) mod to_tsv; +pub(crate) mod to_url; pub(crate) mod to_yaml; pub(crate) mod trim; pub(crate) mod version; -pub(crate) mod vtable; pub(crate) mod where_; pub(crate) mod which_; @@ -72,27 +81,36 @@ 
pub(crate) use command::{ UnevaluatedCallInfo, WholeStreamCommand, }; +pub(crate) use append::Append; +pub(crate) use classified::ClassifiedCommand; pub(crate) use config::Config; +pub(crate) use count::Count; pub(crate) use cp::Cpy; pub(crate) use date::Date; pub(crate) use debug::Debug; +pub(crate) use echo::Echo; pub(crate) use enter::Enter; +pub(crate) use env::Env; pub(crate) use exit::Exit; +pub(crate) use fetch::Fetch; pub(crate) use first::First; -pub(crate) use from_array::FromArray; pub(crate) use from_bson::FromBSON; pub(crate) use from_csv::FromCSV; pub(crate) use from_ini::FromINI; pub(crate) use from_json::FromJSON; pub(crate) use from_sqlite::FromDB; pub(crate) use from_sqlite::FromSQLite; +pub(crate) use from_ssv::FromSSV; pub(crate) use from_toml::FromTOML; pub(crate) use from_tsv::FromTSV; +pub(crate) use from_url::FromURL; pub(crate) use from_xml::FromXML; pub(crate) use from_yaml::FromYAML; pub(crate) use from_yaml::FromYML; pub(crate) use get::Get; +pub(crate) use group_by::GroupBy; pub(crate) use help::Help; +pub(crate) use history::History; pub(crate) use last::Last; pub(crate) use lines::Lines; pub(crate) use ls::LS; @@ -102,9 +120,11 @@ pub(crate) use next::Next; pub(crate) use nth::Nth; pub(crate) use open::Open; pub(crate) use pick::Pick; +pub(crate) use pivot::Pivot; pub(crate) use post::Post; +pub(crate) use prepend::Prepend; pub(crate) use prev::Previous; -pub(crate) use ps::PS; +pub(crate) use pwd::PWD; pub(crate) use reject::Reject; pub(crate) use reverse::Reverse; pub(crate) use rm::Remove; @@ -117,7 +137,6 @@ pub(crate) use split_column::SplitColumn; pub(crate) use split_row::SplitRow; pub(crate) use table::Table; pub(crate) use tags::Tags; -pub(crate) use to_array::ToArray; pub(crate) use to_bson::ToBSON; pub(crate) use to_csv::ToCSV; pub(crate) use to_json::ToJSON; @@ -125,9 +144,9 @@ pub(crate) use to_sqlite::ToDB; pub(crate) use to_sqlite::ToSQLite; pub(crate) use to_toml::ToTOML; pub(crate) use to_tsv::ToTSV; +pub(crate) use to_url::ToURL; pub(crate) use to_yaml::ToYAML; pub(crate) use trim::Trim; pub(crate) use version::Version; -pub(crate) use vtable::VTable; pub(crate) use where_::Where; pub(crate) use which_::Which; diff --git a/src/commands/append.rs b/src/commands/append.rs new file mode 100644 index 0000000000..fe22c9065e --- /dev/null +++ b/src/commands/append.rs @@ -0,0 +1,47 @@ +use crate::commands::WholeStreamCommand; +use crate::errors::ShellError; +use crate::parser::CommandRegistry; +use crate::prelude::*; + +#[derive(Deserialize)] +struct AppendArgs { + row: Tagged, +} + +pub struct Append; + +impl WholeStreamCommand for Append { + fn name(&self) -> &str { + "append" + } + + fn signature(&self) -> Signature { + Signature::build("append").required( + "row value", + SyntaxShape::Any, + "the value of the row to append to the table", + ) + } + + fn usage(&self) -> &str { + "Append the given row to the table" + } + + fn run( + &self, + args: CommandArgs, + registry: &CommandRegistry, + ) -> Result { + args.process(registry, append)?.run() + } +} + +fn append( + AppendArgs { row }: AppendArgs, + RunnableContext { input, .. 
}: RunnableContext, +) -> Result { + let mut after: VecDeque> = VecDeque::new(); + after.push_back(row); + + Ok(OutputStream::from_input(input.values.chain(after))) +} diff --git a/src/commands/args.rs b/src/commands/args.rs index c08a075dd2..85329af5a1 100644 --- a/src/commands/args.rs +++ b/src/commands/args.rs @@ -1,4 +1,4 @@ -use crate::object::Value; +use crate::data::Value; #[derive(Debug)] pub enum LogLevel {} diff --git a/src/commands/autoview.rs b/src/commands/autoview.rs index 844a093c4a..4f7d7172a2 100644 --- a/src/commands/autoview.rs +++ b/src/commands/autoview.rs @@ -1,9 +1,14 @@ use crate::commands::{RawCommandArgs, WholeStreamCommand}; use crate::errors::ShellError; +use crate::parser::hir::{Expression, NamedArguments}; use crate::prelude::*; +use futures::stream::TryStreamExt; +use std::sync::atomic::Ordering; pub struct Autoview; +const STREAM_PAGE_SIZE: u64 = 50; + #[derive(Deserialize)] pub struct AutoviewArgs {} @@ -31,68 +36,138 @@ impl WholeStreamCommand for Autoview { pub fn autoview( AutoviewArgs {}: AutoviewArgs, - mut context: RunnableContext, + context: RunnableContext, raw: RawCommandArgs, ) -> Result { - Ok(OutputStream::new(async_stream_block! { - let input = context.input.drain_vec().await; + let binary = context.get_command("binaryview"); + let text = context.get_command("textview"); + let table = context.get_command("table"); - if input.len() > 0 { - if let Tagged { - item: Value::Binary(_), - .. - } = input[0usize] - { - let binary = context.expect_command("binaryview"); - let result = binary.run(raw.with_input(input), &context.commands); - result.collect::>().await; - } else if is_single_text_value(&input) { - let text = context.expect_command("textview"); - let result = text.run(raw.with_input(input), &context.commands); - result.collect::>().await; - } else if equal_shapes(&input) { - let table = context.expect_command("table"); - let result = table.run(raw.with_input(input), &context.commands); - result.collect::>().await; - } else { - let table = context.expect_command("table"); - let result = table.run(raw.with_input(input), &context.commands); - result.collect::>().await; + Ok(OutputStream::new(async_stream! { + let mut output_stream: OutputStream = context.input.into(); + + match output_stream.try_next().await { + Ok(Some(x)) => { + match output_stream.try_next().await { + Ok(Some(y)) => { + let ctrl_c = context.ctrl_c.clone(); + let stream = async_stream! 
{ + yield Ok(x); + yield Ok(y); + + loop { + match output_stream.try_next().await { + Ok(Some(z)) => { + if ctrl_c.load(Ordering::SeqCst) { + break; + } + yield Ok(z); + } + _ => break, + } + } + }; + if let Some(table) = table { + let mut new_output_stream: OutputStream = stream.to_output_stream(); + let mut finished = false; + let mut current_idx = 0; + loop { + let mut new_input = VecDeque::new(); + + for _ in 0..STREAM_PAGE_SIZE { + match new_output_stream.try_next().await { + + Ok(Some(a)) => { + if let ReturnSuccess::Value(v) = a { + new_input.push_back(v); + } + } + _ => { + finished = true; + break; + } + } + } + + let raw = raw.clone(); + + let mut command_args = raw.with_input(new_input.into()); + let mut named_args = NamedArguments::new(); + named_args.insert_optional("start_number", Some(Expression::number(current_idx, Tag::unknown()))); + command_args.call_info.args.named = Some(named_args); + + let result = table.run(command_args, &context.commands, false); + result.collect::>().await; + + if finished { + break; + } else { + current_idx += STREAM_PAGE_SIZE; + } + } + } + } + _ => { + if let ReturnSuccess::Value(x) = x { + match x { + Tagged { + item: Value::Primitive(Primitive::String(ref s)), + tag: Tag { anchor, span }, + } if anchor.is_some() => { + if let Some(text) = text { + let mut stream = VecDeque::new(); + stream.push_back(Value::string(s).tagged(Tag { anchor, span })); + let result = text.run(raw.with_input(stream.into()), &context.commands, false); + result.collect::>().await; + } else { + println!("{}", s); + } + } + Tagged { + item: Value::Primitive(Primitive::String(s)), + .. + } => { + println!("{}", s); + } + + Tagged { item: Value::Primitive(Primitive::Binary(ref b)), .. } => { + if let Some(binary) = binary { + let mut stream = VecDeque::new(); + stream.push_back(x.clone()); + let result = binary.run(raw.with_input(stream.into()), &context.commands, false); + result.collect::>().await; + } else { + use pretty_hex::*; + println!("{:?}", b.hex_dump()); + } + } + + Tagged { item: Value::Error(e), .. } => { + yield Err(e); + } + Tagged { item: ref item, .. } => { + if let Some(table) = table { + let mut stream = VecDeque::new(); + stream.push_back(x.clone()); + let result = table.run(raw.with_input(stream.into()), &context.commands, false); + result.collect::>().await; + } else { + println!("{:?}", item); + } + } + } + } + } + } } + _ => { + //println!(""); + } + } + + // Needed for async_stream to type check + if false { + yield ReturnSuccess::value(Value::nothing().tagged_unknown()); } })) } - -fn equal_shapes(input: &Vec>) -> bool { - let mut items = input.iter(); - - let item = match items.next() { - Some(item) => item, - None => return false, - }; - - let desc = item.data_descriptors(); - - for item in items { - if desc != item.data_descriptors() { - return false; - } - } - - true -} - -fn is_single_text_value(input: &Vec>) -> bool { - if input.len() != 1 { - return false; - } - if let Tagged { - item: Value::Primitive(Primitive::String(_)), - .. 
- } = input[0] - { - true - } else { - false - } -} diff --git a/src/commands/cd.rs b/src/commands/cd.rs index a84e66fce9..65cc45231d 100644 --- a/src/commands/cd.rs +++ b/src/commands/cd.rs @@ -10,8 +10,11 @@ impl WholeStreamCommand for CD { } fn signature(&self) -> Signature { - Signature::build("cd") - .optional("directory", SyntaxType::Path) + Signature::build("cd").optional( + "directory", + SyntaxShape::Path, + "the directory to change to", + ) } fn usage(&self) -> &str { diff --git a/src/commands/classified.rs b/src/commands/classified.rs index 622d92e77f..7204af77c6 100644 --- a/src/commands/classified.rs +++ b/src/commands/classified.rs @@ -1,12 +1,11 @@ -use crate::commands::Command; use crate::parser::{hir, TokenNode}; use crate::prelude::*; use bytes::{BufMut, BytesMut}; +use derive_new::new; use futures::stream::StreamExt; use futures_codec::{Decoder, Encoder, Framed}; use log::{log_enabled, trace}; use std::io::{Error, ErrorKind}; -use std::sync::Arc; use subprocess::Exec; /// A simple `Codec` implementation that splits up data into lines. @@ -73,126 +72,144 @@ impl ClassifiedInputStream { } } +#[derive(Debug)] pub(crate) struct ClassifiedPipeline { pub(crate) commands: Vec, } +#[derive(Debug, Eq, PartialEq)] pub(crate) enum ClassifiedCommand { #[allow(unused)] Expr(TokenNode), Internal(InternalCommand), + #[allow(unused)] + Dynamic(hir::Call), External(ExternalCommand), } +#[derive(new, Debug, Eq, PartialEq)] pub(crate) struct InternalCommand { - pub(crate) command: Arc, - pub(crate) name_span: Span, + pub(crate) name: String, + pub(crate) name_tag: Tag, + pub(crate) args: hir::Call, +} + +#[derive(new, Debug, Eq, PartialEq)] +pub(crate) struct DynamicCommand { pub(crate) args: hir::Call, } impl InternalCommand { - pub(crate) async fn run( + pub(crate) fn run( self, context: &mut Context, input: ClassifiedInputStream, source: Text, + is_first_command: bool, ) -> Result { if log_enabled!(log::Level::Trace) { trace!(target: "nu::run::internal", "->"); - trace!(target: "nu::run::internal", "{}", self.command.name()); + trace!(target: "nu::run::internal", "{}", self.name); trace!(target: "nu::run::internal", "{}", self.args.debug(&source)); } let objects: InputStream = trace_stream!(target: "nu::trace_stream::internal", "input" = input.objects); - let result = context.run_command( - self.command, - self.name_span.clone(), - context.source_map.clone(), - self.args, - &source, - objects, - ); + let command = context.expect_command(&self.name); + + let result = { + context.run_command( + command, + self.name_tag.clone(), + self.args, + &source, + objects, + is_first_command, + ) + }; let result = trace_out_stream!(target: "nu::trace_stream::internal", source: &source, "output" = result); let mut result = result.values; + let mut context = context.clone(); - let mut stream = VecDeque::new(); - while let Some(item) = result.next().await { - match item? { - ReturnSuccess::Action(action) => match action { - CommandAction::ChangePath(path) => { - context.shell_manager.set_path(path); - } - CommandAction::AddSpanSource(uuid, span_source) => { - context.add_span_source(uuid, span_source); - } - CommandAction::Exit => std::process::exit(0), - CommandAction::EnterHelpShell(value) => { - match value { - Tagged { - item: Value::Primitive(Primitive::String(cmd)), - .. 
- } => { - context.shell_manager.insert_at_current(Box::new( - HelpShell::for_command( - Tagged::from_simple_spanned_item( - Value::string(cmd), - Span::unknown(), - ), - &context.registry().clone(), - )?, - )); - } - _ => { - context.shell_manager.insert_at_current(Box::new( - HelpShell::index(&context.registry().clone())?, - )); + let stream = async_stream! { + while let Some(item) = result.next().await { + match item { + Ok(ReturnSuccess::Action(action)) => match action { + CommandAction::ChangePath(path) => { + context.shell_manager.set_path(path); + } + CommandAction::Exit => std::process::exit(0), // TODO: save history.txt + CommandAction::EnterHelpShell(value) => { + match value { + Tagged { + item: Value::Primitive(Primitive::String(cmd)), + tag, + } => { + context.shell_manager.insert_at_current(Box::new( + HelpShell::for_command( + Value::string(cmd).tagged(tag), + &context.registry(), + ).unwrap(), + )); + } + _ => { + context.shell_manager.insert_at_current(Box::new( + HelpShell::index(&context.registry()).unwrap(), + )); + } } } - } - CommandAction::EnterValueShell(value) => { - context - .shell_manager - .insert_at_current(Box::new(ValueShell::new(value))); - } - CommandAction::EnterShell(location) => { - context.shell_manager.insert_at_current(Box::new( - FilesystemShell::with_location(location, context.registry().clone())?, - )); - } - CommandAction::PreviousShell => { - context.shell_manager.prev(); - } - CommandAction::NextShell => { - context.shell_manager.next(); - } - CommandAction::LeaveShell => { - context.shell_manager.remove_at_current(); - if context.shell_manager.is_empty() { - std::process::exit(0); + CommandAction::EnterValueShell(value) => { + context + .shell_manager + .insert_at_current(Box::new(ValueShell::new(value))); } - } - }, + CommandAction::EnterShell(location) => { + context.shell_manager.insert_at_current(Box::new( + FilesystemShell::with_location(location, context.registry().clone()).unwrap(), + )); + } + CommandAction::PreviousShell => { + context.shell_manager.prev(); + } + CommandAction::NextShell => { + context.shell_manager.next(); + } + CommandAction::LeaveShell => { + context.shell_manager.remove_at_current(); + if context.shell_manager.is_empty() { + std::process::exit(0); // TODO: save history.txt + } + } + }, - ReturnSuccess::Value(v) => { - stream.push_back(v); + Ok(ReturnSuccess::Value(v)) => { + yield Ok(v); + } + + Err(x) => { + yield Ok(Value::Error(x).tagged_unknown()); + break; + } } } - } + }; - Ok(stream.into()) + Ok(stream.to_input_stream()) } } +#[derive(Debug, Eq, PartialEq)] pub(crate) struct ExternalCommand { pub(crate) name: String, - pub(crate) name_span: Span, + pub(crate) name_tag: Tag, pub(crate) args: Vec>, } +#[derive(Debug)] pub(crate) enum StreamNext { Last, External, @@ -208,7 +225,6 @@ impl ExternalCommand { ) -> Result { let stdin = input.stdin; let inputs: Vec> = input.objects.into_vec().await; - let name_span = self.name_span.clone(); trace!(target: "nu::run::external", "-> {}", self.name); trace!(target: "nu::run::external", "inputs = {:?}", inputs); @@ -218,115 +234,66 @@ impl ExternalCommand { arg_string.push_str(&arg); } + trace!(target: "nu::run::external", "command = {:?}", self.name); + let mut process; - - #[cfg(windows)] - { - process = Exec::shell(&self.name); - - if arg_string.contains("$it") { - let mut first = true; - - for i in &inputs { - if i.as_string().is_err() { - let mut span = None; - for arg in &self.args { - if arg.item.contains("$it") { - span = Some(arg.span()); - } - } - if let 
Some(span) = span { - return Err(ShellError::labeled_error( + if arg_string.contains("$it") { + let input_strings = inputs + .iter() + .map(|i| { + i.as_string().map_err(|_| { + let arg = self.args.iter().find(|arg| arg.item.contains("$it")); + if let Some(arg) = arg { + ShellError::labeled_error( "External $it needs string data", - "given object instead of string data", - span, - )); + "given row instead of string data", + arg.tag(), + ) } else { - return Err(ShellError::string("Error: $it needs string data")); + ShellError::labeled_error( + "$it needs string data", + "given something else", + self.name_tag.clone(), + ) } - } - if !first { - process = process.arg("&&"); - process = process.arg(&self.name); - } else { - first = false; - } + }) + }) + .collect::, ShellError>>()?; - for arg in &self.args { - if arg.chars().all(|c| c.is_whitespace()) { - continue; - } - - process = process.arg(&arg.replace("$it", &i.as_string()?)); - } - } - } else { - for arg in &self.args { - let arg_chars: Vec<_> = arg.chars().collect(); - if arg_chars.len() > 1 - && arg_chars[0] == '"' - && arg_chars[arg_chars.len() - 1] == '"' - { - // quoted string - let new_arg: String = arg_chars[1..arg_chars.len() - 1].iter().collect(); - process = process.arg(new_arg); + let commands = input_strings.iter().map(|i| { + let args = self.args.iter().filter_map(|arg| { + if arg.chars().all(|c| c.is_whitespace()) { + None } else { - process = process.arg(arg.item.clone()); + Some(arg.replace("$it", &i)) } + }); + + format!("{} {}", self.name, itertools::join(args, " ")) + }); + + process = Exec::shell(itertools::join(commands, " && ")) + } else { + process = Exec::cmd(&self.name); + for arg in &self.args { + let arg_chars: Vec<_> = arg.chars().collect(); + if arg_chars.len() > 1 + && arg_chars[0] == '"' + && arg_chars[arg_chars.len() - 1] == '"' + { + // quoted string + let new_arg: String = arg_chars[1..arg_chars.len() - 1].iter().collect(); + process = process.arg(new_arg); + } else { + process = process.arg(arg.item.clone()); } } } - #[cfg(not(windows))] - { - let mut new_arg_string = self.name.to_string(); - if arg_string.contains("$it") { - let mut first = true; - for i in &inputs { - let i = match i.as_string() { - Err(_err) => { - let mut span = name_span; - for arg in &self.args { - if arg.item.contains("$it") { - span = arg.span(); - } - } - return Err(ShellError::labeled_error( - "External $it needs string data", - "given object instead of string data", - span, - )); - } - Ok(val) => val, - }; - - if !first { - new_arg_string.push_str("&&"); - new_arg_string.push_str(&self.name); - } else { - first = false; - } - - for arg in &self.args { - if arg.chars().all(|c| c.is_whitespace()) { - continue; - } - - new_arg_string.push_str(" "); - new_arg_string.push_str(&arg.replace("$it", &i)); - } - } - } else { - for arg in &self.args { - new_arg_string.push_str(" "); - new_arg_string.push_str(&arg); - } - } - - process = Exec::shell(new_arg_string); - } process = process.cwd(context.shell_manager.path()); + trace!(target: "nu::run::external", "cwd = {:?}", context.shell_manager.path()); + let mut process = match stream_next { StreamNext::Last => process, StreamNext::External | StreamNext::Internal => { @@ -334,32 +301,60 @@ impl ExternalCommand { } }; + trace!(target: "nu::run::external", "set up stdout pipe"); + if let Some(stdin) = stdin { process = process.stdin(stdin); } - let mut popen = process.popen()?; + trace!(target: "nu::run::external", "set up stdin pipe"); + trace!(target: "nu::run::external", "built 
process {:?}", process); - match stream_next { - StreamNext::Last => { - popen.wait()?; - Ok(ClassifiedInputStream::new()) - } - StreamNext::External => { - let stdout = popen.stdout.take().unwrap(); - Ok(ClassifiedInputStream::from_stdout(stdout)) - } - StreamNext::Internal => { - let stdout = popen.stdout.take().unwrap(); - let file = futures::io::AllowStdIo::new(stdout); - let stream = Framed::new(file, LinesCodec {}); - let stream = stream.map(move |line| { - Tagged::from_simple_spanned_item(Value::string(line.unwrap()), name_span) - }); - Ok(ClassifiedInputStream::from_input_stream( - stream.boxed() as BoxStream<'static, Tagged> - )) + let popen = process.popen(); + + trace!(target: "nu::run::external", "next = {:?}", stream_next); + + let name_tag = self.name_tag.clone(); + if let Ok(mut popen) = popen { + match stream_next { + StreamNext::Last => { + let _ = popen.detach(); + loop { + match popen.poll() { + None => { + let _ = std::thread::sleep(std::time::Duration::new(0, 100000000)); + } + _ => { + let _ = popen.terminate(); + break; + } + } + } + Ok(ClassifiedInputStream::new()) + } + StreamNext::External => { + let _ = popen.detach(); + let stdout = popen.stdout.take().unwrap(); + Ok(ClassifiedInputStream::from_stdout(stdout)) + } + StreamNext::Internal => { + let _ = popen.detach(); + let stdout = popen.stdout.take().unwrap(); + let file = futures::io::AllowStdIo::new(stdout); + let stream = Framed::new(file, LinesCodec {}); + let stream = + stream.map(move |line| Value::string(line.unwrap()).tagged(&name_tag)); + Ok(ClassifiedInputStream::from_input_stream( + stream.boxed() as BoxStream<'static, Tagged> + )) + } } + } else { + return Err(ShellError::labeled_error( + "Command not found", + "command not found", + name_tag, + )); } } } diff --git a/src/commands/clip.rs b/src/commands/clip.rs index 9bf7fa9f3a..ac3ded1d4b 100644 --- a/src/commands/clip.rs +++ b/src/commands/clip.rs @@ -5,7 +5,6 @@ pub mod clipboard { use crate::errors::ShellError; use crate::prelude::*; use futures::stream::StreamExt; - use futures_async_stream::async_stream_block; use clipboard::{ClipboardContext, ClipboardProvider}; @@ -40,10 +39,13 @@ pub mod clipboard { ClipArgs {}: ClipArgs, RunnableContext { input, name, .. }: RunnableContext, ) -> Result { - let stream = async_stream_block! { + let stream = async_stream! 
{ let values: Vec> = input.values.collect().await; - inner_clip(values, name).await; + let mut clip_stream = inner_clip(values, name).await; + while let Some(value) = clip_stream.next().await { + yield value; + } }; let stream: BoxStream<'static, ReturnValue> = stream.boxed(); @@ -51,7 +53,7 @@ pub mod clipboard { Ok(OutputStream::from(stream)) } - async fn inner_clip(input: Vec>, name: Span) -> OutputStream { + async fn inner_clip(input: Vec>, name: Tag) -> OutputStream { let mut clip_context: ClipboardContext = ClipboardProvider::new().unwrap(); let mut new_copy_data = String::new(); diff --git a/src/commands/command.rs b/src/commands/command.rs index 67286e98a1..6677dfbd7e 100644 --- a/src/commands/command.rs +++ b/src/commands/command.rs @@ -1,7 +1,6 @@ -use crate::context::{SourceMap, SpanSource}; +use crate::data::Value; use crate::errors::ShellError; use crate::evaluate::Scope; -use crate::object::Value; use crate::parser::hir; use crate::parser::{registry, ConfigDeserializer}; use crate::prelude::*; @@ -11,14 +10,13 @@ use serde::{Deserialize, Serialize}; use std::fmt; use std::ops::Deref; use std::path::PathBuf; -use uuid::Uuid; +use std::sync::atomic::AtomicBool; #[derive(Deserialize, Serialize, Debug, Clone)] pub struct UnevaluatedCallInfo { pub args: hir::Call, pub source: Text, - pub source_map: SourceMap, - pub name_span: Span, + pub name_tag: Tag, } impl ToDebug for UnevaluatedCallInfo { @@ -37,44 +35,15 @@ impl UnevaluatedCallInfo { Ok(CallInfo { args, - source_map: self.source_map, - name_span: self.name_span, + name_tag: self.name_tag, }) } - - pub fn has_it_or_block(&self) -> bool { - use hir::RawExpression; - use hir::Variable; - - if let Some(positional) = &self.args.positional() { - for pos in positional { - match pos { - Tagged { - item: RawExpression::Variable(Variable::It(_)), - .. - } => { - return true; - } - Tagged { - item: RawExpression::Block(_), - .. 
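// A sketch of the yield-forwarding pattern clip.rs switches to above: collect
// the input, hand it to a helper that returns a stream, then re-yield each
// item. Assumes the async-stream and futures crates; the real code uses
// nushell's own async_stream re-export and OutputStream types.
use async_stream::stream;
use futures::stream::{self, Stream, StreamExt};

fn forward(values: Vec<i32>) -> impl Stream<Item = i32> {
    stream! {
        let mut inner = stream::iter(values).map(|v| v * 10); // stand-in for inner_clip
        while let Some(value) = inner.next().await {
            yield value; // forward every item produced by the inner stream
        }
    }
}

fn main() {
    let collected: Vec<i32> = futures::executor::block_on(forward(vec![1, 2, 3]).collect());
    assert_eq!(collected, vec![10, 20, 30]);
}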
- } => { - return true; - } - _ => {} - } - } - } - - false - } } #[derive(Deserialize, Serialize, Debug, Clone)] pub struct CallInfo { pub args: registry::EvaluatedArgs, - pub source_map: SourceMap, - pub name_span: Span, + pub name_tag: Tag, } impl CallInfo { @@ -89,7 +58,7 @@ impl CallInfo { args: T::deserialize(&mut deserializer)?, context: RunnablePerItemContext { shell_manager: shell_manager.clone(), - name: self.name_span, + name: self.name_tag.clone(), }, callback, }) @@ -100,6 +69,7 @@ impl CallInfo { #[get = "pub(crate)"] pub struct CommandArgs { pub host: Arc>, + pub ctrl_c: Arc, pub shell_manager: ShellManager, pub call_info: UnevaluatedCallInfo, pub input: InputStream, @@ -109,6 +79,7 @@ pub struct CommandArgs { #[get = "pub(crate)"] pub struct RawCommandArgs { pub host: Arc>, + pub ctrl_c: Arc, pub shell_manager: ShellManager, pub call_info: UnevaluatedCallInfo, } @@ -117,6 +88,7 @@ impl RawCommandArgs { pub fn with_input(self, input: Vec>) -> CommandArgs { CommandArgs { host: self.host, + ctrl_c: self.ctrl_c, shell_manager: self.shell_manager, call_info: self.call_info, input: input.into(), @@ -136,12 +108,14 @@ impl CommandArgs { registry: ®istry::CommandRegistry, ) -> Result { let host = self.host.clone(); + let ctrl_c = self.ctrl_c.clone(); let shell_manager = self.shell_manager.clone(); let input = self.input; let call_info = self.call_info.evaluate(registry, &Scope::empty())?; Ok(EvaluatedWholeStreamCommandArgs::new( host, + ctrl_c, shell_manager, call_info, input, @@ -154,12 +128,13 @@ impl CommandArgs { callback: fn(T, RunnableContext) -> Result, ) -> Result, ShellError> { let shell_manager = self.shell_manager.clone(); - let source_map = self.call_info.source_map.clone(); let host = self.host.clone(); + let ctrl_c = self.ctrl_c.clone(); let args = self.evaluate_once(registry)?; + let call_info = args.call_info.clone(); let (input, args) = args.split(); - let name_span = args.call_info.name_span; - let mut deserializer = ConfigDeserializer::from_call_info(args.call_info); + let name_tag = args.call_info.name_tag; + let mut deserializer = ConfigDeserializer::from_call_info(call_info); Ok(RunnableArgs { args: T::deserialize(&mut deserializer)?, @@ -167,9 +142,9 @@ impl CommandArgs { input, commands: registry.clone(), shell_manager, - name: name_span, - source_map, + name: name_tag, host, + ctrl_c, }, callback, }) @@ -182,17 +157,20 @@ impl CommandArgs { ) -> Result, ShellError> { let raw_args = RawCommandArgs { host: self.host.clone(), + ctrl_c: self.ctrl_c.clone(), shell_manager: self.shell_manager.clone(), call_info: self.call_info.clone(), }; let shell_manager = self.shell_manager.clone(); - let source_map = self.call_info.source_map.clone(); let host = self.host.clone(); + let ctrl_c = self.ctrl_c.clone(); let args = self.evaluate_once(registry)?; + let call_info = args.call_info.clone(); + let (input, args) = args.split(); - let name_span = args.call_info.name_span; - let mut deserializer = ConfigDeserializer::from_call_info(args.call_info); + let name_tag = args.call_info.name_tag; + let mut deserializer = ConfigDeserializer::from_call_info(call_info.clone()); Ok(RunnableRawArgs { args: T::deserialize(&mut deserializer)?, @@ -200,9 +178,9 @@ impl CommandArgs { input, commands: registry.clone(), shell_manager, - name: name_span, - source_map, + name: name_tag, host, + ctrl_c, }, raw_args, callback, @@ -212,7 +190,7 @@ impl CommandArgs { pub struct RunnablePerItemContext { pub shell_manager: ShellManager, - pub name: Span, + pub name: Tag, } impl 
RunnablePerItemContext { @@ -225,16 +203,14 @@ pub struct RunnableContext { pub input: InputStream, pub shell_manager: ShellManager, pub host: Arc>, + pub ctrl_c: Arc, pub commands: CommandRegistry, - pub source_map: SourceMap, - pub name: Span, + pub name: Tag, } impl RunnableContext { - pub fn expect_command(&self, name: &str) -> Arc { - self.commands - .get_command(name) - .expect(&format!("Expected command {}", name)) + pub fn get_command(&self, name: &str) -> Option> { + self.commands.get_command(name) } } @@ -293,6 +269,7 @@ impl Deref for EvaluatedWholeStreamCommandArgs { impl EvaluatedWholeStreamCommandArgs { pub fn new( host: Arc>, + ctrl_c: Arc, shell_manager: ShellManager, call_info: CallInfo, input: impl Into, @@ -300,6 +277,7 @@ impl EvaluatedWholeStreamCommandArgs { EvaluatedWholeStreamCommandArgs { args: EvaluatedCommandArgs { host, + ctrl_c, shell_manager, call_info, }, @@ -307,8 +285,8 @@ impl EvaluatedWholeStreamCommandArgs { } } - pub fn name_span(&self) -> Span { - self.args.call_info.name_span + pub fn name_tag(&self) -> Tag { + self.args.call_info.name_tag.clone() } pub fn parts(self) -> (InputStream, registry::EvaluatedArgs) { @@ -340,12 +318,14 @@ impl Deref for EvaluatedFilterCommandArgs { impl EvaluatedFilterCommandArgs { pub fn new( host: Arc>, + ctrl_c: Arc, shell_manager: ShellManager, call_info: CallInfo, ) -> EvaluatedFilterCommandArgs { EvaluatedFilterCommandArgs { args: EvaluatedCommandArgs { host, + ctrl_c, shell_manager, call_info, }, @@ -357,6 +337,7 @@ impl EvaluatedFilterCommandArgs { #[get = "pub(crate)"] pub struct EvaluatedCommandArgs { pub host: Arc>, + pub ctrl_c: Arc, pub shell_manager: ShellManager, pub call_info: CallInfo, } @@ -399,7 +380,6 @@ impl EvaluatedCommandArgs { #[derive(Debug, Serialize, Deserialize)] pub enum CommandAction { ChangePath(String), - AddSpanSource(Uuid, SpanSource), Exit, EnterShell(String), EnterValueShell(Tagged), @@ -413,9 +393,6 @@ impl ToDebug for CommandAction { fn fmt_debug(&self, f: &mut fmt::Formatter, _source: &str) -> fmt::Result { match self { CommandAction::ChangePath(s) => write!(f, "action:change-path={}", s), - CommandAction::AddSpanSource(u, source) => { - write!(f, "action:add-span-source={}@{:?}", u, source) - } CommandAction::Exit => write!(f, "action:exit"), CommandAction::EnterShell(s) => write!(f, "action:enter-shell={}", s), CommandAction::EnterValueShell(t) => { @@ -467,12 +444,6 @@ impl ReturnSuccess { pub fn action(input: CommandAction) -> ReturnValue { Ok(ReturnSuccess::Action(input)) } - - pub fn spanned_value(input: Value, span: Span) -> ReturnValue { - Ok(ReturnSuccess::Value(Tagged::from_simple_spanned_item( - input, span, - ))) - } } pub trait WholeStreamCommand: Send + Sync { @@ -496,6 +467,10 @@ pub trait WholeStreamCommand: Send + Sync { args: CommandArgs, registry: ®istry::CommandRegistry, ) -> Result; + + fn is_binary(&self) -> bool { + false + } } pub trait PerItemCommand: Send + Sync { @@ -521,6 +496,10 @@ pub trait PerItemCommand: Send + Sync { raw_args: &RawCommandArgs, input: Tagged, ) -> Result; + + fn is_binary(&self) -> bool { + false + } } pub enum Command { @@ -528,6 +507,15 @@ pub enum Command { PerItem(Arc), } +impl std::fmt::Debug for Command { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + Command::WholeStream(command) => write!(f, "WholeStream({})", command.name()), + Command::PerItem(command) => write!(f, "PerItem({})", command.name()), + } + } +} + impl Command { pub fn name(&self) -> &str { match self { @@ -550,13 +538,20 @@ 
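// A sketch of the ctrl_c: Arc<AtomicBool> plumbing being added throughout this
// file: one shared flag, cloned into each consumer, checked between units of
// work so a long-running stream can stop early. std-only; the real code passes
// the flag through CommandArgs / RunnableContext rather than a bare thread.
use std::sync::atomic::{AtomicBool, Ordering};
use std::sync::Arc;
use std::thread;
use std::time::Duration;

fn main() {
    let ctrl_c = Arc::new(AtomicBool::new(false));

    let worker_flag = ctrl_c.clone();
    let worker = thread::spawn(move || {
        let mut processed = 0usize;
        for _row in 0..10_000_000u64 {
            if worker_flag.load(Ordering::SeqCst) {
                break; // interrupted: stop producing rows
            }
            processed += 1;
        }
        processed
    });

    // Stand-in for the interrupt handler flipping the flag a moment later.
    thread::sleep(Duration::from_millis(5));
    ctrl_c.store(true, Ordering::SeqCst);

    println!("processed {} rows before interrupt", worker.join().unwrap());
}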
impl Command { } } - pub fn run(&self, args: CommandArgs, registry: ®istry::CommandRegistry) -> OutputStream { + pub fn run( + &self, + args: CommandArgs, + registry: ®istry::CommandRegistry, + is_first_command: bool, + ) -> OutputStream { match self { Command::WholeStream(command) => match command.run(args, registry) { Ok(stream) => stream, Err(err) => OutputStream::one(Err(err)), }, - Command::PerItem(command) => self.run_helper(command.clone(), args, registry.clone()), + Command::PerItem(command) => { + self.run_helper(command.clone(), args, registry.clone(), is_first_command) + } } } @@ -565,14 +560,16 @@ impl Command { command: Arc, args: CommandArgs, registry: CommandRegistry, + is_first_command: bool, ) -> OutputStream { let raw_args = RawCommandArgs { host: args.host, + ctrl_c: args.ctrl_c, shell_manager: args.shell_manager, call_info: args.call_info, }; - if raw_args.call_info.has_it_or_block() { + if !is_first_command { let out = args .input .values @@ -592,22 +589,33 @@ impl Command { out.to_output_stream() } else { let nothing = Value::nothing().tagged(Tag::unknown()); + let call_info = raw_args .clone() .call_info - .evaluate(®istry, &Scope::it_value(nothing.clone())) - .unwrap(); - // We don't have an $it or block, so just execute what we have + .evaluate(®istry, &Scope::it_value(nothing.clone())); - match command - .run(&call_info, ®istry, &raw_args, nothing) - .into() - { - Ok(o) => o, + match call_info { + Ok(call_info) => { + match command + .run(&call_info, ®istry, &raw_args, nothing) + .into() + { + Ok(o) => o, + Err(e) => OutputStream::one(Err(e)), + } + } Err(e) => OutputStream::one(Err(e)), } } } + + pub fn is_binary(&self) -> bool { + match self { + Command::WholeStream(command) => command.is_binary(), + Command::PerItem(command) => command.is_binary(), + } + } } pub struct FnFilterCommand { @@ -631,6 +639,7 @@ impl WholeStreamCommand for FnFilterCommand { ) -> Result { let CommandArgs { host, + ctrl_c, shell_manager, call_info, input, @@ -648,8 +657,12 @@ impl WholeStreamCommand for FnFilterCommand { Ok(args) => args, }; - let args = - EvaluatedFilterCommandArgs::new(host.clone(), shell_manager.clone(), call_info); + let args = EvaluatedFilterCommandArgs::new( + host.clone(), + ctrl_c.clone(), + shell_manager.clone(), + call_info, + ); match func(args) { Err(err) => return OutputStream::from(vec![Err(err)]).values, diff --git a/src/commands/config.rs b/src/commands/config.rs index 56cce62270..a85920e455 100644 --- a/src/commands/config.rs +++ b/src/commands/config.rs @@ -1,16 +1,17 @@ -use crate::prelude::*; - use crate::commands::WholeStreamCommand; +use crate::data::{config, Value}; use crate::errors::ShellError; -use crate::object::{config, Value}; -use crate::parser::hir::SyntaxType; +use crate::parser::hir::SyntaxShape; use crate::parser::registry::{self}; +use crate::prelude::*; use std::iter::FromIterator; +use std::path::PathBuf; pub struct Config; #[derive(Deserialize)] pub struct ConfigArgs { + load: Option>, set: Option<(Tagged, Tagged)>, get: Option>, clear: Tagged, @@ -25,11 +26,16 @@ impl WholeStreamCommand for Config { fn signature(&self) -> Signature { Signature::build("config") - .named("set", SyntaxType::Any) - .named("get", SyntaxType::Any) - .named("remove", SyntaxType::Any) - .switch("clear") - .switch("path") + .named( + "load", + SyntaxShape::Path, + "load the config from the path give", + ) + .named("set", SyntaxShape::Any, "set a value in the config") + .named("get", SyntaxShape::Any, "get a value from the config") + .named("remove", 
SyntaxShape::Any, "remove a value from the config") + .switch("clear", "clear the config") + .switch("path", "return the path to the config file") } fn usage(&self) -> &str { @@ -47,6 +53,7 @@ impl WholeStreamCommand for Config { pub fn config( ConfigArgs { + load, set, get, clear, @@ -55,77 +62,78 @@ pub fn config( }: ConfigArgs, RunnableContext { name, .. }: RunnableContext, ) -> Result { - let mut result = crate::object::config::config(name)?; + let name_span = name.clone(); + + let configuration = if let Some(supplied) = load { + Some(supplied.item().clone()) + } else { + None + }; + + let mut result = crate::data::config::read(name_span, &configuration)?; if let Some(v) = get { let key = v.to_string(); let value = result .get(&key) - .ok_or_else(|| ShellError::string(&format!("Missing key {} in config", key)))?; + .ok_or_else(|| ShellError::labeled_error("Missing key in config", "key", v.tag()))?; - return Ok( - stream![value.clone()].into(), // futures::stream::once(futures::future::ready(ReturnSuccess::Value(value.clone()))).into(), - ); + let mut results = VecDeque::new(); + + match value { + Tagged { + item: Value::Table(list), + .. + } => { + for l in list { + results.push_back(ReturnSuccess::value(l.clone())); + } + } + x => results.push_back(ReturnSuccess::value(x.clone())), + } + + return Ok(results.to_output_stream()); } if let Some((key, value)) = set { result.insert(key.to_string(), value.clone()); - config::write_config(&result)?; + config::write(&result, &configuration)?; - return Ok(stream![Tagged::from_simple_spanned_item( - Value::Object(result.into()), - value.span() - )] - .from_input_stream()); + return Ok(stream![Value::Row(result.into()).tagged(value.tag())].from_input_stream()); } - if let Tagged { - item: true, - tag: Tag { span, .. }, - } = clear - { + if let Tagged { item: true, tag } = clear { result.clear(); - config::write_config(&result)?; + config::write(&result, &configuration)?; - return Ok(stream![Tagged::from_simple_spanned_item( - Value::Object(result.into()), - span - )] - .from_input_stream()); + return Ok(stream![Value::Row(result.into()).tagged(tag)].from_input_stream()); } - if let Tagged { - item: true, - tag: Tag { span, .. 
}, - } = path - { - let path = config::config_path()?; + if let Tagged { item: true, tag } = path { + let path = config::default_path_for(&configuration)?; - return Ok(stream![Tagged::from_simple_spanned_item( - Value::Primitive(Primitive::Path(path)), - span - )] - .from_input_stream()); + return Ok(stream![Value::Primitive(Primitive::Path(path)).tagged(tag)].from_input_stream()); } if let Some(v) = remove { let key = v.to_string(); if result.contains_key(&key) { - result.remove(&key); - config::write_config(&result)?; + result.swap_remove(&key); + config::write(&result, &configuration)?; } else { - return Err(ShellError::string(&format!( - "{} does not exist in config", - key - ))); + return Err(ShellError::labeled_error( + "Key does not exist in config", + "key", + v.tag(), + )); } - let obj = VecDeque::from_iter(vec![Value::Object(result.into()).simple_spanned(v.span())]); + let obj = VecDeque::from_iter(vec![Value::Row(result.into()).tagged(v.tag())]); return Ok(obj.from_input_stream()); } - return Ok(vec![Value::Object(result.into()).simple_spanned(name)].into()); + return Ok(vec![Value::Row(result.into()).tagged(name)].into()); } diff --git a/src/commands/count.rs b/src/commands/count.rs new file mode 100644 index 0000000000..5e44283737 --- /dev/null +++ b/src/commands/count.rs @@ -0,0 +1,46 @@ +use crate::commands::WholeStreamCommand; +use crate::data::Value; +use crate::errors::ShellError; +use crate::parser::CommandRegistry; +use crate::prelude::*; +use futures::stream::StreamExt; + +pub struct Count; + +#[derive(Deserialize)] +pub struct CountArgs {} + +impl WholeStreamCommand for Count { + fn name(&self) -> &str { + "count" + } + + fn signature(&self) -> Signature { + Signature::build("count") + } + + fn usage(&self) -> &str { + "Show the total number of rows." + } + + fn run( + &self, + args: CommandArgs, + registry: &CommandRegistry, + ) -> Result { + args.process(registry, count)?.run() + } +} + +pub fn count( + CountArgs {}: CountArgs, + RunnableContext { input, name, .. }: RunnableContext, +) -> Result { + let stream = async_stream! 
{ + let rows: Vec> = input.values.collect().await; + + yield ReturnSuccess::value(Value::int(rows.len()).tagged(name)) + }; + + Ok(stream.to_output_stream()) +} diff --git a/src/commands/cp.rs b/src/commands/cp.rs index 8160fc9d2d..5ca21adb1e 100644 --- a/src/commands/cp.rs +++ b/src/commands/cp.rs @@ -1,6 +1,6 @@ use crate::commands::command::RunnablePerItemContext; use crate::errors::ShellError; -use crate::parser::hir::SyntaxType; +use crate::parser::hir::SyntaxShape; use crate::parser::registry::{CommandRegistry, Signature}; use crate::prelude::*; use std::path::PathBuf; @@ -21,10 +21,9 @@ impl PerItemCommand for Cpy { fn signature(&self) -> Signature { Signature::build("cp") - .required("src", SyntaxType::Path) - .required("dst", SyntaxType::Path) - .named("file", SyntaxType::Any) - .switch("recursive") + .required("src", SyntaxShape::Pattern, "the place to copy from") + .required("dst", SyntaxShape::Path, "the place to copy to") + .switch("recursive", "copy recursively through subdirectories") } fn usage(&self) -> &str { diff --git a/src/commands/date.rs b/src/commands/date.rs index 0e2fee563a..24ebc876e4 100644 --- a/src/commands/date.rs +++ b/src/commands/date.rs @@ -1,5 +1,5 @@ +use crate::data::{Dictionary, Value}; use crate::errors::ShellError; -use crate::object::{Dictionary, Value}; use crate::prelude::*; use chrono::{DateTime, Local, Utc}; @@ -18,8 +18,8 @@ impl WholeStreamCommand for Date { fn signature(&self) -> Signature { Signature::build("date") - .switch("utc") - .switch("local") + .switch("utc", "use universal time (UTC)") + .switch("local", "use the local time") } fn usage(&self) -> &str { @@ -35,58 +35,40 @@ impl WholeStreamCommand for Date { } } -pub fn date_to_value(dt: DateTime, span: Span) -> Tagged +pub fn date_to_value(dt: DateTime, tag: Tag) -> Tagged where T::Offset: Display, { let mut indexmap = IndexMap::new(); - indexmap.insert( - "year".to_string(), - Tagged::from_simple_spanned_item(Value::int(dt.year()), span), - ); - indexmap.insert( - "month".to_string(), - Tagged::from_simple_spanned_item(Value::int(dt.month()), span), - ); - indexmap.insert( - "day".to_string(), - Tagged::from_simple_spanned_item(Value::int(dt.day()), span), - ); - indexmap.insert( - "hour".to_string(), - Tagged::from_simple_spanned_item(Value::int(dt.hour()), span), - ); - indexmap.insert( - "minute".to_string(), - Tagged::from_simple_spanned_item(Value::int(dt.minute()), span), - ); - indexmap.insert( - "second".to_string(), - Tagged::from_simple_spanned_item(Value::int(dt.second()), span), - ); + indexmap.insert("year".to_string(), Value::int(dt.year()).tagged(&tag)); + indexmap.insert("month".to_string(), Value::int(dt.month()).tagged(&tag)); + indexmap.insert("day".to_string(), Value::int(dt.day()).tagged(&tag)); + indexmap.insert("hour".to_string(), Value::int(dt.hour()).tagged(&tag)); + indexmap.insert("minute".to_string(), Value::int(dt.minute()).tagged(&tag)); + indexmap.insert("second".to_string(), Value::int(dt.second()).tagged(&tag)); let tz = dt.offset(); indexmap.insert( "timezone".to_string(), - Tagged::from_simple_spanned_item(Value::string(format!("{}", tz)), span), + Value::string(format!("{}", tz)).tagged(&tag), ); - Tagged::from_simple_spanned_item(Value::Object(Dictionary::from(indexmap)), span) + Value::Row(Dictionary::from(indexmap)).tagged(&tag) } pub fn date(args: CommandArgs, registry: &CommandRegistry) -> Result { let args = args.evaluate_once(registry)?; let mut date_out = VecDeque::new(); - let span = args.call_info.name_span; + let tag = 
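// A sketch of the new `count` command above: drain the input stream and emit a
// single value holding the number of rows. Assumes the futures crate for the
// stream type; the real command wraps this in async_stream! and tags the
// result with the command's name_tag.
use futures::stream::{self, StreamExt};

fn main() {
    let input = stream::iter(vec!["row1", "row2", "row3"]);

    let total = futures::executor::block_on(async {
        let rows: Vec<&str> = input.collect().await;
        rows.len()
    });

    assert_eq!(total, 3);
    println!("count = {}", total);
}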
args.call_info.name_tag.clone(); let value = if args.has("utc") { let utc: DateTime = Utc::now(); - date_to_value(utc, span) + date_to_value(utc, tag) } else { let local: DateTime = Local::now(); - date_to_value(local, span) + date_to_value(local, tag) }; date_out.push_back(value); diff --git a/src/commands/echo.rs b/src/commands/echo.rs new file mode 100644 index 0000000000..db4993d017 --- /dev/null +++ b/src/commands/echo.rs @@ -0,0 +1,71 @@ +use crate::data::Value; +use crate::errors::ShellError; +use crate::prelude::*; + +use crate::parser::registry::Signature; + +pub struct Echo; + +impl PerItemCommand for Echo { + fn name(&self) -> &str { + "echo" + } + + fn signature(&self) -> Signature { + Signature::build("echo").rest(SyntaxShape::Any, "the values to echo") + } + + fn usage(&self) -> &str { + "Echo the arguments back to the user." + } + + fn run( + &self, + call_info: &CallInfo, + registry: &CommandRegistry, + raw_args: &RawCommandArgs, + _input: Tagged, + ) -> Result { + run(call_info, registry, raw_args) + } +} + +fn run( + call_info: &CallInfo, + _registry: &CommandRegistry, + _raw_args: &RawCommandArgs, +) -> Result { + let name = call_info.name_tag.clone(); + + let mut output = String::new(); + + let mut first = true; + + if let Some(ref positional) = call_info.args.positional { + for i in positional { + match i.as_string() { + Ok(s) => { + if !first { + output.push_str(" "); + } else { + first = false; + } + + output.push_str(&s); + } + _ => { + return Err(ShellError::type_error( + "a string-compatible value", + i.tagged_type_name(), + )) + } + } + } + } + + let stream = VecDeque::from(vec![Ok(ReturnSuccess::Value( + Value::string(output).tagged(name), + ))]); + + Ok(stream.to_output_stream()) +} diff --git a/src/commands/enter.rs b/src/commands/enter.rs index dda96ab2e9..59f7ca0f21 100644 --- a/src/commands/enter.rs +++ b/src/commands/enter.rs @@ -14,7 +14,11 @@ impl PerItemCommand for Enter { } fn signature(&self) -> registry::Signature { - Signature::build("enter").required("location", SyntaxType::Block) + Signature::build("enter").required( + "location", + SyntaxShape::Path, + "the location to create a new shell from", + ) } fn usage(&self) -> &str { @@ -32,49 +36,52 @@ impl PerItemCommand for Enter { let raw_args = raw_args.clone(); match call_info.args.expect_nth(0)? { Tagged { - item: Value::Primitive(Primitive::String(location)), + item: Value::Primitive(Primitive::Path(location)), + tag, .. } => { - let location = location.to_string(); - let location_clone = location.to_string(); + let location_string = location.display().to_string(); + let location_clone = location_string.clone(); + let tag_clone = tag.clone(); - if registry.has(&location) { - Ok(vec![Ok(ReturnSuccess::Action(CommandAction::EnterHelpShell( - Value::string(location_clone).tagged(Tag::unknown()), - )))] - .into()) + if location.starts_with("help") { + let spec = location_string.split(":").collect::>(); + + let (_, command) = (spec[0], spec[1]); + + if registry.has(command) { + Ok(vec![Ok(ReturnSuccess::Action(CommandAction::EnterHelpShell( + Value::string(command).tagged(Tag::unknown()), + )))] + .into()) + } else { + Ok(vec![Ok(ReturnSuccess::Action(CommandAction::EnterHelpShell( + Value::nothing().tagged(Tag::unknown()), + )))] + .into()) + } } else if PathBuf::from(location).is_dir() { Ok(vec![Ok(ReturnSuccess::Action(CommandAction::EnterShell( location_clone, )))] .into()) } else { - let stream = async_stream_block! { + let stream = async_stream! 
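// A sketch of the `enter` location handling above: a location starting with
// "help" is split on ':' into a shell spec and a command name; otherwise it is
// treated as a directory or as a file to open as a value shell. std-only and
// slightly defensive (the diff indexes spec[1] directly).
fn classify_location(location: &str) -> String {
    if location.starts_with("help") {
        let spec: Vec<&str> = location.split(':').collect();
        let command = spec.get(1).copied().unwrap_or("");
        format!("enter the help shell for '{}'", command)
    } else if std::path::Path::new(location).is_dir() {
        format!("enter a directory shell at '{}'", location)
    } else {
        format!("open '{}' as a value shell", location)
    }
}

fn main() {
    println!("{}", classify_location("help:open"));
    println!("{}", classify_location("src"));
    println!("{}", classify_location("Cargo.toml"));
}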
{ // If it's a file, attempt to open the file as a value and enter it let cwd = raw_args.shell_manager.path(); let full_path = std::path::PathBuf::from(cwd); - let (file_extension, contents, contents_tag, span_source) = + let (file_extension, contents, contents_tag) = crate::commands::open::fetch( &full_path, &location_clone, - Span::unknown(), - ) - .await.unwrap(); - - if let Some(uuid) = contents_tag.origin { - // If we have loaded something, track its source - yield ReturnSuccess::action(CommandAction::AddSpanSource( - uuid, - span_source, - )); - } - + tag_clone.span, + ).await?; match contents { Value::Primitive(Primitive::String(_)) => { - let tagged_contents = contents.tagged(contents_tag); + let tagged_contents = contents.tagged(&contents_tag); if let Some(extension) = file_extension { let command_name = format!("from-{}", extension); @@ -83,6 +90,7 @@ impl PerItemCommand for Enter { { let new_args = RawCommandArgs { host: raw_args.host, + ctrl_c: raw_args.ctrl_c, shell_manager: raw_args.shell_manager, call_info: UnevaluatedCallInfo { args: crate::parser::hir::Call { @@ -91,13 +99,13 @@ impl PerItemCommand for Enter { named: None, }, source: raw_args.call_info.source, - source_map: raw_args.call_info.source_map, - name_span: raw_args.call_info.name_span, + name_tag: raw_args.call_info.name_tag, }, }; let mut result = converter.run( new_args.with_input(vec![tagged_contents]), ®istry, + false ); let result_vec: Vec> = result.drain_vec().await; @@ -110,7 +118,7 @@ impl PerItemCommand for Enter { yield Ok(ReturnSuccess::Action(CommandAction::EnterValueShell( Tagged { item, - tag: contents_tag, + tag: contents_tag.clone(), }))); } x => yield x, diff --git a/src/commands/env.rs b/src/commands/env.rs new file mode 100644 index 0000000000..0572b499c1 --- /dev/null +++ b/src/commands/env.rs @@ -0,0 +1,76 @@ +use crate::cli::History; +use crate::data::config; +use crate::data::{Dictionary, Value}; +use crate::errors::ShellError; +use crate::prelude::*; +use crate::TaggedDictBuilder; + +use crate::commands::WholeStreamCommand; +use crate::parser::registry::Signature; +use indexmap::IndexMap; + +pub struct Env; + +impl WholeStreamCommand for Env { + fn name(&self) -> &str { + "env" + } + + fn signature(&self) -> Signature { + Signature::build("env") + } + + fn usage(&self) -> &str { + "Get the current environment." 
+ } + + fn run( + &self, + args: CommandArgs, + registry: &CommandRegistry, + ) -> Result { + env(args, registry) + } +} + +pub fn get_environment(tag: Tag) -> Result, Box> { + let mut indexmap = IndexMap::new(); + + let path = std::env::current_dir()?; + indexmap.insert("cwd".to_string(), Value::path(path).tagged(&tag)); + + if let Some(home) = dirs::home_dir() { + indexmap.insert("home".to_string(), Value::path(home).tagged(&tag)); + } + + let config = config::default_path()?; + indexmap.insert("config".to_string(), Value::path(config).tagged(&tag)); + + let history = History::path(); + indexmap.insert("history".to_string(), Value::path(history).tagged(&tag)); + + let temp = std::env::temp_dir(); + indexmap.insert("temp".to_string(), Value::path(temp).tagged(&tag)); + + let mut dict = TaggedDictBuilder::new(&tag); + for v in std::env::vars() { + dict.insert(v.0, Value::string(v.1)); + } + if !dict.is_empty() { + indexmap.insert("vars".to_string(), dict.into_tagged_value()); + } + + Ok(Value::Row(Dictionary::from(indexmap)).tagged(&tag)) +} + +pub fn env(args: CommandArgs, registry: &CommandRegistry) -> Result { + let args = args.evaluate_once(registry)?; + + let mut env_out = VecDeque::new(); + let tag = args.call_info.name_tag.clone(); + + let value = get_environment(tag)?; + env_out.push_back(value); + + Ok(env_out.to_output_stream()) +} diff --git a/src/commands/exit.rs b/src/commands/exit.rs index feed8f7c4f..b7db7cc340 100644 --- a/src/commands/exit.rs +++ b/src/commands/exit.rs @@ -11,8 +11,7 @@ impl WholeStreamCommand for Exit { } fn signature(&self) -> Signature { - Signature::build("exit") - .switch("now") + Signature::build("exit").switch("now", "exit out of the shell immediately") } fn usage(&self) -> &str { diff --git a/src/commands/fetch.rs b/src/commands/fetch.rs new file mode 100644 index 0000000000..703c3279c5 --- /dev/null +++ b/src/commands/fetch.rs @@ -0,0 +1,288 @@ +use crate::commands::UnevaluatedCallInfo; +use crate::context::AnchorLocation; +use crate::data::meta::Span; +use crate::data::Value; +use crate::errors::ShellError; +use crate::parser::hir::SyntaxShape; +use crate::parser::registry::Signature; +use crate::prelude::*; +use mime::Mime; +use std::path::PathBuf; +use std::str::FromStr; +use surf::mime; +pub struct Fetch; + +impl PerItemCommand for Fetch { + fn name(&self) -> &str { + "fetch" + } + + fn signature(&self) -> Signature { + Signature::build(self.name()) + .required( + "path", + SyntaxShape::Path, + "the URL to fetch the contents from", + ) + .switch("raw", "fetch contents as text rather than a table") + } + + fn usage(&self) -> &str { + "Load from a URL into a cell, convert to table if possible (avoid by appending '--raw')" + } + + fn run( + &self, + call_info: &CallInfo, + registry: &CommandRegistry, + raw_args: &RawCommandArgs, + _input: Tagged, + ) -> Result { + run(call_info, registry, raw_args) + } +} + +fn run( + call_info: &CallInfo, + registry: &CommandRegistry, + raw_args: &RawCommandArgs, +) -> Result { + let path = match call_info.args.nth(0).ok_or_else(|| { + ShellError::labeled_error( + "No file or directory specified", + "for command", + &call_info.name_tag, + ) + })? { + file => file, + }; + let path_buf = path.as_path()?; + let path_str = path_buf.display().to_string(); + let path_span = path.tag.span; + let has_raw = call_info.args.has("raw"); + let registry = registry.clone(); + let raw_args = raw_args.clone(); + + let stream = async_stream! 
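// A sketch of get_environment above: assemble one row of useful paths plus the
// process environment variables. std-only; the real code also consults the
// dirs crate for the home directory, nushell's config/history paths, and
// builds the nested "vars" row with TaggedDictBuilder.
use std::collections::BTreeMap;

fn main() -> std::io::Result<()> {
    let mut row: BTreeMap<String, String> = BTreeMap::new();

    row.insert("cwd".into(), std::env::current_dir()?.display().to_string());
    row.insert("temp".into(), std::env::temp_dir().display().to_string());

    // The nested "vars" table, flattened here to a count to keep output short.
    let vars: Vec<(String, String)> = std::env::vars().collect();
    row.insert("vars".into(), format!("{} variables", vars.len()));

    for (key, value) in &row {
        println!("{:8} {}", key, value);
    }
    Ok(())
}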
{ + + let result = fetch(&path_str, path_span).await; + + if let Err(e) = result { + yield Err(e); + return; + } + let (file_extension, contents, contents_tag) = result.unwrap(); + + let file_extension = if has_raw { + None + } else { + // If the extension could not be determined via mimetype, try to use the path + // extension. Some file types do not declare their mimetypes (such as bson files). + file_extension.or(path_str.split('.').last().map(String::from)) + }; + + let tagged_contents = contents.tagged(&contents_tag); + + if let Some(extension) = file_extension { + let command_name = format!("from-{}", extension); + if let Some(converter) = registry.get_command(&command_name) { + let new_args = RawCommandArgs { + host: raw_args.host, + ctrl_c: raw_args.ctrl_c, + shell_manager: raw_args.shell_manager, + call_info: UnevaluatedCallInfo { + args: crate::parser::hir::Call { + head: raw_args.call_info.args.head, + positional: None, + named: None + }, + source: raw_args.call_info.source, + name_tag: raw_args.call_info.name_tag, + } + }; + let mut result = converter.run(new_args.with_input(vec![tagged_contents]), ®istry, false); + let result_vec: Vec> = result.drain_vec().await; + for res in result_vec { + match res { + Ok(ReturnSuccess::Value(Tagged { item: Value::Table(list), ..})) => { + for l in list { + yield Ok(ReturnSuccess::Value(l)); + } + } + Ok(ReturnSuccess::Value(Tagged { item, .. })) => { + yield Ok(ReturnSuccess::Value(Tagged { item, tag: contents_tag.clone() })); + } + x => yield x, + } + } + } else { + yield ReturnSuccess::value(tagged_contents); + } + } else { + yield ReturnSuccess::value(tagged_contents); + } + }; + + Ok(stream.to_output_stream()) +} + +pub async fn fetch(location: &str, span: Span) -> Result<(Option, Value, Tag), ShellError> { + if let Err(_) = url::Url::parse(location) { + return Err(ShellError::labeled_error( + "Incomplete or incorrect url", + "expected a full url", + span, + )); + } + + let response = surf::get(location).await; + match response { + Ok(mut r) => match r.headers().get("content-type") { + Some(content_type) => { + let content_type = Mime::from_str(content_type).unwrap(); + match (content_type.type_(), content_type.subtype()) { + (mime::APPLICATION, mime::XML) => Ok(( + Some("xml".to_string()), + Value::string(r.body_string().await.map_err(|_| { + ShellError::labeled_error( + "Could not load text from remote url", + "could not load", + span, + ) + })?), + Tag { + span, + anchor: Some(AnchorLocation::Url(location.to_string())), + }, + )), + (mime::APPLICATION, mime::JSON) => Ok(( + Some("json".to_string()), + Value::string(r.body_string().await.map_err(|_| { + ShellError::labeled_error( + "Could not load text from remote url", + "could not load", + span, + ) + })?), + Tag { + span, + anchor: Some(AnchorLocation::Url(location.to_string())), + }, + )), + (mime::APPLICATION, mime::OCTET_STREAM) => { + let buf: Vec = r.body_bytes().await.map_err(|_| { + ShellError::labeled_error( + "Could not load binary file", + "could not load", + span, + ) + })?; + Ok(( + None, + Value::binary(buf), + Tag { + span, + anchor: Some(AnchorLocation::Url(location.to_string())), + }, + )) + } + (mime::IMAGE, mime::SVG) => Ok(( + Some("svg".to_string()), + Value::string(r.body_string().await.map_err(|_| { + ShellError::labeled_error( + "Could not load svg from remote url", + "could not load", + span, + ) + })?), + Tag { + span, + anchor: Some(AnchorLocation::Url(location.to_string())), + }, + )), + (mime::IMAGE, image_ty) => { + let buf: Vec = 
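// A sketch of the extension fallback above: if the mimetype did not determine
// a converter, fall back to the last '.'-separated segment of the path (bson
// files, for example, arrive without a useful content-type), and --raw skips
// conversion entirely. std-only.
fn effective_extension(from_mime: Option<String>, path: &str, raw: bool) -> Option<String> {
    if raw {
        None // --raw: hand back the body untouched
    } else {
        from_mime.or(path.split('.').last().map(String::from))
    }
}

fn main() {
    assert_eq!(
        effective_extension(None, "https://example.com/data.bson", false),
        Some("bson".to_string())
    );
    assert_eq!(
        effective_extension(Some("json".into()), "https://example.com/api", false),
        Some("json".to_string())
    );
    assert_eq!(effective_extension(Some("json".into()), "data.json", true), None);
}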
r.body_bytes().await.map_err(|_| { + ShellError::labeled_error( + "Could not load image file", + "could not load", + span, + ) + })?; + Ok(( + Some(image_ty.to_string()), + Value::binary(buf), + Tag { + span, + anchor: Some(AnchorLocation::Url(location.to_string())), + }, + )) + } + (mime::TEXT, mime::HTML) => Ok(( + Some("html".to_string()), + Value::string(r.body_string().await.map_err(|_| { + ShellError::labeled_error( + "Could not load text from remote url", + "could not load", + span, + ) + })?), + Tag { + span, + anchor: Some(AnchorLocation::Url(location.to_string())), + }, + )), + (mime::TEXT, mime::PLAIN) => { + let path_extension = url::Url::parse(location) + .unwrap() + .path_segments() + .and_then(|segments| segments.last()) + .and_then(|name| if name.is_empty() { None } else { Some(name) }) + .and_then(|name| { + PathBuf::from(name) + .extension() + .map(|name| name.to_string_lossy().to_string()) + }); + + Ok(( + path_extension, + Value::string(r.body_string().await.map_err(|_| { + ShellError::labeled_error( + "Could not load text from remote url", + "could not load", + span, + ) + })?), + Tag { + span, + anchor: Some(AnchorLocation::Url(location.to_string())), + }, + )) + } + (ty, sub_ty) => Ok(( + None, + Value::string(format!("Not yet supported MIME type: {} {}", ty, sub_ty)), + Tag { + span, + anchor: Some(AnchorLocation::Url(location.to_string())), + }, + )), + } + } + None => Ok(( + None, + Value::string(format!("No content type found")), + Tag { + span, + anchor: Some(AnchorLocation::Url(location.to_string())), + }, + )), + }, + Err(_) => { + return Err(ShellError::labeled_error( + "URL could not be opened", + "url not found", + span, + )); + } + } +} diff --git a/src/commands/first.rs b/src/commands/first.rs index 6381d5def6..a9a287978a 100644 --- a/src/commands/first.rs +++ b/src/commands/first.rs @@ -7,7 +7,7 @@ pub struct First; #[derive(Deserialize)] pub struct FirstArgs { - amount: Tagged, + rows: Option>, } impl WholeStreamCommand for First { @@ -16,8 +16,11 @@ impl WholeStreamCommand for First { } fn signature(&self) -> Signature { - Signature::build("first") - .required("amount", SyntaxType::Literal) + Signature::build("first").optional( + "rows", + SyntaxShape::Int, + "starting from the front, the number of rows to return", + ) } fn usage(&self) -> &str { @@ -34,8 +37,16 @@ impl WholeStreamCommand for First { } fn first( - FirstArgs { amount }: FirstArgs, + FirstArgs { rows }: FirstArgs, context: RunnableContext, ) -> Result { - Ok(OutputStream::from_input(context.input.values.take(*amount))) + let rows_desired = if let Some(quantity) = rows { + *quantity + } else { + 1 + }; + + Ok(OutputStream::from_input( + context.input.values.take(rows_desired), + )) } diff --git a/src/commands/from_array.rs b/src/commands/from_array.rs deleted file mode 100644 index 93ba87ecea..0000000000 --- a/src/commands/from_array.rs +++ /dev/null @@ -1,43 +0,0 @@ -use crate::commands::WholeStreamCommand; -use crate::object::Value; -use crate::prelude::*; - -pub struct FromArray; - -impl WholeStreamCommand for FromArray { - fn name(&self) -> &str { - "from-array" - } - - fn signature(&self) -> Signature { - Signature::build("from-array") - } - - fn usage(&self) -> &str { - "Expand an array/list into rows" - } - - fn run( - &self, - args: CommandArgs, - registry: &CommandRegistry, - ) -> Result { - from_array(args, registry) - } -} - -fn from_array(args: CommandArgs, _registry: &CommandRegistry) -> Result { - let stream = args - .input - .values - .map(|item| match item { - Tagged { - 
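// A sketch of the content-type dispatch above, using the mime crate (pulled in
// via surf in the diff): parse the header, compare (type, subtype), and return
// the extension whose from-{extension} converter should receive the body; None
// means keep the raw bytes or plain text.
use mime::Mime;
use std::str::FromStr;

fn extension_for(content_type: &str) -> Option<&'static str> {
    let m = Mime::from_str(content_type).ok()?;
    if m.type_() == mime::APPLICATION && m.subtype() == mime::JSON {
        Some("json")
    } else if m.type_() == mime::APPLICATION && m.subtype() == mime::XML {
        Some("xml")
    } else if m.type_() == mime::TEXT && m.subtype() == mime::HTML {
        Some("html")
    } else {
        None // e.g. application/octet-stream stays binary
    }
}

fn main() {
    assert_eq!(extension_for("application/json; charset=utf-8"), Some("json"));
    assert_eq!(extension_for("application/octet-stream"), None);
    // The caller would then look up the command named format!("from-{}", ext).
}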
item: Value::List(vec), - .. - } => VecDeque::from(vec), - x => VecDeque::from(vec![x]), - }) - .flatten(); - - Ok(stream.to_output_stream()) -} diff --git a/src/commands/from_bson.rs b/src/commands/from_bson.rs index 492553e9d9..469e15f35e 100644 --- a/src/commands/from_bson.rs +++ b/src/commands/from_bson.rs @@ -1,6 +1,6 @@ use crate::commands::WholeStreamCommand; +use crate::data::{Primitive, TaggedDictBuilder, Value}; use crate::errors::ExpectedRange; -use crate::object::{Primitive, TaggedDictBuilder, Value}; use crate::prelude::*; use bson::{decode_document, spec::BinarySubtype, Bson}; use std::str::FromStr; @@ -33,7 +33,7 @@ fn bson_array(input: &Vec, tag: Tag) -> Result>, ShellEr let mut out = vec![]; for value in input { - out.push(convert_bson_value_to_nu_value(value, tag)?); + out.push(convert_bson_value_to_nu_value(value, &tag)?); } Ok(out) @@ -46,100 +46,100 @@ fn convert_bson_value_to_nu_value( let tag = tag.into(); Ok(match v { - Bson::FloatingPoint(n) => Value::Primitive(Primitive::from(*n)).tagged(tag), - Bson::String(s) => Value::Primitive(Primitive::String(String::from(s))).tagged(tag), - Bson::Array(a) => Value::List(bson_array(a, tag)?).tagged(tag), + Bson::FloatingPoint(n) => Value::Primitive(Primitive::from(*n)).tagged(&tag), + Bson::String(s) => Value::Primitive(Primitive::String(String::from(s))).tagged(&tag), + Bson::Array(a) => Value::Table(bson_array(a, tag.clone())?).tagged(&tag), Bson::Document(doc) => { - let mut collected = TaggedDictBuilder::new(tag); + let mut collected = TaggedDictBuilder::new(tag.clone()); for (k, v) in doc.iter() { - collected.insert_tagged(k.clone(), convert_bson_value_to_nu_value(v, tag)?); + collected.insert_tagged(k.clone(), convert_bson_value_to_nu_value(v, &tag)?); } collected.into_tagged_value() } - Bson::Boolean(b) => Value::Primitive(Primitive::Boolean(*b)).tagged(tag), - Bson::Null => Value::Primitive(Primitive::Nothing).tagged(tag), + Bson::Boolean(b) => Value::Primitive(Primitive::Boolean(*b)).tagged(&tag), + Bson::Null => Value::Primitive(Primitive::Nothing).tagged(&tag), Bson::RegExp(r, opts) => { - let mut collected = TaggedDictBuilder::new(tag); + let mut collected = TaggedDictBuilder::new(tag.clone()); collected.insert_tagged( "$regex".to_string(), - Value::Primitive(Primitive::String(String::from(r))).tagged(tag), + Value::Primitive(Primitive::String(String::from(r))).tagged(&tag), ); collected.insert_tagged( "$options".to_string(), - Value::Primitive(Primitive::String(String::from(opts))).tagged(tag), + Value::Primitive(Primitive::String(String::from(opts))).tagged(&tag), ); collected.into_tagged_value() } - Bson::I32(n) => Value::number(n).tagged(tag), - Bson::I64(n) => Value::number(n).tagged(tag), + Bson::I32(n) => Value::number(n).tagged(&tag), + Bson::I64(n) => Value::number(n).tagged(&tag), Bson::Decimal128(n) => { // TODO: this really isn't great, and we should update this to do a higher // fidelity translation let decimal = BigDecimal::from_str(&format!("{}", n)).map_err(|_| { ShellError::range_error( ExpectedRange::BigDecimal, - &n.tagged(tag), + &n.tagged(&tag), format!("converting BSON Decimal128 to BigDecimal"), ) })?; - Value::Primitive(Primitive::Decimal(decimal)).tagged(tag) + Value::Primitive(Primitive::Decimal(decimal)).tagged(&tag) } Bson::JavaScriptCode(js) => { - let mut collected = TaggedDictBuilder::new(tag); + let mut collected = TaggedDictBuilder::new(tag.clone()); collected.insert_tagged( "$javascript".to_string(), - Value::Primitive(Primitive::String(String::from(js))).tagged(tag), + 
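// A sketch of the pattern behind the removed from-array command (and of the
// Value::Table flattening the from-* commands now do inline): map each item to
// either its own elements or a one-item collection, then flatten. std-only,
// with a stand-in Value enum.
use std::collections::VecDeque;

enum Value {
    Table(Vec<i32>),
    Int(i32),
}

fn main() {
    let input = vec![Value::Table(vec![1, 2]), Value::Int(3), Value::Table(vec![4])];

    let rows: Vec<i32> = input
        .into_iter()
        .map(|item| match item {
            Value::Table(list) => VecDeque::from(list), // expand a table into its rows
            Value::Int(n) => VecDeque::from(vec![n]),   // scalars pass through unchanged
        })
        .flatten()
        .collect();

    assert_eq!(rows, vec![1, 2, 3, 4]);
}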
Value::Primitive(Primitive::String(String::from(js))).tagged(&tag), ); collected.into_tagged_value() } Bson::JavaScriptCodeWithScope(js, doc) => { - let mut collected = TaggedDictBuilder::new(tag); + let mut collected = TaggedDictBuilder::new(tag.clone()); collected.insert_tagged( "$javascript".to_string(), - Value::Primitive(Primitive::String(String::from(js))).tagged(tag), + Value::Primitive(Primitive::String(String::from(js))).tagged(&tag), ); collected.insert_tagged( "$scope".to_string(), - convert_bson_value_to_nu_value(&Bson::Document(doc.to_owned()), tag)?, + convert_bson_value_to_nu_value(&Bson::Document(doc.to_owned()), tag.clone())?, ); collected.into_tagged_value() } Bson::TimeStamp(ts) => { - let mut collected = TaggedDictBuilder::new(tag); - collected.insert_tagged("$timestamp".to_string(), Value::number(ts).tagged(tag)); + let mut collected = TaggedDictBuilder::new(tag.clone()); + collected.insert_tagged("$timestamp".to_string(), Value::number(ts).tagged(&tag)); collected.into_tagged_value() } Bson::Binary(bst, bytes) => { - let mut collected = TaggedDictBuilder::new(tag); + let mut collected = TaggedDictBuilder::new(tag.clone()); collected.insert_tagged( "$binary_subtype".to_string(), match bst { BinarySubtype::UserDefined(u) => Value::number(u), _ => Value::Primitive(Primitive::String(binary_subtype_to_string(*bst))), } - .tagged(tag), + .tagged(&tag), ); collected.insert_tagged( "$binary".to_string(), - Value::Binary(bytes.to_owned()).tagged(tag), + Value::Primitive(Primitive::Binary(bytes.to_owned())).tagged(&tag), ); collected.into_tagged_value() } Bson::ObjectId(obj_id) => { - let mut collected = TaggedDictBuilder::new(tag); + let mut collected = TaggedDictBuilder::new(tag.clone()); collected.insert_tagged( "$object_id".to_string(), - Value::Primitive(Primitive::String(obj_id.to_hex())).tagged(tag), + Value::Primitive(Primitive::String(obj_id.to_hex())).tagged(&tag), ); collected.into_tagged_value() } - Bson::UtcDatetime(dt) => Value::Primitive(Primitive::Date(*dt)).tagged(tag), + Bson::UtcDatetime(dt) => Value::Primitive(Primitive::Date(*dt)).tagged(&tag), Bson::Symbol(s) => { - let mut collected = TaggedDictBuilder::new(tag); + let mut collected = TaggedDictBuilder::new(tag.clone()); collected.insert_tagged( "$symbol".to_string(), - Value::Primitive(Primitive::String(String::from(s))).tagged(tag), + Value::Primitive(Primitive::String(String::from(s))).tagged(&tag), ); collected.into_tagged_value() } @@ -198,34 +198,34 @@ pub fn from_bson_bytes_to_value( fn from_bson(args: CommandArgs, registry: &CommandRegistry) -> Result { let args = args.evaluate_once(registry)?; - let span = args.name_span(); + let tag = args.name_tag(); let input = args.input; - let stream = async_stream_block! { + let stream = async_stream! 
{ let values: Vec> = input.values.collect().await; for value in values { let value_tag = value.tag(); match value.item { - Value::Binary(vb) => - match from_bson_bytes_to_value(vb, span) { + Value::Primitive(Primitive::Binary(vb)) => + match from_bson_bytes_to_value(vb, tag.clone()) { Ok(x) => yield ReturnSuccess::value(x), Err(_) => { yield Err(ShellError::labeled_error_with_secondary( "Could not parse as BSON", "input cannot be parsed as BSON", - span, + tag.clone(), "value originates from here", - value_tag.span, + value_tag, )) } } _ => yield Err(ShellError::labeled_error_with_secondary( "Expected a string from pipeline", "requires string input", - span, + tag.clone(), "value originates from here", - value_tag.span, + value_tag, )), } diff --git a/src/commands/from_csv.rs b/src/commands/from_csv.rs index 9f4a96da3e..7442a07fc9 100644 --- a/src/commands/from_csv.rs +++ b/src/commands/from_csv.rs @@ -1,5 +1,5 @@ use crate::commands::WholeStreamCommand; -use crate::object::{Primitive, TaggedDictBuilder, Value}; +use crate::data::{Primitive, TaggedDictBuilder, Value}; use crate::prelude::*; use csv::ReaderBuilder; @@ -17,7 +17,7 @@ impl WholeStreamCommand for FromCSV { fn signature(&self) -> Signature { Signature::build("from-csv") - .switch("headerless") + .switch("headerless", "don't treat the first row as column names") } fn usage(&self) -> &str { @@ -63,12 +63,12 @@ pub fn from_csv_string_to_value( if let Some(row_values) = iter.next() { let row_values = row_values?; - let mut row = TaggedDictBuilder::new(tag); + let mut row = TaggedDictBuilder::new(tag.clone()); for (idx, entry) in row_values.iter().enumerate() { row.insert_tagged( fields.get(idx).unwrap(), - Value::Primitive(Primitive::String(String::from(entry))).tagged(tag), + Value::Primitive(Primitive::String(String::from(entry))).tagged(&tag), ); } @@ -78,7 +78,7 @@ pub fn from_csv_string_to_value( } } - Ok(Tagged::from_item(Value::List(rows), tag)) + Ok(Value::Table(rows).tagged(&tag)) } fn from_csv( @@ -87,9 +87,9 @@ fn from_csv( }: FromCSVArgs, RunnableContext { input, name, .. }: RunnableContext, ) -> Result { - let name_span = name; + let name_tag = name; - let stream = async_stream_block! { + let stream = async_stream! { let values: Vec> = input.values.collect().await; let mut concat_string = String::new(); @@ -97,7 +97,7 @@ fn from_csv( for value in values { let value_tag = value.tag(); - latest_tag = Some(value_tag); + latest_tag = Some(value_tag.clone()); match value.item { Value::Primitive(Primitive::String(s)) => { concat_string.push_str(&s); @@ -106,17 +106,17 @@ fn from_csv( _ => yield Err(ShellError::labeled_error_with_secondary( "Expected a string from pipeline", "requires string input", - name_span, + name_tag.clone(), "value originates from here", - value_tag.span, + value_tag.clone(), )), } } - match from_csv_string_to_value(concat_string, skip_headers, name_span) { + match from_csv_string_to_value(concat_string, skip_headers, name_tag.clone()) { Ok(x) => match x { - Tagged { item: Value::List(list), .. } => { + Tagged { item: Value::Table(list), .. 
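// A sketch of from_csv_string_to_value above, headered case only: read the
// text with the csv crate's ReaderBuilder, take the first record as column
// names, and build one (column, value) row per remaining record. The real code
// also handles --headerless (generating Column1, Column2, ...) and produces
// Tagged<Value> rows instead of plain tuples.
use csv::ReaderBuilder;

fn csv_to_rows(text: &str) -> Result<Vec<Vec<(String, String)>>, csv::Error> {
    let mut reader = ReaderBuilder::new().from_reader(text.as_bytes());
    let fields: Vec<String> = reader.headers()?.iter().map(String::from).collect();

    let mut rows = vec![];
    for record in reader.records() {
        let record = record?;
        rows.push(
            fields
                .iter()
                .cloned()
                .zip(record.iter().map(String::from))
                .collect(),
        );
    }
    Ok(rows)
}

fn main() -> Result<(), csv::Error> {
    let rows = csv_to_rows("name,version\nnu,0.3.0\n")?;
    assert_eq!(rows[0][0], ("name".to_string(), "nu".to_string()));
    println!("{:?}", rows);
    Ok(())
}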
} => { for l in list { yield ReturnSuccess::value(l); } @@ -127,9 +127,9 @@ fn from_csv( yield Err(ShellError::labeled_error_with_secondary( "Could not parse as CSV", "input cannot be parsed as CSV", - name_span, + name_tag.clone(), "value originates from here", - last_tag.span, + last_tag.clone(), )) } , } diff --git a/src/commands/from_ini.rs b/src/commands/from_ini.rs index 8495c4b221..e55bbd45c4 100644 --- a/src/commands/from_ini.rs +++ b/src/commands/from_ini.rs @@ -1,5 +1,5 @@ use crate::commands::WholeStreamCommand; -use crate::object::{Primitive, TaggedDictBuilder, Value}; +use crate::data::{Primitive, TaggedDictBuilder, Value}; use crate::prelude::*; use std::collections::HashMap; @@ -45,10 +45,13 @@ fn convert_ini_top_to_nu_value( tag: impl Into, ) -> Tagged { let tag = tag.into(); - let mut top_level = TaggedDictBuilder::new(tag); + let mut top_level = TaggedDictBuilder::new(tag.clone()); for (key, value) in v.iter() { - top_level.insert_tagged(key.clone(), convert_ini_second_to_nu_value(value, tag)); + top_level.insert_tagged( + key.clone(), + convert_ini_second_to_nu_value(value, tag.clone()), + ); } top_level.into_tagged_value() @@ -64,10 +67,10 @@ pub fn from_ini_string_to_value( fn from_ini(args: CommandArgs, registry: &CommandRegistry) -> Result { let args = args.evaluate_once(registry)?; - let span = args.name_span(); + let tag = args.name_tag(); let input = args.input; - let stream = async_stream_block! { + let stream = async_stream! { let values: Vec> = input.values.collect().await; let mut concat_string = String::new(); @@ -75,7 +78,7 @@ fn from_ini(args: CommandArgs, registry: &CommandRegistry) -> Result { concat_string.push_str(&s); @@ -84,17 +87,17 @@ fn from_ini(args: CommandArgs, registry: &CommandRegistry) -> Result yield Err(ShellError::labeled_error_with_secondary( "Expected a string from pipeline", "requires string input", - span, + &tag, "value originates from here", - value_tag.span, + &value_tag, )), } } - match from_ini_string_to_value(concat_string, span) { + match from_ini_string_to_value(concat_string, tag.clone()) { Ok(x) => match x { - Tagged { item: Value::List(list), .. } => { + Tagged { item: Value::Table(list), .. 
} => { for l in list { yield ReturnSuccess::value(l); } @@ -105,9 +108,9 @@ fn from_ini(args: CommandArgs, registry: &CommandRegistry) -> Result Signature { - Signature::build("from-json") - .switch("objects") + Signature::build("from-json").switch("objects", "treat each line as a separate value") } fn usage(&self) -> &str { @@ -36,24 +35,24 @@ fn convert_json_value_to_nu_value(v: &serde_hjson::Value, tag: impl Into) - let tag = tag.into(); match v { - serde_hjson::Value::Null => Value::Primitive(Primitive::Nothing).tagged(tag), - serde_hjson::Value::Bool(b) => Value::boolean(*b).tagged(tag), - serde_hjson::Value::F64(n) => Value::number(n).tagged(tag), - serde_hjson::Value::U64(n) => Value::number(n).tagged(tag), - serde_hjson::Value::I64(n) => Value::number(n).tagged(tag), + serde_hjson::Value::Null => Value::Primitive(Primitive::Nothing).tagged(&tag), + serde_hjson::Value::Bool(b) => Value::boolean(*b).tagged(&tag), + serde_hjson::Value::F64(n) => Value::number(n).tagged(&tag), + serde_hjson::Value::U64(n) => Value::number(n).tagged(&tag), + serde_hjson::Value::I64(n) => Value::number(n).tagged(&tag), serde_hjson::Value::String(s) => { - Value::Primitive(Primitive::String(String::from(s))).tagged(tag) + Value::Primitive(Primitive::String(String::from(s))).tagged(&tag) } - serde_hjson::Value::Array(a) => Value::List( + serde_hjson::Value::Array(a) => Value::Table( a.iter() - .map(|x| convert_json_value_to_nu_value(x, tag)) + .map(|x| convert_json_value_to_nu_value(x, &tag)) .collect(), ) .tagged(tag), serde_hjson::Value::Object(o) => { - let mut collected = TaggedDictBuilder::new(tag); + let mut collected = TaggedDictBuilder::new(&tag); for (k, v) in o.iter() { - collected.insert_tagged(k.clone(), convert_json_value_to_nu_value(v, tag)); + collected.insert_tagged(k.clone(), convert_json_value_to_nu_value(v, &tag)); } collected.into_tagged_value() @@ -73,9 +72,9 @@ fn from_json( FromJSONArgs { objects }: FromJSONArgs, RunnableContext { input, name, .. }: RunnableContext, ) -> Result { - let name_span = name; + let name_tag = name; - let stream = async_stream_block! { + let stream = async_stream! { let values: Vec> = input.values.collect().await; let mut concat_string = String::new(); @@ -83,7 +82,7 @@ fn from_json( for value in values { let value_tag = value.tag(); - latest_tag = Some(value_tag); + latest_tag = Some(value_tag.clone()); match value.item { Value::Primitive(Primitive::String(s)) => { concat_string.push_str(&s); @@ -92,9 +91,9 @@ fn from_json( _ => yield Err(ShellError::labeled_error_with_secondary( "Expected a string from pipeline", "requires string input", - name_span, + &name_tag, "value originates from here", - value_tag.span, + &value_tag, )), } @@ -107,26 +106,26 @@ fn from_json( continue; } - match from_json_string_to_value(json_str.to_string(), name_span) { + match from_json_string_to_value(json_str.to_string(), &name_tag) { Ok(x) => yield ReturnSuccess::value(x), Err(_) => { - if let Some(last_tag) = latest_tag { + if let Some(ref last_tag) = latest_tag { yield Err(ShellError::labeled_error_with_secondary( "Could nnot parse as JSON", "input cannot be parsed as JSON", - name_span, + &name_tag, "value originates from here", - last_tag.span)) + last_tag)) } } } } } else { - match from_json_string_to_value(concat_string, name_span) { + match from_json_string_to_value(concat_string, name_tag.clone()) { Ok(x) => match x { - Tagged { item: Value::List(list), .. } => { + Tagged { item: Value::Table(list), .. 
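// A sketch of the recursive conversion in from-json above, written against
// serde_json (the diff uses serde_hjson, whose Value variants are very close):
// null, booleans, numbers and strings become primitives, arrays become tables,
// objects become rows.
use serde_json::Value as Json;

#[derive(Debug)]
enum Nu {
    Nothing,
    Boolean(bool),
    Number(f64),
    String(String),
    Table(Vec<Nu>),
    Row(Vec<(String, Nu)>),
}

fn convert(v: &Json) -> Nu {
    match v {
        Json::Null => Nu::Nothing,
        Json::Bool(b) => Nu::Boolean(*b),
        Json::Number(n) => Nu::Number(n.as_f64().unwrap_or(0.0)),
        Json::String(s) => Nu::String(s.clone()),
        Json::Array(a) => Nu::Table(a.iter().map(convert).collect()),
        Json::Object(o) => Nu::Row(o.iter().map(|(k, v)| (k.clone(), convert(v))).collect()),
    }
}

fn main() {
    let parsed: Json = serde_json::from_str(r#"{"name":"nu","deps":["surf","csv"]}"#).unwrap();
    println!("{:?}", convert(&parsed));
}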
} => { for l in list { yield ReturnSuccess::value(l); } @@ -138,9 +137,9 @@ fn from_json( yield Err(ShellError::labeled_error_with_secondary( "Could not parse as JSON", "input cannot be parsed as JSON", - name_span, + name_tag, "value originates from here", - last_tag.span)) + last_tag)) } } } diff --git a/src/commands/from_sqlite.rs b/src/commands/from_sqlite.rs index 9aca8a222e..7b93dc1633 100644 --- a/src/commands/from_sqlite.rs +++ b/src/commands/from_sqlite.rs @@ -1,6 +1,6 @@ use crate::commands::WholeStreamCommand; +use crate::data::{Primitive, TaggedDictBuilder, Value}; use crate::errors::ShellError; -use crate::object::{Primitive, TaggedDictBuilder, Value}; use crate::prelude::*; use rusqlite::{types::ValueRef, Connection, Row, NO_PARAMS}; use std::io::Write; @@ -76,11 +76,11 @@ pub fn convert_sqlite_file_to_nu_value( "table_name".to_string(), Value::Primitive(Primitive::String(table_name)).tagged(tag.clone()), ); - meta_dict.insert_tagged("table_values", Value::List(out).tagged(tag.clone())); + meta_dict.insert_tagged("table_values", Value::Table(out).tagged(tag.clone())); meta_out.push(meta_dict.into_tagged_value()); } let tag = tag.into(); - Ok(Value::List(meta_out).tagged(tag)) + Ok(Value::Table(meta_out).tagged(tag)) } fn convert_sqlite_row_to_nu_value( @@ -106,7 +106,7 @@ fn convert_sqlite_value_to_nu_value(value: ValueRef, tag: impl Into + Clone // this unwrap is safe because we know the ValueRef is Text. Value::Primitive(Primitive::String(t.as_str().unwrap().to_string())).tagged(tag) } - ValueRef::Blob(u) => Value::Binary(u.to_owned()).tagged(tag), + ValueRef::Blob(u) => Value::binary(u.to_owned()).tagged(tag), } } @@ -128,19 +128,19 @@ pub fn from_sqlite_bytes_to_value( fn from_sqlite(args: CommandArgs, registry: &CommandRegistry) -> Result { let args = args.evaluate_once(registry)?; - let span = args.name_span(); + let tag = args.name_tag(); let input = args.input; - let stream = async_stream_block! { + let stream = async_stream! { let values: Vec> = input.values.collect().await; for value in values { let value_tag = value.tag(); match value.item { - Value::Binary(vb) => - match from_sqlite_bytes_to_value(vb, span) { + Value::Primitive(Primitive::Binary(vb)) => + match from_sqlite_bytes_to_value(vb, tag.clone()) { Ok(x) => match x { - Tagged { item: Value::List(list), .. } => { + Tagged { item: Value::Table(list), .. 
} => { for l in list { yield ReturnSuccess::value(l); } @@ -151,18 +151,18 @@ fn from_sqlite(args: CommandArgs, registry: &CommandRegistry) -> Result yield Err(ShellError::labeled_error_with_secondary( "Expected a string from pipeline", "requires string input", - span, + &tag, "value originates from here", - value_tag.span, + value_tag, )), } diff --git a/src/commands/from_ssv.rs b/src/commands/from_ssv.rs new file mode 100644 index 0000000000..aaf6018fb7 --- /dev/null +++ b/src/commands/from_ssv.rs @@ -0,0 +1,320 @@ +use crate::commands::WholeStreamCommand; +use crate::data::{Primitive, TaggedDictBuilder, Value}; +use crate::prelude::*; + +pub struct FromSSV; + +#[derive(Deserialize)] +pub struct FromSSVArgs { + headerless: bool, + #[serde(rename(deserialize = "minimum-spaces"))] + minimum_spaces: Option>, +} + +const STRING_REPRESENTATION: &str = "from-ssv"; +const DEFAULT_MINIMUM_SPACES: usize = 2; + +impl WholeStreamCommand for FromSSV { + fn name(&self) -> &str { + STRING_REPRESENTATION + } + + fn signature(&self) -> Signature { + Signature::build(STRING_REPRESENTATION) + .switch("headerless", "don't treat the first row as column names") + .named( + "minimum-spaces", + SyntaxShape::Int, + "the mininum spaces to separate columns", + ) + } + + fn usage(&self) -> &str { + "Parse text as space-separated values and create a table. The default minimum number of spaces counted as a separator is 2." + } + + fn run( + &self, + args: CommandArgs, + registry: &CommandRegistry, + ) -> Result { + args.process(registry, from_ssv)?.run() + } +} + +fn string_to_table( + s: &str, + headerless: bool, + split_at: usize, +) -> Option>> { + let mut lines = s.lines().filter(|l| !l.trim().is_empty()); + let separator = " ".repeat(std::cmp::max(split_at, 1)); + + let headers_raw = lines.next()?; + + let headers = headers_raw + .trim() + .split(&separator) + .map(str::trim) + .filter(|s| !s.is_empty()) + .map(|s| (headers_raw.find(s).unwrap(), s.to_owned())); + + let columns = if headerless { + headers + .enumerate() + .map(|(header_no, (string_index, _))| { + (string_index, format!("Column{}", header_no + 1)) + }) + .collect::>() + } else { + headers.collect::>() + }; + + Some( + lines + .map(|l| { + columns + .iter() + .enumerate() + .filter_map(|(i, (start, col))| { + (match columns.get(i + 1) { + Some((end, _)) => l.get(*start..*end), + None => l.get(*start..), + }) + .and_then(|s| Some((col.clone(), String::from(s.trim())))) + }) + .collect() + }) + .collect(), + ) +} + +fn from_ssv_string_to_value( + s: &str, + headerless: bool, + split_at: usize, + tag: impl Into, +) -> Option> { + let tag = tag.into(); + let rows = string_to_table(s, headerless, split_at)? + .iter() + .map(|row| { + let mut tagged_dict = TaggedDictBuilder::new(&tag); + for (col, entry) in row { + tagged_dict.insert_tagged( + col, + Value::Primitive(Primitive::String(String::from(entry))).tagged(&tag), + ) + } + tagged_dict.into_tagged_value() + }) + .collect(); + + Some(Value::Table(rows).tagged(&tag)) +} + +fn from_ssv( + FromSSVArgs { + headerless, + minimum_spaces, + }: FromSSVArgs, + RunnableContext { input, name, .. }: RunnableContext, +) -> Result { + let stream = async_stream! 
{ + let values: Vec> = input.values.collect().await; + let mut concat_string = String::new(); + let mut latest_tag: Option = None; + let split_at = match minimum_spaces { + Some(number) => number.item, + None => DEFAULT_MINIMUM_SPACES + }; + + for value in values { + let value_tag = value.tag(); + latest_tag = Some(value_tag.clone()); + match value.item { + Value::Primitive(Primitive::String(s)) => { + concat_string.push_str(&s); + } + _ => yield Err(ShellError::labeled_error_with_secondary ( + "Expected a string from pipeline", + "requires string input", + &name, + "value originates from here", + &value_tag + )), + } + } + + match from_ssv_string_to_value(&concat_string, headerless, split_at, name.clone()) { + Some(x) => match x { + Tagged { item: Value::Table(list), ..} => { + for l in list { yield ReturnSuccess::value(l) } + } + x => yield ReturnSuccess::value(x) + }, + None => if let Some(tag) = latest_tag { + yield Err(ShellError::labeled_error_with_secondary( + "Could not parse as SSV", + "input cannot be parsed ssv", + &name, + "value originates from here", + &tag, + )) + }, + } + }; + + Ok(stream.to_output_stream()) +} + +#[cfg(test)] +mod tests { + use super::*; + fn owned(x: &str, y: &str) -> (String, String) { + (String::from(x), String::from(y)) + } + + #[test] + fn it_trims_empty_and_whitespace_only_lines() { + let input = r#" + + a b + + 1 2 + + 3 4 + "#; + let result = string_to_table(input, false, 1); + assert_eq!( + result, + Some(vec![ + vec![owned("a", "1"), owned("b", "2")], + vec![owned("a", "3"), owned("b", "4")] + ]) + ); + } + + #[test] + fn it_deals_with_single_column_input() { + let input = r#" + a + 1 + 2 + "#; + let result = string_to_table(input, false, 1); + assert_eq!( + result, + Some(vec![vec![owned("a", "1")], vec![owned("a", "2")]]) + ); + } + + #[test] + fn it_ignores_headers_when_headerless() { + let input = r#" + a b + 1 2 + 3 4 + "#; + let result = string_to_table(input, true, 1); + assert_eq!( + result, + Some(vec![ + vec![owned("Column1", "1"), owned("Column2", "2")], + vec![owned("Column1", "3"), owned("Column2", "4")] + ]) + ); + } + + #[test] + fn it_returns_none_given_an_empty_string() { + let input = ""; + let result = string_to_table(input, true, 1); + assert!(result.is_none()); + } + + #[test] + fn it_allows_a_predefined_number_of_spaces() { + let input = r#" + column a column b + entry 1 entry number 2 + 3 four + "#; + + let result = string_to_table(input, false, 3); + assert_eq!( + result, + Some(vec![ + vec![ + owned("column a", "entry 1"), + owned("column b", "entry number 2") + ], + vec![owned("column a", "3"), owned("column b", "four")] + ]) + ); + } + + #[test] + fn it_trims_remaining_separator_space() { + let input = r#" + colA colB colC + val1 val2 val3 + "#; + + let trimmed = |s: &str| s.trim() == s; + + let result = string_to_table(input, false, 2).unwrap(); + assert!(result + .iter() + .all(|row| row.iter().all(|(a, b)| trimmed(a) && trimmed(b)))) + } + + #[test] + fn it_keeps_empty_columns() { + let input = r#" + colA col B col C + val2 val3 + val4 val 5 val 6 + val7 val8 + "#; + + let result = string_to_table(input, false, 2).unwrap(); + assert_eq!( + result, + vec![ + vec![ + owned("colA", ""), + owned("col B", "val2"), + owned("col C", "val3") + ], + vec![ + owned("colA", "val4"), + owned("col B", "val 5"), + owned("col C", "val 6") + ], + vec![ + owned("colA", "val7"), + owned("col B", ""), + owned("col C", "val8") + ], + ] + ) + } + + #[test] + fn it_uses_the_full_final_column() { + let input = r#" + colA col B + val1 val2 
trailing value that should be included + "#; + + let result = string_to_table(input, false, 2).unwrap(); + assert_eq!( + result, + vec![vec![ + owned("colA", "val1"), + owned("col B", "val2 trailing value that should be included"), + ],] + ) + } +} diff --git a/src/commands/from_toml.rs b/src/commands/from_toml.rs index 3d1d92fb67..2cfd059165 100644 --- a/src/commands/from_toml.rs +++ b/src/commands/from_toml.rs @@ -1,5 +1,5 @@ use crate::commands::WholeStreamCommand; -use crate::object::{Primitive, TaggedDictBuilder, Value}; +use crate::data::{Primitive, TaggedDictBuilder, Value}; use crate::prelude::*; pub struct FromTOML; @@ -34,9 +34,9 @@ pub fn convert_toml_value_to_nu_value(v: &toml::Value, tag: impl Into) -> T toml::Value::Integer(n) => Value::number(n).tagged(tag), toml::Value::Float(n) => Value::number(n).tagged(tag), toml::Value::String(s) => Value::Primitive(Primitive::String(String::from(s))).tagged(tag), - toml::Value::Array(a) => Value::List( + toml::Value::Array(a) => Value::Table( a.iter() - .map(|x| convert_toml_value_to_nu_value(x, tag)) + .map(|x| convert_toml_value_to_nu_value(x, &tag)) .collect(), ) .tagged(tag), @@ -44,10 +44,10 @@ pub fn convert_toml_value_to_nu_value(v: &toml::Value, tag: impl Into) -> T Value::Primitive(Primitive::String(dt.to_string())).tagged(tag) } toml::Value::Table(t) => { - let mut collected = TaggedDictBuilder::new(tag); + let mut collected = TaggedDictBuilder::new(&tag); for (k, v) in t.iter() { - collected.insert_tagged(k.clone(), convert_toml_value_to_nu_value(v, tag)); + collected.insert_tagged(k.clone(), convert_toml_value_to_nu_value(v, &tag)); } collected.into_tagged_value() @@ -68,10 +68,10 @@ pub fn from_toml( registry: &CommandRegistry, ) -> Result { let args = args.evaluate_once(registry)?; - let span = args.name_span(); + let tag = args.name_tag(); let input = args.input; - let stream = async_stream_block! { + let stream = async_stream! { let values: Vec> = input.values.collect().await; let mut concat_string = String::new(); @@ -79,7 +79,7 @@ pub fn from_toml( for value in values { let value_tag = value.tag(); - latest_tag = Some(value_tag); + latest_tag = Some(value_tag.clone()); match value.item { Value::Primitive(Primitive::String(s)) => { concat_string.push_str(&s); @@ -88,17 +88,17 @@ pub fn from_toml( _ => yield Err(ShellError::labeled_error_with_secondary( "Expected a string from pipeline", "requires string input", - span, + &tag, "value originates from here", - value_tag.span, + &value_tag, )), } } - match from_toml_string_to_value(concat_string, span) { + match from_toml_string_to_value(concat_string, tag.clone()) { Ok(x) => match x { - Tagged { item: Value::List(list), .. } => { + Tagged { item: Value::Table(list), .. 
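// A minimal standalone sketch of the idea behind string_to_table in from_ssv.rs above:
// record each header's starting byte offset, then slice every later line at those same
// offsets, which is how ragged rows and empty cells stay in the right column (as the
// tests above check). `split_columns` is an illustrative name; the real implementation
// also handles the --headerless and --minimum-spaces options shown in the diff.
fn split_columns(text: &str, min_spaces: usize) -> Vec<Vec<(String, String)>> {
    let mut lines = text.lines().filter(|l| !l.trim().is_empty());
    let separator = " ".repeat(min_spaces.max(1));
    let header_line = match lines.next() {
        Some(l) => l,
        None => return vec![],
    };
    // (start offset, header name) pairs, keyed by where each header begins in the line.
    let headers: Vec<(usize, String)> = header_line
        .trim()
        .split(&separator)
        .map(str::trim)
        .filter(|h| !h.is_empty())
        .map(|h| (header_line.find(h).unwrap(), h.to_string()))
        .collect();
    lines
        .map(|line| {
            headers
                .iter()
                .enumerate()
                .filter_map(|(i, (start, name))| {
                    // Each cell runs from this header's offset to the next header's offset.
                    let cell = match headers.get(i + 1) {
                        Some((end, _)) => line.get(*start..*end),
                        None => line.get(*start..),
                    };
                    cell.map(|c| (name.clone(), c.trim().to_string()))
                })
                .collect()
        })
        .collect()
}

fn main() {
    let rows = split_columns("colA  col B\nval1  val2", 2);
    assert_eq!(rows[0][0], ("colA".to_string(), "val1".to_string()));
    println!("{:?}", rows);
}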
} => { for l in list { yield ReturnSuccess::value(l); } @@ -109,9 +109,9 @@ pub fn from_toml( yield Err(ShellError::labeled_error_with_secondary( "Could not parse as TOML", "input cannot be parsed as TOML", - span, + &tag, "value originates from here", - last_tag.span, + last_tag, )) } , } diff --git a/src/commands/from_tsv.rs b/src/commands/from_tsv.rs index 366621d435..2284e95573 100644 --- a/src/commands/from_tsv.rs +++ b/src/commands/from_tsv.rs @@ -1,5 +1,5 @@ use crate::commands::WholeStreamCommand; -use crate::object::{Primitive, TaggedDictBuilder, Value}; +use crate::data::{Primitive, TaggedDictBuilder, Value}; use crate::prelude::*; use csv::ReaderBuilder; @@ -16,7 +16,8 @@ impl WholeStreamCommand for FromTSV { } fn signature(&self) -> Signature { - Signature::build("from-tsv").switch("headerless") + Signature::build("from-tsv") + .switch("headerless", "don't treat the first row as column names") } fn usage(&self) -> &str { @@ -63,12 +64,12 @@ pub fn from_tsv_string_to_value( if let Some(row_values) = iter.next() { let row_values = row_values?; - let mut row = TaggedDictBuilder::new(tag); + let mut row = TaggedDictBuilder::new(&tag); for (idx, entry) in row_values.iter().enumerate() { row.insert_tagged( fields.get(idx).unwrap(), - Value::Primitive(Primitive::String(String::from(entry))).tagged(tag), + Value::Primitive(Primitive::String(String::from(entry))).tagged(&tag), ); } @@ -78,7 +79,7 @@ pub fn from_tsv_string_to_value( } } - Ok(Tagged::from_item(Value::List(rows), tag)) + Ok(Value::Table(rows).tagged(&tag)) } fn from_tsv( @@ -87,9 +88,9 @@ fn from_tsv( }: FromTSVArgs, RunnableContext { input, name, .. }: RunnableContext, ) -> Result { - let name_span = name; + let name_tag = name; - let stream = async_stream_block! { + let stream = async_stream! { let values: Vec> = input.values.collect().await; let mut concat_string = String::new(); @@ -97,7 +98,7 @@ fn from_tsv( for value in values { let value_tag = value.tag(); - latest_tag = Some(value_tag); + latest_tag = Some(value_tag.clone()); match value.item { Value::Primitive(Primitive::String(s)) => { concat_string.push_str(&s); @@ -106,17 +107,17 @@ fn from_tsv( _ => yield Err(ShellError::labeled_error_with_secondary( "Expected a string from pipeline", "requires string input", - name_span, + &name_tag, "value originates from here", - value_tag.span, + &value_tag, )), } } - match from_tsv_string_to_value(concat_string, skip_headers, name_span) { + match from_tsv_string_to_value(concat_string, skip_headers, name_tag.clone()) { Ok(x) => match x { - Tagged { item: Value::List(list), .. } => { + Tagged { item: Value::Table(list), .. } => { for l in list { yield ReturnSuccess::value(l); } @@ -127,9 +128,9 @@ fn from_tsv( yield Err(ShellError::labeled_error_with_secondary( "Could not parse as TSV", "input cannot be parsed as TSV", - name_span, + &name_tag, "value originates from here", - last_tag.span, + &last_tag, )) } , } diff --git a/src/commands/from_url.rs b/src/commands/from_url.rs new file mode 100644 index 0000000000..ad23ea5b53 --- /dev/null +++ b/src/commands/from_url.rs @@ -0,0 +1,85 @@ +use crate::commands::WholeStreamCommand; +use crate::data::{Primitive, TaggedDictBuilder, Value}; +use crate::prelude::*; + +pub struct FromURL; + +impl WholeStreamCommand for FromURL { + fn name(&self) -> &str { + "from-url" + } + + fn signature(&self) -> Signature { + Signature::build("from-url") + } + + fn usage(&self) -> &str { + "Parse url-encoded string as a table." 
+ } + + fn run( + &self, + args: CommandArgs, + registry: &CommandRegistry, + ) -> Result { + from_url(args, registry) + } +} + +fn from_url(args: CommandArgs, registry: &CommandRegistry) -> Result { + let args = args.evaluate_once(registry)?; + let tag = args.name_tag(); + let input = args.input; + + let stream = async_stream! { + let values: Vec> = input.values.collect().await; + + let mut concat_string = String::new(); + let mut latest_tag: Option = None; + + for value in values { + let value_tag = value.tag(); + latest_tag = Some(value_tag.clone()); + match value.item { + Value::Primitive(Primitive::String(s)) => { + concat_string.push_str(&s); + } + _ => yield Err(ShellError::labeled_error_with_secondary( + "Expected a string from pipeline", + "requires string input", + &tag, + "value originates from here", + &value_tag, + )), + + } + } + + let result = serde_urlencoded::from_str::>(&concat_string); + + match result { + Ok(result) => { + let mut row = TaggedDictBuilder::new(tag); + + for (k,v) in result { + row.insert(k, Value::string(v)); + } + + yield ReturnSuccess::value(row.into_tagged_value()); + } + _ => { + if let Some(last_tag) = latest_tag { + yield Err(ShellError::labeled_error_with_secondary( + "String not compatible with url-encoding", + "input not url-encoded", + tag, + "value originates from here", + last_tag, + )); + } + } + } + }; + + Ok(stream.to_output_stream()) +} diff --git a/src/commands/from_xml.rs b/src/commands/from_xml.rs index 4e4be72bc2..e99e5664e5 100644 --- a/src/commands/from_xml.rs +++ b/src/commands/from_xml.rs @@ -1,5 +1,5 @@ use crate::commands::WholeStreamCommand; -use crate::object::{Primitive, TaggedDictBuilder, Value}; +use crate::data::{Primitive, TaggedDictBuilder, Value}; use crate::prelude::*; pub struct FromXML; @@ -34,7 +34,7 @@ fn from_node_to_value<'a, 'd>(n: &roxmltree::Node<'a, 'd>, tag: impl Into) let mut children_values = vec![]; for c in n.children() { - children_values.push(from_node_to_value(&c, tag)); + children_values.push(from_node_to_value(&c, &tag)); } let children_values: Vec> = children_values @@ -55,7 +55,7 @@ fn from_node_to_value<'a, 'd>(n: &roxmltree::Node<'a, 'd>, tag: impl Into) .collect(); let mut collected = TaggedDictBuilder::new(tag); - collected.insert(name.clone(), Value::List(children_values)); + collected.insert(name.clone(), Value::Table(children_values)); collected.into_tagged_value() } else if n.is_comment() { @@ -83,10 +83,10 @@ pub fn from_xml_string_to_value( fn from_xml(args: CommandArgs, registry: &CommandRegistry) -> Result { let args = args.evaluate_once(registry)?; - let span = args.name_span(); + let tag = args.name_tag(); let input = args.input; - let stream = async_stream_block! { + let stream = async_stream! { let values: Vec> = input.values.collect().await; let mut concat_string = String::new(); @@ -94,7 +94,7 @@ fn from_xml(args: CommandArgs, registry: &CommandRegistry) -> Result { concat_string.push_str(&s); @@ -103,17 +103,17 @@ fn from_xml(args: CommandArgs, registry: &CommandRegistry) -> Result yield Err(ShellError::labeled_error_with_secondary( "Expected a string from pipeline", "requires string input", - span, + &tag, "value originates from here", - value_tag.span, + &value_tag, )), } } - match from_xml_string_to_value(concat_string, span) { + match from_xml_string_to_value(concat_string, tag.clone()) { Ok(x) => match x { - Tagged { item: Value::List(list), .. } => { + Tagged { item: Value::Table(list), .. 
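// A minimal standalone sketch of the parsing step from-url relies on above: the collected
// string is handed to serde_urlencoded and each key=value pair becomes one column of a
// single output row. This assumes the serde_urlencoded crate used by the command is
// available as a dependency; the body string below is just sample data.
fn main() {
    let body = "bread=baguette&cheese=comt%C3%A9";
    // The same call shape the command uses: a Vec of (key, value) pairs.
    match serde_urlencoded::from_str::<Vec<(String, String)>>(body) {
        Ok(pairs) => {
            for (k, v) in pairs {
                println!("{} = {}", k, v);
            }
        }
        Err(e) => eprintln!("input not url-encoded: {}", e),
    }
}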
} => { for l in list { yield ReturnSuccess::value(l); } @@ -124,9 +124,9 @@ fn from_xml(args: CommandArgs, registry: &CommandRegistry) -> Result Result) -> Tagged { + Value::string(input.into()).tagged_unknown() + } + + fn row(entries: IndexMap>) -> Tagged { + Value::row(entries).tagged_unknown() + } + + fn table(list: &Vec>) -> Tagged { + Value::table(list).tagged_unknown() + } + + fn parse(xml: &str) -> Tagged { + from_xml::from_xml_string_to_value(xml.to_string(), Tag::unknown()).unwrap() + } + + #[test] + fn parses_empty_element() { + let source = ""; + + assert_eq!( + parse(source), + row(indexmap! { + "nu".into() => table(&vec![]) + }) + ); + } + + #[test] + fn parses_element_with_text() { + let source = "La era de los tres caballeros"; + + assert_eq!( + parse(source), + row(indexmap! { + "nu".into() => table(&vec![string("La era de los tres caballeros")]) + }) + ); + } + + #[test] + fn parses_element_with_elements() { + let source = "\ + + Andrés + Jonathan + Yehuda +"; + + assert_eq!( + parse(source), + row(indexmap! { + "nu".into() => table(&vec![ + row(indexmap! {"dev".into() => table(&vec![string("Andrés")])}), + row(indexmap! {"dev".into() => table(&vec![string("Jonathan")])}), + row(indexmap! {"dev".into() => table(&vec![string("Yehuda")])}) + ]) + }) + ); + } +} diff --git a/src/commands/from_yaml.rs b/src/commands/from_yaml.rs index 3bac9d3f81..3ba8033ddf 100644 --- a/src/commands/from_yaml.rs +++ b/src/commands/from_yaml.rs @@ -1,5 +1,5 @@ use crate::commands::WholeStreamCommand; -use crate::object::{Primitive, TaggedDictBuilder, Value}; +use crate::data::{Primitive, TaggedDictBuilder, Value}; use crate::prelude::*; pub struct FromYAML; @@ -62,19 +62,19 @@ fn convert_yaml_value_to_nu_value(v: &serde_yaml::Value, tag: impl Into) -> Value::Primitive(Primitive::from(n.as_f64().unwrap())).tagged(tag) } serde_yaml::Value::String(s) => Value::string(s).tagged(tag), - serde_yaml::Value::Sequence(a) => Value::List( + serde_yaml::Value::Sequence(a) => Value::Table( a.iter() - .map(|x| convert_yaml_value_to_nu_value(x, tag)) + .map(|x| convert_yaml_value_to_nu_value(x, &tag)) .collect(), ) .tagged(tag), serde_yaml::Value::Mapping(t) => { - let mut collected = TaggedDictBuilder::new(tag); + let mut collected = TaggedDictBuilder::new(&tag); for (k, v) in t.iter() { match k { serde_yaml::Value::String(k) => { - collected.insert_tagged(k.clone(), convert_yaml_value_to_nu_value(v, tag)); + collected.insert_tagged(k.clone(), convert_yaml_value_to_nu_value(v, &tag)); } _ => unimplemented!("Unknown key type"), } @@ -97,10 +97,10 @@ pub fn from_yaml_string_to_value( fn from_yaml(args: CommandArgs, registry: &CommandRegistry) -> Result { let args = args.evaluate_once(registry)?; - let span = args.name_span(); + let tag = args.name_tag(); let input = args.input; - let stream = async_stream_block! { + let stream = async_stream! { let values: Vec> = input.values.collect().await; let mut concat_string = String::new(); @@ -108,7 +108,7 @@ fn from_yaml(args: CommandArgs, registry: &CommandRegistry) -> Result { concat_string.push_str(&s); @@ -117,17 +117,17 @@ fn from_yaml(args: CommandArgs, registry: &CommandRegistry) -> Result yield Err(ShellError::labeled_error_with_secondary( "Expected a string from pipeline", "requires string input", - span, + &tag, "value originates from here", - value_tag.span, + &value_tag, )), } } - match from_yaml_string_to_value(concat_string, span) { + match from_yaml_string_to_value(concat_string, tag.clone()) { Ok(x) => match x { - Tagged { item: Value::List(list), .. 
} => { + Tagged { item: Value::Table(list), .. } => { for l in list { yield ReturnSuccess::value(l); } @@ -138,9 +138,9 @@ fn from_yaml(args: CommandArgs, registry: &CommandRegistry) -> Result>, + member: ColumnPath, + rest: Vec, } impl WholeStreamCommand for Get { @@ -16,7 +19,16 @@ impl WholeStreamCommand for Get { } fn signature(&self) -> Signature { - Signature::build("get").rest(SyntaxType::Member) + Signature::build("get") + .required( + "member", + SyntaxShape::ColumnPath, + "the path to the data to get", + ) + .rest( + SyntaxShape::ColumnPath, + "optionally return additional data by path", + ) } fn usage(&self) -> &str { @@ -32,24 +44,41 @@ impl WholeStreamCommand for Get { } } -fn get_member(path: &Tagged, obj: &Tagged) -> Result, ShellError> { +pub type ColumnPath = Vec>; + +pub fn get_column_path( + path: &ColumnPath, + obj: &Tagged, +) -> Result, ShellError> { let mut current = Some(obj); - for p in path.split(".") { + for p in path.iter() { if let Some(obj) = current { - current = match obj.get_data_by_key(p) { + current = match obj.get_data_by_key(&p) { Some(v) => Some(v), None => // Before we give up, see if they gave us a path that matches a field name by itself { - match obj.get_data_by_key(&path.item) { - Some(v) => return Ok(v.clone()), - None => { - return Err(ShellError::labeled_error( - "Unknown column", - "table missing column", - path.span(), - )); - } + let possibilities = obj.data_descriptors(); + + let mut possible_matches: Vec<_> = possibilities + .iter() + .map(|x| (natural::distance::levenshtein_distance(x, &p), x)) + .collect(); + + possible_matches.sort(); + + if possible_matches.len() > 0 { + return Err(ShellError::labeled_error( + "Unknown column", + format!("did you mean '{}'?", possible_matches[0].1), + tag_for_tagged_list(path.iter().map(|p| p.tag())), + )); + } else { + return Err(ShellError::labeled_error( + "Unknown column", + "row does not contain this column", + tag_for_tagged_list(path.iter().map(|p| p.tag())), + )); } } } @@ -58,22 +87,46 @@ fn get_member(path: &Tagged, obj: &Tagged) -> Result Ok(v.clone()), - None => Ok(Value::nothing().tagged(obj.tag)), + None => match obj { + // If its None check for certain values. + Tagged { + item: Value::Primitive(Primitive::String(_)), + .. + } => Ok(obj.clone()), + Tagged { + item: Value::Primitive(Primitive::Path(_)), + .. + } => Ok(obj.clone()), + _ => Ok(Value::nothing().tagged(&obj.tag)), + }, } } pub fn get( - GetArgs { rest: fields }: GetArgs, + GetArgs { + member, + rest: fields, + }: GetArgs, RunnableContext { input, .. }: RunnableContext, ) -> Result { + trace!("get {:?} {:?}", member, fields); + let stream = input .values .map(move |item| { let mut result = VecDeque::new(); - for field in &fields { - match get_member(field, &item) { + + let member = vec![member.clone()]; + + let fields = vec![&member, &fields] + .into_iter() + .flatten() + .collect::>(); + + for column_path in &fields { + match get_column_path(column_path, &item) { Ok(Tagged { - item: Value::List(l), + item: Value::Table(l), .. 
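// A minimal standalone sketch of the "did you mean" suggestion that get_column_path above
// (and group-by below) builds into its "Unknown column" errors: compute the edit distance
// from the requested name to every column the row actually has, sort, and suggest the
// closest one. The hand-rolled `levenshtein` and `suggest_column` helpers are illustrative;
// the diff itself calls natural::distance::levenshtein_distance.
fn levenshtein(a: &str, b: &str) -> usize {
    let a: Vec<char> = a.chars().collect();
    let b: Vec<char> = b.chars().collect();
    let mut prev: Vec<usize> = (0..=b.len()).collect();
    for (i, ca) in a.iter().enumerate() {
        let mut cur = vec![i + 1];
        for (j, cb) in b.iter().enumerate() {
            let cost = if ca == cb { 0 } else { 1 };
            cur.push((prev[j + 1] + 1).min(cur[j] + 1).min(prev[j] + cost));
        }
        prev = cur;
    }
    prev[b.len()]
}

fn suggest_column<'a>(wanted: &str, available: &'a [String]) -> Option<&'a String> {
    // Same shape as the diff: map to (distance, name), sort, take the first entry.
    let mut ranked: Vec<(usize, &String)> = available
        .iter()
        .map(|name| (levenshtein(wanted, name), name))
        .collect();
    ranked.sort();
    ranked.first().map(|(_, name)| *name)
}

fn main() {
    let columns = vec!["name".to_string(), "size".to_string(), "modified".to_string()];
    // "sise" is closest to "size", so that is what the error message would suggest.
    assert_eq!(suggest_column("sise", &columns), Some(&"size".to_string()));
    println!("did you mean '{}'?", suggest_column("sise", &columns).unwrap());
}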
}) => { for item in l { diff --git a/src/commands/group_by.rs b/src/commands/group_by.rs new file mode 100644 index 0000000000..f36d3f57dd --- /dev/null +++ b/src/commands/group_by.rs @@ -0,0 +1,94 @@ +use crate::commands::WholeStreamCommand; +use crate::data::TaggedDictBuilder; +use crate::errors::ShellError; +use crate::prelude::*; + +pub struct GroupBy; + +#[derive(Deserialize)] +pub struct GroupByArgs { + column_name: Tagged, +} + +impl WholeStreamCommand for GroupBy { + fn name(&self) -> &str { + "group-by" + } + + fn signature(&self) -> Signature { + Signature::build("group-by").required( + "column_name", + SyntaxShape::String, + "the name of the column to group by", + ) + } + + fn usage(&self) -> &str { + "Creates a new table with the data from the table rows grouped by the column given." + } + + fn run( + &self, + args: CommandArgs, + registry: &CommandRegistry, + ) -> Result { + args.process(registry, group_by)?.run() + } +} + +fn group_by( + GroupByArgs { column_name }: GroupByArgs, + RunnableContext { input, name, .. }: RunnableContext, +) -> Result { + let stream = async_stream! { + let values: Vec> = input.values.collect().await; + let mut groups = indexmap::IndexMap::new(); + + for value in values { + let group_key = value.get_data_by_key(&column_name.item); + + if group_key.is_none() { + + let possibilities = value.data_descriptors(); + + let mut possible_matches: Vec<_> = possibilities + .iter() + .map(|x| (natural::distance::levenshtein_distance(x, &column_name.item), x)) + .collect(); + + possible_matches.sort(); + + let err = { + if possible_matches.len() > 0 { + ShellError::labeled_error( + "Unknown column", + format!("did you mean '{}'?", possible_matches[0].1), + &column_name.tag,) + } else { + ShellError::labeled_error( + "Unknown column", + "row does not contain this column", + &column_name.tag, + ) + } + }; + + yield Err(err) + } else { + let group_key = group_key.unwrap().as_string()?; + let mut group = groups.entry(group_key).or_insert(vec![]); + group.push(value); + } + } + + let mut out = TaggedDictBuilder::new(name.clone()); + + for (k,v) in groups.iter() { + out.insert(k, Value::table(v)); + } + + yield ReturnSuccess::value(out) + }; + + Ok(stream.to_output_stream()) +} diff --git a/src/commands/help.rs b/src/commands/help.rs index def2f33385..d5f755f67d 100644 --- a/src/commands/help.rs +++ b/src/commands/help.rs @@ -1,7 +1,7 @@ -use crate::commands::command::CommandAction; use crate::commands::PerItemCommand; +use crate::data::{command_dict, TaggedDictBuilder}; use crate::errors::ShellError; -use crate::parser::registry; +use crate::parser::registry::{self, NamedType, PositionalType}; use crate::prelude::*; pub struct Help; @@ -12,7 +12,7 @@ impl PerItemCommand for Help { } fn signature(&self) -> registry::Signature { - Signature::build("help").rest(SyntaxType::Any) + Signature::build("help").rest(SyntaxShape::Any, "the name of command(s) to get help on") } fn usage(&self) -> &str { @@ -22,34 +22,149 @@ impl PerItemCommand for Help { fn run( &self, call_info: &CallInfo, - _registry: &CommandRegistry, + registry: &CommandRegistry, _raw_args: &RawCommandArgs, _input: Tagged, ) -> Result { - let span = call_info.name_span; + let tag = &call_info.name_tag; - if call_info.args.len() == 0 { - return Ok( - vec![ - Ok(ReturnSuccess::Action( - CommandAction::EnterHelpShell( - Tagged::from_simple_spanned_item(Value::nothing(), span) - )))].into() - ) - } - - match call_info.args.expect_nth(0)? 
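// A minimal standalone sketch of the bucketing that group-by above performs: whole rows are
// grouped by the string value found in the chosen column, with missing columns reported as
// an error. The diff uses indexmap::IndexMap to keep insertion order; a BTreeMap is used
// here only to stay dependency-free, and `Row`/`group_rows` are illustrative names.
use std::collections::BTreeMap;

type Row = Vec<(String, String)>;

fn group_rows(rows: Vec<Row>, column: &str) -> Result<BTreeMap<String, Vec<Row>>, String> {
    let mut groups: BTreeMap<String, Vec<Row>> = BTreeMap::new();
    for row in rows {
        // Mirrors get_data_by_key: look the column up by name, error if it is missing.
        let key = row
            .iter()
            .find(|(name, _)| name.as_str() == column)
            .map(|(_, value)| value.clone())
            .ok_or_else(|| format!("row does not contain column '{}'", column))?;
        groups.entry(key).or_insert_with(Vec::new).push(row);
    }
    Ok(groups)
}

fn main() {
    let rows = vec![
        vec![("type".to_string(), "Dir".to_string()), ("name".to_string(), "src".to_string())],
        vec![("type".to_string(), "File".to_string()), ("name".to_string(), "README.md".to_string())],
        vec![("type".to_string(), "Dir".to_string()), ("name".to_string(), "docs".to_string())],
    ];
    let groups = group_rows(rows, "type").unwrap();
    assert_eq!(groups["Dir"].len(), 2);
    println!("{:?}", groups.keys().collect::<Vec<_>>());
}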
{ - Tagged { + match call_info.args.nth(0) { + Some(Tagged { item: Value::Primitive(Primitive::String(document)), - .. - } => Ok(vec![Ok(ReturnSuccess::Action(CommandAction::EnterHelpShell( - Tagged::from_simple_spanned_item(Value::string(document), span) - )))] - .into()), - x => Ok( - vec![Ok(ReturnSuccess::Action(CommandAction::EnterHelpShell(x.clone())))] - .into(), - ), + tag, + }) => { + let mut help = VecDeque::new(); + if document == "commands" { + let mut sorted_names = registry.names(); + sorted_names.sort(); + for cmd in sorted_names { + let mut short_desc = TaggedDictBuilder::new(tag.clone()); + let value = command_dict(registry.get_command(&cmd).unwrap(), tag.clone()); + + short_desc.insert("name", cmd); + short_desc.insert( + "description", + value.get_data_by_key("usage").unwrap().as_string().unwrap(), + ); + + help.push_back(ReturnSuccess::value(short_desc.into_tagged_value())); + } + } else { + if let Some(command) = registry.get_command(document) { + let mut long_desc = String::new(); + + long_desc.push_str(&command.usage()); + long_desc.push_str("\n"); + + let signature = command.signature(); + + let mut one_liner = String::new(); + one_liner.push_str(&signature.name); + one_liner.push_str(" "); + if signature.named.len() > 0 { + one_liner.push_str("{flags} "); + } + + for positional in &signature.positional { + match &positional.0 { + PositionalType::Mandatory(name, _m) => { + one_liner.push_str(&format!("<{}> ", name)); + } + PositionalType::Optional(name, _o) => { + one_liner.push_str(&format!("({}) ", name)); + } + } + } + + if signature.rest_positional.is_some() { + one_liner.push_str(&format!(" ...args",)); + } + + long_desc.push_str(&format!("\nUsage:\n > {}\n", one_liner)); + + if signature.positional.len() > 0 || signature.rest_positional.is_some() { + long_desc.push_str("\nparameters:\n"); + for positional in signature.positional { + match positional.0 { + PositionalType::Mandatory(name, _m) => { + long_desc + .push_str(&format!(" <{}> {}\n", name, positional.1)); + } + PositionalType::Optional(name, _o) => { + long_desc + .push_str(&format!(" ({}) {}\n", name, positional.1)); + } + } + } + if signature.rest_positional.is_some() { + long_desc.push_str(&format!( + " ...args{} {}\n", + if signature.rest_positional.is_some() { + ":" + } else { + "" + }, + signature.rest_positional.unwrap().1 + )); + } + } + if signature.named.len() > 0 { + long_desc.push_str("\nflags:\n"); + for (flag, ty) in signature.named { + match ty.0 { + NamedType::Switch => { + long_desc.push_str(&format!( + " --{}{} {}\n", + flag, + if ty.1.len() > 0 { ":" } else { "" }, + ty.1 + )); + } + NamedType::Mandatory(m) => { + long_desc.push_str(&format!( + " --{} <{}> (required parameter){} {}\n", + flag, + m, + if ty.1.len() > 0 { ":" } else { "" }, + ty.1 + )); + } + NamedType::Optional(o) => { + long_desc.push_str(&format!( + " --{} <{}>{} {}\n", + flag, + o, + if ty.1.len() > 0 { ":" } else { "" }, + ty.1 + )); + } + } + } + } + + help.push_back(ReturnSuccess::value( + Value::string(long_desc).tagged(tag.clone()), + )); + } + } + + Ok(help.to_output_stream()) + } + _ => { + let msg = r#"Welcome to Nushell. + +Here are some tips to help you get started. 
+ * help commands - list all available commands + * help - display help about a particular command + +You can also learn more at https://book.nushell.sh"#; + + let mut output_stream = VecDeque::new(); + + output_stream.push_back(ReturnSuccess::value(Value::string(msg).tagged(tag))); + + Ok(output_stream.to_output_stream()) + } } } } diff --git a/src/commands/history.rs b/src/commands/history.rs new file mode 100644 index 0000000000..fdc6d655a2 --- /dev/null +++ b/src/commands/history.rs @@ -0,0 +1,49 @@ +use crate::cli::History as HistoryFile; +use crate::commands::PerItemCommand; +use crate::errors::ShellError; +use crate::parser::registry::{self}; +use crate::prelude::*; +use std::fs::File; +use std::io::{BufRead, BufReader}; + +pub struct History; + +impl PerItemCommand for History { + fn name(&self) -> &str { + "history" + } + + fn signature(&self) -> registry::Signature { + Signature::build("history") + } + + fn usage(&self) -> &str { + "Display command history." + } + + fn run( + &self, + call_info: &CallInfo, + _registry: &CommandRegistry, + _raw_args: &RawCommandArgs, + _input: Tagged, + ) -> Result { + let tag = call_info.name_tag.clone(); + + let stream = async_stream! { + let history_path = HistoryFile::path(); + let file = File::open(history_path); + if let Ok(file) = file { + let reader = BufReader::new(file); + for line in reader.lines() { + if let Ok(line) = line { + yield ReturnSuccess::value(Value::string(line).tagged(tag.clone())); + } + } + } else { + yield Err(ShellError::labeled_error("Could not open history", "history file could not be opened", tag.clone())); + } + }; + Ok(stream.to_output_stream()) + } +} diff --git a/src/commands/last.rs b/src/commands/last.rs index 1f9cc62a7f..abb10f5fce 100644 --- a/src/commands/last.rs +++ b/src/commands/last.rs @@ -7,7 +7,7 @@ pub struct Last; #[derive(Deserialize)] pub struct LastArgs { - amount: Tagged, + rows: Option>, } impl WholeStreamCommand for Last { @@ -16,7 +16,11 @@ impl WholeStreamCommand for Last { } fn signature(&self) -> Signature { - Signature::build("last").required("amount", SyntaxType::Number) + Signature::build("last").optional( + "rows", + SyntaxShape::Number, + "starting from the back, the number of rows to return", + ) } fn usage(&self) -> &str { @@ -32,16 +36,23 @@ impl WholeStreamCommand for Last { } } -fn last( - LastArgs { amount }: LastArgs, - context: RunnableContext, -) -> Result { - let stream = async_stream_block! { +fn last(LastArgs { rows }: LastArgs, context: RunnableContext) -> Result { + let stream = async_stream! 
{ let v: Vec<_> = context.input.into_vec().await; - let k = v.len() - (*amount as usize); - for x in v[k..].iter() { - let y: Tagged = x.clone(); - yield ReturnSuccess::value(y) + + let rows_desired = if let Some(quantity) = rows { + *quantity + } else { + 1 + }; + + let count = (rows_desired as usize); + if count < v.len() { + let k = v.len() - count; + for x in v[k..].iter() { + let y: Tagged = x.clone(); + yield ReturnSuccess::value(y) + } } }; Ok(stream.to_output_stream()) diff --git a/src/commands/lines.rs b/src/commands/lines.rs index 0195177542..8375098b70 100644 --- a/src/commands/lines.rs +++ b/src/commands/lines.rs @@ -1,6 +1,6 @@ use crate::commands::WholeStreamCommand; +use crate::data::{Primitive, Value}; use crate::errors::ShellError; -use crate::object::{Primitive, Value}; use crate::prelude::*; use log::trace; @@ -32,7 +32,7 @@ impl WholeStreamCommand for Lines { fn lines(args: CommandArgs, registry: &CommandRegistry) -> Result { let args = args.evaluate_once(registry)?; - let span = args.name_span(); + let tag = args.name_tag(); let input = args.input; let input: InputStream = trace_stream!(target: "nu::trace_stream::lines", "input" = input); @@ -58,9 +58,9 @@ fn lines(args: CommandArgs, registry: &CommandRegistry) -> Result>, +} + impl WholeStreamCommand for LS { fn name(&self) -> &str { "ls" } fn signature(&self) -> Signature { - Signature::build("ls").optional("path", SyntaxType::Path) + Signature::build("ls").optional( + "path", + SyntaxShape::Pattern, + "a path to get the directory contents from", + ) } fn usage(&self) -> &str { @@ -22,12 +32,11 @@ impl WholeStreamCommand for LS { args: CommandArgs, registry: &CommandRegistry, ) -> Result { - ls(args, registry) + args.process(registry, ls)?.run() + // ls(args, registry) } } -fn ls(args: CommandArgs, registry: &CommandRegistry) -> Result { - let shell_manager = args.shell_manager.clone(); - let args = args.evaluate_once(registry)?; - shell_manager.ls(args) +fn ls(LsArgs { path }: LsArgs, context: RunnableContext) -> Result { + context.shell_manager.ls(path, &context) } diff --git a/src/commands/macros.rs b/src/commands/macros.rs index adac8d9c89..4a83f5e069 100644 --- a/src/commands/macros.rs +++ b/src/commands/macros.rs @@ -266,7 +266,7 @@ macro_rules! command { Extract { $($extract:tt)* { - use $crate::object::types::ExtractType; + use $crate::data::types::ExtractType; let value = $args.expect_nth($($positional_count)*)?; Block::extract(value)? } @@ -321,7 +321,7 @@ macro_rules! command { Extract { $($extract:tt)* { - use $crate::object::types::ExtractType; + use $crate::data::types::ExtractType; let value = $args.expect_nth($($positional_count)*)?; <$param_kind>::extract(&value)? 
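// A minimal standalone sketch of the new `last` behaviour above: the row count is optional
// and defaults to 1, and the buffered input is sliced from `len - count` to the end.
// `last_rows` is an illustrative name; saturating_sub is used here so asking for more rows
// than exist simply returns everything, whereas the diff's version skips the loop entirely
// in that case.
fn last_rows<T: Clone>(rows: &[T], count: Option<usize>) -> Vec<T> {
    let wanted = count.unwrap_or(1);
    let start = rows.len().saturating_sub(wanted);
    rows[start..].to_vec()
}

fn main() {
    let input = vec!["a", "b", "c", "d"];
    assert_eq!(last_rows(&input, Some(2)), vec!["c", "d"]);
    // No count given: keep only the final row, matching the new default.
    assert_eq!(last_rows(&input, None), vec!["d"]);
}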
} diff --git a/src/commands/mkdir.rs b/src/commands/mkdir.rs index 9dec9a3142..e801a27530 100644 --- a/src/commands/mkdir.rs +++ b/src/commands/mkdir.rs @@ -17,7 +17,7 @@ impl PerItemCommand for Mkdir { } fn signature(&self) -> Signature { - Signature::build("mkdir").rest(SyntaxType::Path) + Signature::build("mkdir").rest(SyntaxShape::Path, "the name(s) of the path(s) to create") } fn usage(&self) -> &str { diff --git a/src/commands/mv.rs b/src/commands/mv.rs index 130e5996e8..a9a11f5064 100644 --- a/src/commands/mv.rs +++ b/src/commands/mv.rs @@ -1,6 +1,6 @@ use crate::commands::command::RunnablePerItemContext; use crate::errors::ShellError; -use crate::parser::hir::SyntaxType; +use crate::parser::hir::SyntaxShape; use crate::parser::registry::{CommandRegistry, Signature}; use crate::prelude::*; use std::path::PathBuf; @@ -20,9 +20,16 @@ impl PerItemCommand for Move { fn signature(&self) -> Signature { Signature::build("mv") - .required("source", SyntaxType::Path) - .required("destination", SyntaxType::Path) - .named("file", SyntaxType::Any) + .required( + "source", + SyntaxShape::Pattern, + "the location to move files/directories from", + ) + .required( + "destination", + SyntaxShape::Path, + "the location to move files/directories to", + ) } fn usage(&self) -> &str { diff --git a/src/commands/nth.rs b/src/commands/nth.rs index 98ab6a10a9..bcd3057879 100644 --- a/src/commands/nth.rs +++ b/src/commands/nth.rs @@ -16,7 +16,11 @@ impl WholeStreamCommand for Nth { } fn signature(&self) -> Signature { - Signature::build("nth").required("amount", SyntaxType::Any) + Signature::build("nth").required( + "row number", + SyntaxShape::Any, + "the number of the row to return", + ) } fn usage(&self) -> &str { diff --git a/src/commands/open.rs b/src/commands/open.rs index 0faebdef77..19c7d539ed 100644 --- a/src/commands/open.rs +++ b/src/commands/open.rs @@ -1,15 +1,12 @@ use crate::commands::UnevaluatedCallInfo; -use crate::context::SpanSource; +use crate::context::AnchorLocation; +use crate::data::meta::Span; +use crate::data::Value; use crate::errors::ShellError; -use crate::object::Value; -use crate::parser::hir::SyntaxType; +use crate::parser::hir::SyntaxShape; use crate::parser::registry::Signature; use crate::prelude::*; -use mime::Mime; use std::path::{Path, PathBuf}; -use std::str::FromStr; -use surf::mime; -use uuid::Uuid; pub struct Open; impl PerItemCommand for Open { @@ -19,8 +16,12 @@ impl PerItemCommand for Open { fn signature(&self) -> Signature { Signature::build(self.name()) - .required("path", SyntaxType::Path) - .switch("raw") + .required( + "path", + SyntaxShape::Path, + "the file path to load values from", + ) + .switch("raw", "load content as a string insead of a table") } fn usage(&self) -> &str { @@ -47,21 +48,23 @@ fn run( let cwd = PathBuf::from(shell_manager.path()); let full_path = PathBuf::from(cwd); - let path = match call_info - .args - .nth(0) - .ok_or_else(|| ShellError::string(&format!("No file or directory specified")))? - { + let path = match call_info.args.nth(0).ok_or_else(|| { + ShellError::labeled_error( + "No file or directory specified", + "for command", + &call_info.name_tag, + ) + })? { file => file, }; let path_buf = path.as_path()?; let path_str = path_buf.display().to_string(); - let path_span = path.span(); + let path_span = path.tag.span; let has_raw = call_info.args.has("raw"); let registry = registry.clone(); let raw_args = raw_args.clone(); - let stream = async_stream_block! { + let stream = async_stream! 
{ let result = fetch(&full_path, &path_str, path_span).await; @@ -69,7 +72,7 @@ fn run( yield Err(e); return; } - let (file_extension, contents, contents_tag, span_source) = result.unwrap(); + let (file_extension, contents, contents_tag) = result.unwrap(); let file_extension = if has_raw { None @@ -79,21 +82,14 @@ fn run( file_extension.or(path_str.split('.').last().map(String::from)) }; - if let Some(uuid) = contents_tag.origin { - // If we have loaded something, track its source - yield ReturnSuccess::action(CommandAction::AddSpanSource( - uuid, - span_source, - )); - } - - let tagged_contents = contents.tagged(contents_tag); + let tagged_contents = contents.tagged(&contents_tag); if let Some(extension) = file_extension { let command_name = format!("from-{}", extension); if let Some(converter) = registry.get_command(&command_name) { let new_args = RawCommandArgs { host: raw_args.host, + ctrl_c: raw_args.ctrl_c, shell_manager: raw_args.shell_manager, call_info: UnevaluatedCallInfo { args: crate::parser::hir::Call { @@ -102,21 +98,20 @@ fn run( named: None }, source: raw_args.call_info.source, - source_map: raw_args.call_info.source_map, - name_span: raw_args.call_info.name_span, + name_tag: raw_args.call_info.name_tag, } }; - let mut result = converter.run(new_args.with_input(vec![tagged_contents]), ®istry); + let mut result = converter.run(new_args.with_input(vec![tagged_contents]), ®istry, false); let result_vec: Vec> = result.drain_vec().await; for res in result_vec { match res { - Ok(ReturnSuccess::Value(Tagged { item: Value::List(list), ..})) => { + Ok(ReturnSuccess::Value(Tagged { item: Value::Table(list), ..})) => { for l in list { yield Ok(ReturnSuccess::Value(l)); } } Ok(ReturnSuccess::Value(Tagged { item, .. })) => { - yield Ok(ReturnSuccess::Value(Tagged { item, tag: contents_tag })); + yield Ok(ReturnSuccess::Value(Tagged { item, tag: contents_tag.clone() })); } x => yield x, } @@ -136,292 +131,134 @@ pub async fn fetch( cwd: &PathBuf, location: &str, span: Span, -) -> Result<(Option, Value, Tag, SpanSource), ShellError> { +) -> Result<(Option, Value, Tag), ShellError> { let mut cwd = cwd.clone(); - if location.starts_with("http:") || location.starts_with("https:") { - let response = surf::get(location).await; - match response { - Ok(mut r) => match r.headers().get("content-type") { - Some(content_type) => { - let content_type = Mime::from_str(content_type).unwrap(); - match (content_type.type_(), content_type.subtype()) { - (mime::APPLICATION, mime::XML) => Ok(( - Some("xml".to_string()), - Value::string(r.body_string().await.map_err(|_| { - ShellError::labeled_error( - "Could not load text from remote url", - "could not load", - span, - ) - })?), - Tag { - span, - origin: Some(Uuid::new_v4()), - }, - SpanSource::Url(location.to_string()), - )), - (mime::APPLICATION, mime::JSON) => Ok(( - Some("json".to_string()), - Value::string(r.body_string().await.map_err(|_| { - ShellError::labeled_error( - "Could not load text from remote url", - "could not load", - span, - ) - })?), - Tag { - span, - origin: Some(Uuid::new_v4()), - }, - SpanSource::Url(location.to_string()), - )), - (mime::APPLICATION, mime::OCTET_STREAM) => { - let buf: Vec = r.body_bytes().await.map_err(|_| { - ShellError::labeled_error( - "Could not load binary file", - "could not load", - span, - ) - })?; - Ok(( - None, - Value::Binary(buf), - Tag { - span, - origin: Some(Uuid::new_v4()), - }, - SpanSource::Url(location.to_string()), - )) - } - (mime::IMAGE, mime::SVG) => Ok(( - Some("svg".to_string()), - 
Value::string(r.body_string().await.map_err(|_| { - ShellError::labeled_error( - "Could not load svg from remote url", - "could not load", - span, - ) - })?), - Tag { - span, - origin: Some(Uuid::new_v4()), - }, - SpanSource::Url(location.to_string()), - )), - (mime::IMAGE, image_ty) => { - let buf: Vec = r.body_bytes().await.map_err(|_| { - ShellError::labeled_error( - "Could not load image file", - "could not load", - span, - ) - })?; - Ok(( - Some(image_ty.to_string()), - Value::Binary(buf), - Tag { - span, - origin: Some(Uuid::new_v4()), - }, - SpanSource::Url(location.to_string()), - )) - } - (mime::TEXT, mime::HTML) => Ok(( - Some("html".to_string()), - Value::string(r.body_string().await.map_err(|_| { - ShellError::labeled_error( - "Could not load text from remote url", - "could not load", - span, - ) - })?), - Tag { - span, - origin: Some(Uuid::new_v4()), - }, - SpanSource::Url(location.to_string()), - )), - (mime::TEXT, mime::PLAIN) => { - let path_extension = url::Url::parse(location) - .unwrap() - .path_segments() - .and_then(|segments| segments.last()) - .and_then(|name| if name.is_empty() { None } else { Some(name) }) - .and_then(|name| { - PathBuf::from(name) - .extension() - .map(|name| name.to_string_lossy().to_string()) - }); - Ok(( - path_extension, - Value::string(r.body_string().await.map_err(|_| { - ShellError::labeled_error( - "Could not load text from remote url", - "could not load", + cwd.push(Path::new(location)); + if let Ok(cwd) = dunce::canonicalize(cwd) { + match std::fs::read(&cwd) { + Ok(bytes) => match std::str::from_utf8(&bytes) { + Ok(s) => Ok(( + cwd.extension() + .map(|name| name.to_string_lossy().to_string()), + Value::string(s), + Tag { + span, + anchor: Some(AnchorLocation::File(cwd.to_string_lossy().to_string())), + }, + )), + Err(_) => { + //Non utf8 data. 
+ match (bytes.get(0), bytes.get(1)) { + (Some(x), Some(y)) if *x == 0xff && *y == 0xfe => { + // Possibly UTF-16 little endian + let utf16 = read_le_u16(&bytes[2..]); + + if let Some(utf16) = utf16 { + match std::string::String::from_utf16(&utf16) { + Ok(s) => Ok(( + cwd.extension() + .map(|name| name.to_string_lossy().to_string()), + Value::string(s), + Tag { + span, + anchor: Some(AnchorLocation::File( + cwd.to_string_lossy().to_string(), + )), + }, + )), + Err(_) => Ok(( + None, + Value::binary(bytes), + Tag { + span, + anchor: Some(AnchorLocation::File( + cwd.to_string_lossy().to_string(), + )), + }, + )), + } + } else { + Ok(( + None, + Value::binary(bytes), + Tag { span, - ) - })?), - Tag { - span, - origin: Some(Uuid::new_v4()), - }, - SpanSource::Url(location.to_string()), - )) + anchor: Some(AnchorLocation::File( + cwd.to_string_lossy().to_string(), + )), + }, + )) + } } - (ty, sub_ty) => Ok(( + (Some(x), Some(y)) if *x == 0xfe && *y == 0xff => { + // Possibly UTF-16 big endian + let utf16 = read_be_u16(&bytes[2..]); + + if let Some(utf16) = utf16 { + match std::string::String::from_utf16(&utf16) { + Ok(s) => Ok(( + cwd.extension() + .map(|name| name.to_string_lossy().to_string()), + Value::string(s), + Tag { + span, + anchor: Some(AnchorLocation::File( + cwd.to_string_lossy().to_string(), + )), + }, + )), + Err(_) => Ok(( + None, + Value::binary(bytes), + Tag { + span, + anchor: Some(AnchorLocation::File( + cwd.to_string_lossy().to_string(), + )), + }, + )), + } + } else { + Ok(( + None, + Value::binary(bytes), + Tag { + span, + anchor: Some(AnchorLocation::File( + cwd.to_string_lossy().to_string(), + )), + }, + )) + } + } + _ => Ok(( None, - Value::string(format!( - "Not yet supported MIME type: {} {}", - ty, sub_ty - )), + Value::binary(bytes), Tag { span, - origin: Some(Uuid::new_v4()), + anchor: Some(AnchorLocation::File( + cwd.to_string_lossy().to_string(), + )), }, - SpanSource::Url(location.to_string()), )), } } - None => Ok(( - None, - Value::string(format!("No content type found")), - Tag { - span, - origin: Some(Uuid::new_v4()), - }, - SpanSource::Url(location.to_string()), - )), }, Err(_) => { return Err(ShellError::labeled_error( - "URL could not be opened", - "url not found", + "File could not be opened", + "file not found", span, )); } } } else { - cwd.push(Path::new(location)); - if let Ok(cwd) = dunce::canonicalize(cwd) { - match std::fs::read(&cwd) { - Ok(bytes) => match std::str::from_utf8(&bytes) { - Ok(s) => Ok(( - cwd.extension() - .map(|name| name.to_string_lossy().to_string()), - Value::string(s), - Tag { - span, - origin: Some(Uuid::new_v4()), - }, - SpanSource::File(cwd.to_string_lossy().to_string()), - )), - Err(_) => { - //Non utf8 data. 
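// A minimal standalone sketch of the encoding sniff fetch() performs above: the first two
// bytes are checked for a UTF-16 byte-order mark (FF FE = little endian, FE FF = big
// endian), the rest is decoded with String::from_utf16, and anything else falls back to raw
// binary. `decode_maybe_utf16` and `u16_units` are illustrative; the diff uses its own
// read_le_u16/read_be_u16 helpers for the same purpose.
fn u16_units(bytes: &[u8], little_endian: bool) -> Option<Vec<u16>> {
    if bytes.len() % 2 != 0 {
        return None;
    }
    Some(
        bytes
            .chunks_exact(2)
            .map(|pair| {
                if little_endian {
                    u16::from_le_bytes([pair[0], pair[1]])
                } else {
                    u16::from_be_bytes([pair[0], pair[1]])
                }
            })
            .collect(),
    )
}

enum Loaded {
    Text(String),
    Binary(Vec<u8>),
}

fn decode_maybe_utf16(bytes: Vec<u8>) -> Loaded {
    let units = match (bytes.get(0), bytes.get(1)) {
        (Some(x), Some(y)) if *x == 0xff && *y == 0xfe => u16_units(&bytes[2..], true), // UTF-16 LE BOM
        (Some(x), Some(y)) if *x == 0xfe && *y == 0xff => u16_units(&bytes[2..], false), // UTF-16 BE BOM
        _ => None,
    };
    match units.and_then(|u| String::from_utf16(&u).ok()) {
        Some(text) => Loaded::Text(text),
        // No BOM or invalid UTF-16: keep the original bytes as binary.
        None => Loaded::Binary(bytes),
    }
}

fn main() {
    // "hi" encoded as UTF-16 LE with a BOM.
    let bytes = vec![0xff, 0xfe, b'h', 0x00, b'i', 0x00];
    match decode_maybe_utf16(bytes) {
        Loaded::Text(s) => println!("decoded text: {}", s),
        Loaded::Binary(b) => println!("kept {} raw bytes", b.len()),
    }
}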
- match (bytes.get(0), bytes.get(1)) { - (Some(x), Some(y)) if *x == 0xff && *y == 0xfe => { - // Possibly UTF-16 little endian - let utf16 = read_le_u16(&bytes[2..]); - - if let Some(utf16) = utf16 { - match std::string::String::from_utf16(&utf16) { - Ok(s) => Ok(( - cwd.extension() - .map(|name| name.to_string_lossy().to_string()), - Value::string(s), - Tag { - span, - origin: Some(Uuid::new_v4()), - }, - SpanSource::File(cwd.to_string_lossy().to_string()), - )), - Err(_) => Ok(( - None, - Value::Binary(bytes), - Tag { - span, - origin: Some(Uuid::new_v4()), - }, - SpanSource::File(cwd.to_string_lossy().to_string()), - )), - } - } else { - Ok(( - None, - Value::Binary(bytes), - Tag { - span, - origin: Some(Uuid::new_v4()), - }, - SpanSource::File(cwd.to_string_lossy().to_string()), - )) - } - } - (Some(x), Some(y)) if *x == 0xfe && *y == 0xff => { - // Possibly UTF-16 big endian - let utf16 = read_be_u16(&bytes[2..]); - - if let Some(utf16) = utf16 { - match std::string::String::from_utf16(&utf16) { - Ok(s) => Ok(( - cwd.extension() - .map(|name| name.to_string_lossy().to_string()), - Value::string(s), - Tag { - span, - origin: Some(Uuid::new_v4()), - }, - SpanSource::File(cwd.to_string_lossy().to_string()), - )), - Err(_) => Ok(( - None, - Value::Binary(bytes), - Tag { - span, - origin: Some(Uuid::new_v4()), - }, - SpanSource::File(cwd.to_string_lossy().to_string()), - )), - } - } else { - Ok(( - None, - Value::Binary(bytes), - Tag { - span, - origin: Some(Uuid::new_v4()), - }, - SpanSource::File(cwd.to_string_lossy().to_string()), - )) - } - } - _ => Ok(( - None, - Value::Binary(bytes), - Tag { - span, - origin: Some(Uuid::new_v4()), - }, - SpanSource::File(cwd.to_string_lossy().to_string()), - )), - } - } - }, - Err(_) => { - return Err(ShellError::labeled_error( - "File could not be opened", - "file not found", - span, - )); - } - } - } else { - return Err(ShellError::labeled_error( - "File could not be opened", - "file not found", - span, - )); - } + return Err(ShellError::labeled_error( + "File could not be opened", + "file not found", + span, + )); } } diff --git a/src/commands/pick.rs b/src/commands/pick.rs index 927edf8b3d..b9c4e53bcc 100644 --- a/src/commands/pick.rs +++ b/src/commands/pick.rs @@ -1,7 +1,7 @@ use crate::commands::WholeStreamCommand; use crate::context::CommandRegistry; +use crate::data::base::select_fields; use crate::errors::ShellError; -use crate::object::base::select_fields; use crate::prelude::*; #[derive(Deserialize)] @@ -17,7 +17,7 @@ impl WholeStreamCommand for Pick { } fn signature(&self) -> Signature { - Signature::build("pick").rest(SyntaxType::Any) + Signature::build("pick").rest(SyntaxShape::Any, "the columns to select from the table") } fn usage(&self) -> &str { diff --git a/src/commands/pivot.rs b/src/commands/pivot.rs new file mode 100644 index 0000000000..0556999f2d --- /dev/null +++ b/src/commands/pivot.rs @@ -0,0 +1,136 @@ +use crate::commands::WholeStreamCommand; +use crate::errors::ShellError; +use crate::prelude::*; +use crate::TaggedDictBuilder; + +pub struct Pivot; + +#[derive(Deserialize)] +pub struct PivotArgs { + rest: Vec>, + #[serde(rename(deserialize = "header-row"))] + header_row: bool, + #[serde(rename(deserialize = "ignore-titles"))] + ignore_titles: bool, +} + +impl WholeStreamCommand for Pivot { + fn name(&self) -> &str { + "pivot" + } + + fn signature(&self) -> Signature { + Signature::build("pivot") + .switch("header-row", "treat the first row as column names") + .switch("ignore-titles", "don't pivot the column names into 
values") + .rest( + SyntaxShape::String, + "the names to give columns once pivoted", + ) + } + + fn usage(&self) -> &str { + "Pivots the table contents so rows become columns and columns become rows." + } + + fn run( + &self, + args: CommandArgs, + registry: &CommandRegistry, + ) -> Result { + args.process(registry, pivot)?.run() + } +} + +fn merge_descriptors(values: &[Tagged]) -> Vec { + let mut ret = vec![]; + for value in values { + for desc in value.data_descriptors() { + if !ret.contains(&desc) { + ret.push(desc); + } + } + } + ret +} + +pub fn pivot(args: PivotArgs, context: RunnableContext) -> Result { + let stream = async_stream! { + let input = context.input.into_vec().await; + + let descs = merge_descriptors(&input); + + let mut headers = vec![]; + + if args.rest.len() > 0 && args.header_row { + yield Err(ShellError::labeled_error("Can not provide header names and use header row", "using header row", context.name)); + return; + } + + if args.header_row { + for i in input.clone() { + if let Some(desc) = descs.get(0) { + match i.get_data_by_key(&desc) { + Some(x) => { + if let Ok(s) = x.as_string() { + headers.push(s); + } else { + yield Err(ShellError::labeled_error("Header row needs string headers", "used non-string headers", context.name)); + return; + } + } + _ => { + yield Err(ShellError::labeled_error("Header row is incomplete and can't be used", "using incomplete header row", context.name)); + return; + } + } + } else { + yield Err(ShellError::labeled_error("Header row is incomplete and can't be used", "using incomplete header row", context.name)); + return; + } + } + } else { + for i in 0..input.len()+1 { + if let Some(name) = args.rest.get(i) { + headers.push(name.to_string()) + } else { + headers.push(format!("Column{}", i)); + } + } + } + + let descs: Vec<_> = if args.header_row { + descs.iter().skip(1).collect() + } else { + descs.iter().collect() + }; + + for desc in descs { + let mut column_num: usize = 0; + let mut dict = TaggedDictBuilder::new(&context.name); + + if !args.ignore_titles && !args.header_row { + dict.insert(headers[column_num].clone(), Value::string(desc.clone())); + column_num += 1 + } + + for i in input.clone() { + match i.get_data_by_key(&desc) { + Some(x) => { + dict.insert_tagged(headers[column_num].clone(), x.clone()); + } + _ => { + dict.insert(headers[column_num].clone(), Value::nothing()); + } + } + column_num += 1; + } + + yield ReturnSuccess::value(dict.into_tagged_value()); + } + + + }; + + Ok(OutputStream::new(stream)) +} diff --git a/src/commands/plugin.rs b/src/commands/plugin.rs index ae9b2ec64a..5dfbe6be5b 100644 --- a/src/commands/plugin.rs +++ b/src/commands/plugin.rs @@ -128,7 +128,7 @@ pub fn filter_plugin( }, Err(e) => { let mut result = VecDeque::new(); - result.push_back(Err(ShellError::string(format!( + result.push_back(Err(ShellError::untagged_runtime_error(format!( "Error while processing begin_filter response: {:?} {}", e, input )))); @@ -138,7 +138,7 @@ pub fn filter_plugin( } Err(e) => { let mut result = VecDeque::new(); - result.push_back(Err(ShellError::string(format!( + result.push_back(Err(ShellError::untagged_runtime_error(format!( "Error while reading begin_filter response: {:?}", e )))); @@ -189,7 +189,7 @@ pub fn filter_plugin( }, Err(e) => { let mut result = VecDeque::new(); - result.push_back(Err(ShellError::string(format!( + result.push_back(Err(ShellError::untagged_runtime_error(format!( "Error while processing end_filter response: {:?} {}", e, input )))); @@ -199,7 +199,7 @@ pub fn filter_plugin( } Err(e) => 
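// A minimal standalone sketch of the core of pivot above: merge the column names of every
// input row (first-seen order, no duplicates), then emit one output row per original
// column. `Row`, `merge_descriptors`, and `pivot_rows` are illustrative names; header
// handling for --header-row and --ignore-titles is left out, and missing cells become
// empty strings here where the command inserts Value::nothing().
type Row = Vec<(String, String)>;

fn merge_descriptors(rows: &[Row]) -> Vec<String> {
    let mut descriptors = vec![];
    for row in rows {
        for (name, _) in row {
            if !descriptors.contains(name) {
                descriptors.push(name.clone());
            }
        }
    }
    descriptors
}

fn pivot_rows(rows: &[Row]) -> Vec<Row> {
    merge_descriptors(rows)
        .into_iter()
        .map(|descriptor| {
            // The first cell names the original column; the rest hold each row's value,
            // using the same Column0, Column1, ... fallback names as the diff.
            let mut out = vec![("Column0".to_string(), descriptor.clone())];
            for (i, row) in rows.iter().enumerate() {
                let value = row
                    .iter()
                    .find(|(name, _)| *name == descriptor)
                    .map(|(_, v)| v.clone())
                    .unwrap_or_default();
                out.push((format!("Column{}", i + 1), value));
            }
            out
        })
        .collect()
}

fn main() {
    let rows = vec![
        vec![("name".to_string(), "nu".to_string()), ("stars".to_string(), "5".to_string())],
        vec![("name".to_string(), "bash".to_string())],
    ];
    for row in pivot_rows(&rows) {
        println!("{:?}", row);
    }
}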
{ let mut result = VecDeque::new(); - result.push_back(Err(ShellError::string(format!( + result.push_back(Err(ShellError::untagged_runtime_error(format!( "Error while reading end_filter: {:?}", e )))); @@ -236,7 +236,7 @@ pub fn filter_plugin( }, Err(e) => { let mut result = VecDeque::new(); - result.push_back(Err(ShellError::string(format!( + result.push_back(Err(ShellError::untagged_runtime_error(format!( "Error while processing filter response: {:?} {}", e, input )))); @@ -246,7 +246,7 @@ pub fn filter_plugin( } Err(e) => { let mut result = VecDeque::new(); - result.push_back(Err(ShellError::string(format!( + result.push_back(Err(ShellError::untagged_runtime_error(format!( "Error while reading filter response: {:?}", e )))); @@ -297,7 +297,7 @@ pub fn sink_plugin( let args = args.evaluate_once(registry)?; let call_info = args.call_info.clone(); - let stream = async_stream_block! { + let stream = async_stream! { let input: Vec> = args.input.values.collect().await; let request = JsonRpc::new("sink", (call_info.clone(), input)); @@ -312,6 +312,11 @@ pub fn sink_plugin( .expect("Failed to spawn child process"); let _ = child.wait(); + + // Needed for async_stream to type check + if false { + yield ReturnSuccess::value(Value::nothing().tagged_unknown()); + } }; Ok(OutputStream::new(stream)) } diff --git a/src/commands/post.rs b/src/commands/post.rs index c1fc5d42fe..eb06cdbae5 100644 --- a/src/commands/post.rs +++ b/src/commands/post.rs @@ -1,8 +1,8 @@ use crate::commands::UnevaluatedCallInfo; -use crate::context::SpanSource; +use crate::context::AnchorLocation; +use crate::data::Value; use crate::errors::ShellError; -use crate::object::Value; -use crate::parser::hir::SyntaxType; +use crate::parser::hir::SyntaxShape; use crate::parser::registry::Signature; use crate::prelude::*; use base64::encode; @@ -10,7 +10,12 @@ use mime::Mime; use std::path::PathBuf; use std::str::FromStr; use surf::mime; -use uuid::Uuid; + +pub enum HeaderKind { + ContentType(String), + ContentLength(String), +} + pub struct Post; impl PerItemCommand for Post { @@ -20,11 +25,25 @@ impl PerItemCommand for Post { fn signature(&self) -> Signature { Signature::build(self.name()) - .required("path", SyntaxType::Any) - .required("body", SyntaxType::Any) - .named("user", SyntaxType::Any) - .named("password", SyntaxType::Any) - .switch("raw") + .required("path", SyntaxShape::Any, "the URL to post to") + .required("body", SyntaxShape::Any, "the contents of the post body") + .named("user", SyntaxShape::Any, "the username when authenticating") + .named( + "password", + SyntaxShape::Any, + "the password when authenticating", + ) + .named( + "content-type", + SyntaxShape::Any, + "the MIME type of content to post", + ) + .named( + "content-length", + SyntaxShape::Any, + "the length of the content being posted", + ) + .switch("raw", "return values as a string instead of a table") } fn usage(&self) -> &str { @@ -47,23 +66,22 @@ fn run( registry: &CommandRegistry, raw_args: &RawCommandArgs, ) -> Result { + let name_tag = call_info.name_tag.clone(); let call_info = call_info.clone(); - let path = match call_info - .args - .nth(0) - .ok_or_else(|| ShellError::string(&format!("No url specified")))? - { - file => file.clone(), - }; - let body = match call_info - .args - .nth(1) - .ok_or_else(|| ShellError::string(&format!("No body specified")))? - { - file => file.clone(), - }; + let path = + match call_info.args.nth(0).ok_or_else(|| { + ShellError::labeled_error("No url specified", "for command", &name_tag) + })? 
{ + file => file.clone(), + }; + let body = + match call_info.args.nth(1).ok_or_else(|| { + ShellError::labeled_error("No body specified", "for command", &name_tag) + })? { + file => file.clone(), + }; let path_str = path.as_string()?; - let path_span = path.span(); + let path_span = path.tag(); let has_raw = call_info.args.has("raw"); let user = call_info.args.get("user").map(|x| x.as_string().unwrap()); let password = call_info @@ -73,9 +91,11 @@ fn run( let registry = registry.clone(); let raw_args = raw_args.clone(); - let stream = async_stream_block! { - let (file_extension, contents, contents_tag, span_source) = - post(&path_str, &body, user, password, path_span, ®istry, &raw_args).await.unwrap(); + let headers = get_headers(&call_info)?; + + let stream = async_stream! { + let (file_extension, contents, contents_tag) = + post(&path_str, &body, user, password, &headers, path_span, ®istry, &raw_args).await.unwrap(); let file_extension = if has_raw { None @@ -85,21 +105,14 @@ fn run( file_extension.or(path_str.split('.').last().map(String::from)) }; - if let Some(uuid) = contents_tag.origin { - // If we have loaded something, track its source - yield ReturnSuccess::action(CommandAction::AddSpanSource( - uuid, - span_source, - )); - } - - let tagged_contents = contents.tagged(contents_tag); + let tagged_contents = contents.tagged(&contents_tag); if let Some(extension) = file_extension { let command_name = format!("from-{}", extension); if let Some(converter) = registry.get_command(&command_name) { let new_args = RawCommandArgs { host: raw_args.host, + ctrl_c: raw_args.ctrl_c, shell_manager: raw_args.shell_manager, call_info: UnevaluatedCallInfo { args: crate::parser::hir::Call { @@ -108,21 +121,20 @@ fn run( named: None }, source: raw_args.call_info.source, - source_map: raw_args.call_info.source_map, - name_span: raw_args.call_info.name_span, + name_tag: raw_args.call_info.name_tag, } }; - let mut result = converter.run(new_args.with_input(vec![tagged_contents]), ®istry); + let mut result = converter.run(new_args.with_input(vec![tagged_contents]), ®istry, false); let result_vec: Vec> = result.drain_vec().await; for res in result_vec { match res { - Ok(ReturnSuccess::Value(Tagged { item: Value::List(list), ..})) => { + Ok(ReturnSuccess::Value(Tagged { item: Value::Table(list), ..})) => { for l in list { yield Ok(ReturnSuccess::Value(l)); } } Ok(ReturnSuccess::Value(Tagged { item, .. })) => { - yield Ok(ReturnSuccess::Value(Tagged { item, tag: contents_tag })); + yield Ok(ReturnSuccess::Value(Tagged { item, tag: contents_tag.clone() })); } x => yield x, } @@ -138,15 +150,71 @@ fn run( Ok(stream.to_output_stream()) } +fn get_headers(call_info: &CallInfo) -> Result, ShellError> { + let mut headers = vec![]; + + match extract_header_value(&call_info, "content-type") { + Ok(h) => match h { + Some(ct) => headers.push(HeaderKind::ContentType(ct)), + None => {} + }, + Err(e) => { + return Err(e); + } + }; + + match extract_header_value(&call_info, "content-length") { + Ok(h) => match h { + Some(cl) => headers.push(HeaderKind::ContentLength(cl)), + None => {} + }, + Err(e) => { + return Err(e); + } + }; + + Ok(headers) +} + +fn extract_header_value(call_info: &CallInfo, key: &str) -> Result, ShellError> { + if call_info.args.has(key) { + let tagged = call_info.args.get(key); + let val = match tagged { + Some(Tagged { + item: Value::Primitive(Primitive::String(s)), + .. + }) => s.clone(), + Some(Tagged { tag, .. }) => { + return Err(ShellError::labeled_error( + format!("{} not in expected format. 
Expected string.", key), + "post error", + tag, + )); + } + _ => { + return Err(ShellError::labeled_error( + format!("{} not in expected format. Expected string.", key), + "post error", + Tag::unknown(), + )); + } + }; + return Ok(Some(val)); + } + + Ok(None) +} + pub async fn post( location: &str, body: &Tagged, user: Option, password: Option, - span: Span, + headers: &Vec, + tag: Tag, registry: &CommandRegistry, raw_args: &RawCommandArgs, -) -> Result<(Option, Value, Tag, SpanSource), ShellError> { +) -> Result<(Option, Value, Tag), ShellError> { let registry = registry.clone(); let raw_args = raw_args.clone(); if location.starts_with("http:") || location.starts_with("https:") { @@ -164,10 +232,17 @@ pub async fn post( if let Some(login) = login { s = s.set_header("Authorization", format!("Basic {}", login)); } + + for h in headers { + s = match h { + HeaderKind::ContentType(ct) => s.set_header("Content-Type", ct), + HeaderKind::ContentLength(cl) => s.set_header("Content-Length", cl), + }; + } s.await } Tagged { - item: Value::Binary(b), + item: Value::Primitive(Primitive::Binary(b)), .. } => { let mut s = surf::post(location).body_bytes(b); @@ -180,6 +255,7 @@ pub async fn post( if let Some(converter) = registry.get_command("to-json") { let new_args = RawCommandArgs { host: raw_args.host, + ctrl_c: raw_args.ctrl_c, shell_manager: raw_args.shell_manager, call_info: UnevaluatedCallInfo { args: crate::parser::hir::Call { @@ -188,13 +264,13 @@ pub async fn post( named: None, }, source: raw_args.call_info.source, - source_map: raw_args.call_info.source_map, - name_span: raw_args.call_info.name_span, + name_tag: raw_args.call_info.name_tag, }, }; let mut result = converter.run( new_args.with_input(vec![item.clone().tagged(tag.clone())]), ®istry, + false, ); let result_vec: Vec> = result.drain_vec().await; @@ -211,7 +287,7 @@ pub async fn post( return Err(ShellError::labeled_error( "Save could not successfully save", "unexpected data during save", - span, + tag, )); } } @@ -227,7 +303,7 @@ pub async fn post( return Err(ShellError::labeled_error( "Could not automatically convert table", "needs manual conversion", - tag.span, + tag, )); } } @@ -243,14 +319,13 @@ pub async fn post( ShellError::labeled_error( "Could not load text from remote url", "could not load", - span, + &tag, ) })?), Tag { - span, - origin: Some(Uuid::new_v4()), + anchor: Some(AnchorLocation::Url(location.to_string())), + span: tag.span, }, - SpanSource::Url(location.to_string()), )), (mime::APPLICATION, mime::JSON) => Ok(( Some("json".to_string()), @@ -258,31 +333,29 @@ pub async fn post( ShellError::labeled_error( "Could not load text from remote url", "could not load", - span, + &tag, ) })?), Tag { - span, - origin: Some(Uuid::new_v4()), + anchor: Some(AnchorLocation::Url(location.to_string())), + span: tag.span, }, - SpanSource::Url(location.to_string()), )), (mime::APPLICATION, mime::OCTET_STREAM) => { let buf: Vec = r.body_bytes().await.map_err(|_| { ShellError::labeled_error( "Could not load binary file", "could not load", - span, + &tag, ) })?; Ok(( None, - Value::Binary(buf), + Value::binary(buf), Tag { - span, - origin: Some(Uuid::new_v4()), + anchor: Some(AnchorLocation::Url(location.to_string())), + span: tag.span, }, - SpanSource::Url(location.to_string()), )) } (mime::IMAGE, image_ty) => { @@ -290,17 +363,16 @@ pub async fn post( ShellError::labeled_error( "Could not load image file", "could not load", - span, + &tag, ) })?; Ok(( Some(image_ty.to_string()), - Value::Binary(buf), + Value::binary(buf), Tag { - 
span, - origin: Some(Uuid::new_v4()), + anchor: Some(AnchorLocation::Url(location.to_string())), + span: tag.span, }, - SpanSource::Url(location.to_string()), )) } (mime::TEXT, mime::HTML) => Ok(( @@ -309,14 +381,13 @@ pub async fn post( ShellError::labeled_error( "Could not load text from remote url", "could not load", - span, + &tag, ) })?), Tag { - span, - origin: Some(Uuid::new_v4()), + anchor: Some(AnchorLocation::Url(location.to_string())), + span: tag.span, }, - SpanSource::Url(location.to_string()), )), (mime::TEXT, mime::PLAIN) => { let path_extension = url::Url::parse(location) @@ -336,14 +407,13 @@ pub async fn post( ShellError::labeled_error( "Could not load text from remote url", "could not load", - span, + &tag, ) })?), Tag { - span, - origin: Some(Uuid::new_v4()), + anchor: Some(AnchorLocation::Url(location.to_string())), + span: tag.span, }, - SpanSource::Url(location.to_string()), )) } (ty, sub_ty) => Ok(( @@ -353,10 +423,9 @@ pub async fn post( ty, sub_ty )), Tag { - span, - origin: Some(Uuid::new_v4()), + anchor: Some(AnchorLocation::Url(location.to_string())), + span: tag.span, }, - SpanSource::Url(location.to_string()), )), } } @@ -364,17 +433,16 @@ pub async fn post( None, Value::string(format!("No content type found")), Tag { - span, - origin: Some(Uuid::new_v4()), + anchor: Some(AnchorLocation::Url(location.to_string())), + span: tag.span, }, - SpanSource::Url(location.to_string()), )), }, Err(_) => { return Err(ShellError::labeled_error( "URL could not be opened", "url not found", - span, + tag, )); } } @@ -382,7 +450,7 @@ pub async fn post( Err(ShellError::labeled_error( "Expected a url", "needs a url", - span, + tag, )) } } diff --git a/src/commands/prepend.rs b/src/commands/prepend.rs new file mode 100644 index 0000000000..b6fa935b0b --- /dev/null +++ b/src/commands/prepend.rs @@ -0,0 +1,47 @@ +use crate::commands::WholeStreamCommand; +use crate::errors::ShellError; +use crate::parser::CommandRegistry; +use crate::prelude::*; + +#[derive(Deserialize)] +struct PrependArgs { + row: Tagged, +} + +pub struct Prepend; + +impl WholeStreamCommand for Prepend { + fn name(&self) -> &str { + "prepend" + } + + fn signature(&self) -> Signature { + Signature::build("prepend").required( + "row value", + SyntaxShape::Any, + "the value of the row to prepend to the table", + ) + } + + fn usage(&self) -> &str { + "Prepend the given row to the front of the table" + } + + fn run( + &self, + args: CommandArgs, + registry: &CommandRegistry, + ) -> Result { + args.process(registry, prepend)?.run() + } +} + +fn prepend( + PrependArgs { row }: PrependArgs, + RunnableContext { input, .. }: RunnableContext, +) -> Result { + let mut prepend: VecDeque> = VecDeque::new(); + prepend.push_back(row); + + Ok(OutputStream::from_input(prepend.chain(input.values))) +} diff --git a/src/commands/ps.rs b/src/commands/ps.rs deleted file mode 100644 index b11e797961..0000000000 --- a/src/commands/ps.rs +++ /dev/null @@ -1,76 +0,0 @@ -use crate::commands::WholeStreamCommand; -use crate::errors::ShellError; -use crate::object::TaggedDictBuilder; -use crate::prelude::*; -use std::time::Duration; -use std::usize; - -use futures::stream::{StreamExt, TryStreamExt}; -use heim::process::{self as process, Process, ProcessResult}; -use heim::units::{ratio, Ratio}; - -pub struct PS; - -impl WholeStreamCommand for PS { - fn name(&self) -> &str { - "ps" - } - - fn signature(&self) -> Signature { - Signature::build("ps") - } - - fn usage(&self) -> &str { - "View current processes." 
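
The new `prepend` command added above boils down to chaining a one-element queue in front of the incoming stream. A minimal std-only sketch of that idea, with plain iterators standing in for nushell's Tagged<Value> stream types (the names here are simplified stand-ins, not the actual nushell API):

use std::collections::VecDeque;

fn prepend_row<T>(row: T, input: impl IntoIterator<Item = T>) -> impl Iterator<Item = T> {
    // Build a one-element queue and chain the original sequence behind it,
    // mirroring what prepend.rs does with its VecDeque of tagged values.
    let mut front: VecDeque<T> = VecDeque::new();
    front.push_back(row);
    front.into_iter().chain(input)
}

fn main() {
    let table = vec!["row 1", "row 2"];
    let out: Vec<_> = prepend_row("new row", table).collect();
    assert_eq!(out, vec!["new row", "row 1", "row 2"]);
}
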
- } - - fn run( - &self, - args: CommandArgs, - registry: &CommandRegistry, - ) -> Result { - ps(args, registry) - } -} - -async fn usage(process: Process) -> ProcessResult<(process::Process, Ratio)> { - let usage_1 = process.cpu_usage().await?; - futures_timer::Delay::new(Duration::from_millis(100)).await?; - let usage_2 = process.cpu_usage().await?; - - Ok((process, usage_2 - usage_1)) -} - -fn ps(args: CommandArgs, registry: &CommandRegistry) -> Result { - let args = args.evaluate_once(registry)?; - let span = args.name_span(); - - let stream = async_stream_block! { - let processes = process::processes() - .map_ok(|process| { - // Note that there is no `.await` here, - // as we want to pass the returned future - // into the `.try_buffer_unordered`. - usage(process) - }) - .try_buffer_unordered(usize::MAX); - pin_utils::pin_mut!(processes); - - while let Some(res) = processes.next().await { - if let Ok((process, usage)) = res { - let mut dict = TaggedDictBuilder::new(Tag::unknown_origin(span)); - dict.insert("pid", Value::int(process.pid())); - if let Ok(name) = process.name().await { - dict.insert("name", Value::string(name)); - } - if let Ok(status) = process.status().await { - dict.insert("status", Value::string(format!("{:?}", status))); - } - dict.insert("cpu", Value::number(usage.get::())); - yield ReturnSuccess::value(dict.into_tagged_value()); - } - } - }; - - Ok(stream.to_output_stream()) -} diff --git a/src/commands/pwd.rs b/src/commands/pwd.rs new file mode 100644 index 0000000000..37e2668bdb --- /dev/null +++ b/src/commands/pwd.rs @@ -0,0 +1,34 @@ +use crate::commands::WholeStreamCommand; +use crate::errors::ShellError; +use crate::parser::registry::Signature; +use crate::prelude::*; + +pub struct PWD; + +impl WholeStreamCommand for PWD { + fn name(&self) -> &str { + "pwd" + } + + fn signature(&self) -> Signature { + Signature::build("pwd") + } + + fn usage(&self) -> &str { + "Output the current working directory." 
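
The reworked `post` command earlier in this patch collects the optional `content-type` and `content-length` arguments into a small HeaderKind list before building the request. A simplified sketch of that collection step, with a HashMap standing in for CallInfo and only the two headers the patch recognizes (types here are illustrative stand-ins):

use std::collections::HashMap;

#[derive(Debug, PartialEq)]
enum HeaderKind {
    ContentType(String),
    ContentLength(String),
}

// Collect the recognized optional header arguments, skipping any that are absent.
fn get_headers(args: &HashMap<&str, String>) -> Vec<HeaderKind> {
    let mut headers = Vec::new();
    if let Some(ct) = args.get("content-type") {
        headers.push(HeaderKind::ContentType(ct.clone()));
    }
    if let Some(cl) = args.get("content-length") {
        headers.push(HeaderKind::ContentLength(cl.clone()));
    }
    headers
}

fn main() {
    let mut args = HashMap::new();
    args.insert("content-type", "application/json".to_string());
    println!("{:?}", get_headers(&args));
}
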
+ } + + fn run( + &self, + args: CommandArgs, + registry: &CommandRegistry, + ) -> Result { + pwd(args, registry) + } +} + +pub fn pwd(args: CommandArgs, registry: &CommandRegistry) -> Result { + let shell_manager = args.shell_manager.clone(); + let args = args.evaluate_once(registry)?; + shell_manager.pwd(args) +} diff --git a/src/commands/reject.rs b/src/commands/reject.rs index b519f8ea64..f02a72aa4c 100644 --- a/src/commands/reject.rs +++ b/src/commands/reject.rs @@ -1,6 +1,6 @@ use crate::commands::WholeStreamCommand; +use crate::data::base::reject_fields; use crate::errors::ShellError; -use crate::object::base::reject_fields; use crate::prelude::*; #[derive(Deserialize)] @@ -16,7 +16,7 @@ impl WholeStreamCommand for Reject { } fn signature(&self) -> Signature { - Signature::build("reject").rest(SyntaxType::Member) + Signature::build("reject").rest(SyntaxShape::Member, "the names of columns to remove") } fn usage(&self) -> &str { diff --git a/src/commands/rm.rs b/src/commands/rm.rs index 60bb5c6e4a..76222d2c28 100644 --- a/src/commands/rm.rs +++ b/src/commands/rm.rs @@ -1,6 +1,6 @@ use crate::commands::command::RunnablePerItemContext; use crate::errors::ShellError; -use crate::parser::hir::SyntaxType; +use crate::parser::hir::SyntaxShape; use crate::parser::registry::{CommandRegistry, Signature}; use crate::prelude::*; use std::path::PathBuf; @@ -11,6 +11,7 @@ pub struct Remove; pub struct RemoveArgs { pub target: Tagged, pub recursive: Tagged, + pub trash: Tagged, } impl PerItemCommand for Remove { @@ -20,12 +21,16 @@ impl PerItemCommand for Remove { fn signature(&self) -> Signature { Signature::build("rm") - .required("path", SyntaxType::Path) - .switch("recursive") + .required("path", SyntaxShape::Pattern, "the file path to remove") + .switch( + "trash", + "use the platform's recycle bin instead of permanently deleting", + ) + .switch("recursive", "delete subdirectories recursively") } fn usage(&self) -> &str { - "Remove a file, (for removing directory append '--recursive')" + "Remove a file" } fn run( diff --git a/src/commands/save.rs b/src/commands/save.rs index c32c016f4b..45063dca4e 100644 --- a/src/commands/save.rs +++ b/src/commands/save.rs @@ -1,11 +1,85 @@ use crate::commands::{UnevaluatedCallInfo, WholeStreamCommand}; +use crate::data::Value; use crate::errors::ShellError; -use crate::object::Value; use crate::prelude::*; use std::path::{Path, PathBuf}; pub struct Save; +macro_rules! process_string { + ($scope:tt, $input:ident, $name_tag:ident) => {{ + let mut result_string = String::new(); + for res in $input { + match res { + Tagged { + item: Value::Primitive(Primitive::String(s)), + .. + } => { + result_string.push_str(&s); + } + _ => { + break $scope Err(ShellError::labeled_error( + "Save could not successfully save", + "unexpected data during save", + $name_tag, + )); + } + } + } + Ok(result_string.into_bytes()) + }}; +} + +macro_rules! process_string_return_success { + ($scope:tt, $result_vec:ident, $name_tag:ident) => {{ + let mut result_string = String::new(); + for res in $result_vec { + match res { + Ok(ReturnSuccess::Value(Tagged { + item: Value::Primitive(Primitive::String(s)), + .. + })) => { + result_string.push_str(&s); + } + _ => { + break $scope Err(ShellError::labeled_error( + "Save could not successfully save", + "unexpected data during text save", + $name_tag, + )); + } + } + } + Ok(result_string.into_bytes()) + }}; +} + +macro_rules! 
process_binary_return_success { + ($scope:tt, $result_vec:ident, $name_tag:ident) => {{ + let mut result_binary: Vec = Vec::new(); + for res in $result_vec { + match res { + Ok(ReturnSuccess::Value(Tagged { + item: Value::Primitive(Primitive::Binary(b)), + .. + })) => { + for u in b.into_iter() { + result_binary.push(u); + } + } + _ => { + break $scope Err(ShellError::labeled_error( + "Save could not successfully save", + "unexpected data during binary save", + $name_tag, + )); + } + } + } + Ok(result_binary) + }}; +} + #[derive(Deserialize)] pub struct SaveArgs { path: Option>, @@ -19,8 +93,11 @@ impl WholeStreamCommand for Save { fn signature(&self) -> Signature { Signature::build("save") - .optional("path", SyntaxType::Path) - .switch("raw") + .optional("path", SyntaxShape::Path, "the path to save contents to") + .switch( + "raw", + "treat values as-is rather than auto-converting based on file extension", + ) } fn usage(&self) -> &str { @@ -45,49 +122,48 @@ fn save( input, name, shell_manager, - source_map, host, + ctrl_c, commands: registry, .. }: RunnableContext, raw_args: RawCommandArgs, ) -> Result { let mut full_path = PathBuf::from(shell_manager.path()); - let name_span = name; + let name_tag = name.clone(); - let source_map = source_map.clone(); - let stream = async_stream_block! { + let stream = async_stream! { let input: Vec> = input.values.collect().await; if path.is_none() { - // If there is no filename, check the metadata for the origin filename + // If there is no filename, check the metadata for the anchor filename if input.len() > 0 { - let origin = input[0].origin(); - match origin.and_then(|x| source_map.get(&x)) { + let anchor = input[0].anchor(); + match anchor { Some(path) => match path { - SpanSource::File(file) => { - full_path.push(Path::new(file)); + AnchorLocation::File(file) => { + full_path.push(Path::new(&file)); } _ => { yield Err(ShellError::labeled_error( - "Save requires a filepath", + "Save requires a filepath (1)", "needs path", - name_span, + name_tag.clone(), )); } }, None => { yield Err(ShellError::labeled_error( - "Save requires a filepath", + "Save requires a filepath (2)", "needs path", - name_span, + name_tag.clone(), )); } } } else { yield Err(ShellError::labeled_error( - "Save requires a filepath", + "Save requires a filepath (3)", "needs path", - name_span, + name_tag.clone(), )); } } else { @@ -96,88 +172,51 @@ fn save( } } - let content = if !save_raw { - if let Some(extension) = full_path.extension() { - let command_name = format!("to-{}", extension.to_str().unwrap()); - if let Some(converter) = registry.get_command(&command_name) { - let new_args = RawCommandArgs { - host, - shell_manager, - call_info: UnevaluatedCallInfo { - args: crate::parser::hir::Call { - head: raw_args.call_info.args.head, - positional: None, - named: None - }, - source: raw_args.call_info.source, - source_map: raw_args.call_info.source_map, - name_span: raw_args.call_info.name_span, - } - }; - let mut result = converter.run(new_args.with_input(input), ®istry); - let result_vec: Vec> = result.drain_vec().await; - let mut result_string = String::new(); - for res in result_vec { - match res { - Ok(ReturnSuccess::Value(Tagged { item: Value::Primitive(Primitive::String(s)), .. 
})) => { - result_string.push_str(&s); + // TODO use label_break_value once it is stable: + // https://github.com/rust-lang/rust/issues/48594 + let content : Result, ShellError> = 'scope: loop { + break if !save_raw { + if let Some(extension) = full_path.extension() { + let command_name = format!("to-{}", extension.to_str().unwrap()); + if let Some(converter) = registry.get_command(&command_name) { + let new_args = RawCommandArgs { + host, + ctrl_c, + shell_manager, + call_info: UnevaluatedCallInfo { + args: crate::parser::hir::Call { + head: raw_args.call_info.args.head, + positional: None, + named: None + }, + source: raw_args.call_info.source, + name_tag: raw_args.call_info.name_tag, } - _ => { - yield Err(ShellError::labeled_error( - "Save could not successfully save", - "unexpected data during save", - name_span, - )); - }, + }; + let mut result = converter.run(new_args.with_input(input), ®istry, false); + let result_vec: Vec> = result.drain_vec().await; + if converter.is_binary() { + process_binary_return_success!('scope, result_vec, name_tag) + } else { + process_string_return_success!('scope, result_vec, name_tag) } + } else { + process_string!('scope, input, name_tag) } - Ok(result_string) } else { - let mut result_string = String::new(); - for res in input { - match res { - Tagged { item: Value::Primitive(Primitive::String(s)), .. } => { - result_string.push_str(&s); - } - _ => { - yield Err(ShellError::labeled_error( - "Save could not successfully save", - "unexpected data during save", - name_span, - )); - }, - } - } - Ok(result_string) + process_string!('scope, input, name_tag) } } else { - let mut result_string = String::new(); - for res in input { - match res { - Tagged { item: Value::Primitive(Primitive::String(s)), .. } => { - result_string.push_str(&s); - } - _ => { - yield Err(ShellError::labeled_error( - "Save could not successfully save", - "unexpected data during save", - name_span, - )); - }, - } - } - Ok(result_string) - } - } else { - string_from(&input) + Ok(string_from(&input).into_bytes()) + }; }; match content { Ok(save_data) => match std::fs::write(full_path, save_data) { Ok(o) => o, - Err(e) => yield Err(ShellError::string(e.to_string())), + Err(e) => yield Err(ShellError::labeled_error(e.to_string(), "for command", name)), }, - Err(e) => yield Err(ShellError::string(e.to_string())), + Err(e) => yield Err(ShellError::labeled_error(e.to_string(), "for command", name)), } }; @@ -185,7 +224,7 @@ fn save( Ok(OutputStream::new(stream)) } -fn string_from(input: &Vec>) -> Result { +fn string_from(input: &Vec>) -> String { let mut save_data = String::new(); if input.len() > 0 { @@ -202,5 +241,5 @@ fn string_from(input: &Vec>) -> Result { } } - Ok(save_data) + save_data } diff --git a/src/commands/shells.rs b/src/commands/shells.rs index 5e0159e147..6058a42032 100644 --- a/src/commands/shells.rs +++ b/src/commands/shells.rs @@ -1,7 +1,8 @@ use crate::commands::WholeStreamCommand; +use crate::data::TaggedDictBuilder; use crate::errors::ShellError; -use crate::object::TaggedDictBuilder; use crate::prelude::*; +use std::sync::atomic::Ordering; pub struct Shells; @@ -29,17 +30,17 @@ impl WholeStreamCommand for Shells { fn shells(args: CommandArgs, _registry: &CommandRegistry) -> Result { let mut shells_out = VecDeque::new(); - let span = args.call_info.name_span; + let tag = args.call_info.name_tag; for (index, shell) in args.shell_manager.shells.lock().unwrap().iter().enumerate() { - let mut dict = TaggedDictBuilder::new(Tag::unknown_origin(span)); + let mut dict = 
TaggedDictBuilder::new(&tag); - if index == args.shell_manager.current_shell { + if index == (*args.shell_manager.current_shell).load(Ordering::SeqCst) { dict.insert(" ", "X".to_string()); } else { dict.insert(" ", " ".to_string()); } - dict.insert("name", shell.name(&args.call_info.source_map)); + dict.insert("name", shell.name()); dict.insert("path", shell.path()); shells_out.push_back(dict.into_tagged_value()); diff --git a/src/commands/size.rs b/src/commands/size.rs index 02da0460cb..ed996f7cb0 100644 --- a/src/commands/size.rs +++ b/src/commands/size.rs @@ -1,6 +1,6 @@ use crate::commands::WholeStreamCommand; +use crate::data::{TaggedDictBuilder, Value}; use crate::errors::ShellError; -use crate::object::{TaggedDictBuilder, Value}; use crate::prelude::*; pub struct Size; @@ -29,7 +29,7 @@ impl WholeStreamCommand for Size { fn size(args: CommandArgs, _registry: &CommandRegistry) -> Result { let input = args.input; - let span = args.call_info.name_span; + let tag = args.call_info.name_tag; Ok(input .values .map(move |v| match v.item { @@ -37,9 +37,9 @@ fn size(args: CommandArgs, _registry: &CommandRegistry) -> Result Err(ShellError::labeled_error_with_secondary( "Expected a string from pipeline", "requires string input", - span, + &tag, "value originates from here", - v.span(), + v.tag(), )), }) .to_output_stream()) @@ -71,7 +71,7 @@ fn count(contents: &str, tag: impl Into) -> Tagged { } let mut dict = TaggedDictBuilder::new(tag); - //TODO: add back in name when we have it in the span + //TODO: add back in name when we have it in the tag //dict.insert("name", Value::string(name)); dict.insert("lines", Value::int(lines)); dict.insert("words", Value::int(words)); diff --git a/src/commands/skip_while.rs b/src/commands/skip_while.rs index 90ba24b996..e8bec7dac2 100644 --- a/src/commands/skip_while.rs +++ b/src/commands/skip_while.rs @@ -1,6 +1,7 @@ use crate::commands::WholeStreamCommand; use crate::errors::ShellError; use crate::prelude::*; +use log::trace; pub struct SkipWhile; @@ -16,7 +17,11 @@ impl WholeStreamCommand for SkipWhile { fn signature(&self) -> Signature { Signature::build("skip-while") - .required("condition", SyntaxType::Block) + .required( + "condition", + SyntaxShape::Block, + "the condition that must be met to continue skipping", + ) .filter() } @@ -38,7 +43,9 @@ pub fn skip_while( RunnableContext { input, .. }: RunnableContext, ) -> Result { let objects = input.values.skip_while(move |item| { + trace!("ITEM = {:?}", item); let result = condition.invoke(&item); + trace!("RESULT = {:?}", result); let return_value = match result { Ok(ref v) if v.is_true() => true, diff --git a/src/commands/sort_by.rs b/src/commands/sort_by.rs index edce33b963..d384207c92 100644 --- a/src/commands/sort_by.rs +++ b/src/commands/sort_by.rs @@ -15,7 +15,7 @@ impl WholeStreamCommand for SortBy { } fn signature(&self) -> Signature { - Signature::build("sort-by").rest(SyntaxType::String) + Signature::build("sort-by").rest(SyntaxShape::String, "the column(s) to sort by") } fn usage(&self) -> &str { @@ -35,7 +35,7 @@ fn sort_by( SortByArgs { rest }: SortByArgs, mut context: RunnableContext, ) -> Result { - Ok(OutputStream::new(async_stream_block! { + Ok(OutputStream::new(async_stream! 
{ let mut vec = context.input.drain_vec().await; let calc_key = |item: &Tagged| { diff --git a/src/commands/split_column.rs b/src/commands/split_column.rs index 4bde5e25b0..fd872d452d 100644 --- a/src/commands/split_column.rs +++ b/src/commands/split_column.rs @@ -1,6 +1,6 @@ use crate::commands::WholeStreamCommand; +use crate::data::{Primitive, TaggedDictBuilder, Value}; use crate::errors::ShellError; -use crate::object::{Primitive, TaggedDictBuilder, Value}; use crate::prelude::*; use log::trace; @@ -21,9 +21,13 @@ impl WholeStreamCommand for SplitColumn { fn signature(&self) -> Signature { Signature::build("split-column") - .required("separator", SyntaxType::Any) - .switch("collapse-empty") - .rest(SyntaxType::Member) + .required( + "separator", + SyntaxShape::Any, + "the character that denotes what separates columns", + ) + .switch("collapse-empty", "remove empty columns") + .rest(SyntaxShape::Member, "column names to give the new columns") } fn usage(&self) -> &str { @@ -40,7 +44,11 @@ impl WholeStreamCommand for SplitColumn { } fn split_column( - SplitColumnArgs { separator, rest, collapse_empty}: SplitColumnArgs, + SplitColumnArgs { + separator, + rest, + collapse_empty, + }: SplitColumnArgs, RunnableContext { input, name, .. }: RunnableContext, ) -> Result { Ok(input @@ -90,9 +98,9 @@ fn split_column( _ => Err(ShellError::labeled_error_with_secondary( "Expected a string from pipeline", "requires string input", - name, + &name, "value originates from here", - v.span(), + v.tag(), )), }) .to_output_stream()) diff --git a/src/commands/split_row.rs b/src/commands/split_row.rs index fa95225f4f..6c848c325a 100644 --- a/src/commands/split_row.rs +++ b/src/commands/split_row.rs @@ -1,6 +1,6 @@ use crate::commands::WholeStreamCommand; +use crate::data::{Primitive, Value}; use crate::errors::ShellError; -use crate::object::{Primitive, Value}; use crate::prelude::*; use log::trace; @@ -17,8 +17,11 @@ impl WholeStreamCommand for SplitRow { } fn signature(&self) -> Signature { - Signature::build("split-row") - .required("separator", SyntaxType::Any) + Signature::build("split-row").required( + "separator", + SyntaxShape::Any, + "the character that denotes what separates rows", + ) } fn usage(&self) -> &str { @@ -61,9 +64,9 @@ fn split_row( result.push_back(Err(ShellError::labeled_error_with_secondary( "Expected a string from pipeline", "requires string input", - name, + &name, "value originates from here", - v.span(), + v.tag(), ))); result } diff --git a/src/commands/table.rs b/src/commands/table.rs index 4efd6f821f..f8cdcd13c7 100644 --- a/src/commands/table.rs +++ b/src/commands/table.rs @@ -2,20 +2,20 @@ use crate::commands::WholeStreamCommand; use crate::errors::ShellError; use crate::format::TableView; use crate::prelude::*; -use futures_async_stream::async_stream_block; pub struct Table; -#[derive(Deserialize)] -pub struct TableArgs {} - impl WholeStreamCommand for Table { fn name(&self) -> &str { "table" } fn signature(&self) -> Signature { - Signature::build("table") + Signature::build("table").named( + "start_number", + SyntaxShape::Number, + "row number to start viewing from", + ) } fn usage(&self) -> &str { @@ -27,20 +27,37 @@ impl WholeStreamCommand for Table { args: CommandArgs, registry: &CommandRegistry, ) -> Result { - args.process(registry, table)?.run() + table(args, registry) } } -pub fn table(_args: TableArgs, context: RunnableContext) -> Result { - let stream = async_stream_block! 
{ - let input: Vec> = context.input.into_vec().await; +fn table(args: CommandArgs, registry: &CommandRegistry) -> Result { + let args = args.evaluate_once(registry)?; + + let stream = async_stream! { + let host = args.host.clone(); + let start_number = match args.get("start_number") { + Some(Tagged { item: Value::Primitive(Primitive::Int(i)), .. }) => { + i.to_usize().unwrap() + } + _ => { + 0 + } + }; + + let input: Vec> = args.input.into_vec().await; if input.len() > 0 { - let mut host = context.host.lock().unwrap(); - let view = TableView::from_list(&input); + let mut host = host.lock().unwrap(); + let view = TableView::from_list(&input, start_number); + if let Some(view) = view { handle_unexpected(&mut *host, |host| crate::format::print_view(&view, host)); } } + // Needed for async_stream to type check + if false { + yield ReturnSuccess::value(Value::nothing().tagged_unknown()); + } }; Ok(OutputStream::new(stream)) diff --git a/src/commands/tags.rs b/src/commands/tags.rs index 973105709c..221e8cc303 100644 --- a/src/commands/tags.rs +++ b/src/commands/tags.rs @@ -1,6 +1,6 @@ use crate::commands::WholeStreamCommand; +use crate::data::{TaggedDictBuilder, Value}; use crate::errors::ShellError; -use crate::object::{TaggedDictBuilder, Value}; use crate::prelude::*; pub struct Tags; @@ -28,26 +28,25 @@ impl WholeStreamCommand for Tags { } fn tags(args: CommandArgs, _registry: &CommandRegistry) -> Result { - let source_map = args.call_info.source_map.clone(); Ok(args .input .values .map(move |v| { let mut tags = TaggedDictBuilder::new(v.tag()); { - let origin = v.origin(); - let span = v.span(); + let anchor = v.anchor(); + let span = v.tag().span; let mut dict = TaggedDictBuilder::new(v.tag()); - dict.insert("start", Value::int(span.start as i64)); - dict.insert("end", Value::int(span.end as i64)); + dict.insert("start", Value::int(span.start() as i64)); + dict.insert("end", Value::int(span.end() as i64)); tags.insert_tagged("span", dict.into_tagged_value()); - match origin.and_then(|x| source_map.get(&x)) { - Some(SpanSource::File(source)) => { - tags.insert("origin", Value::string(source)); + match anchor { + Some(AnchorLocation::File(source)) => { + tags.insert("anchor", Value::string(source)); } - Some(SpanSource::Url(source)) => { - tags.insert("origin", Value::string(source)); + Some(AnchorLocation::Url(source)) => { + tags.insert("anchor", Value::string(source)); } _ => {} } diff --git a/src/commands/to_array.rs b/src/commands/to_array.rs deleted file mode 100644 index 04c429e1b4..0000000000 --- a/src/commands/to_array.rs +++ /dev/null @@ -1,38 +0,0 @@ -use crate::commands::WholeStreamCommand; -use crate::object::Value; -use crate::prelude::*; - -pub struct ToArray; - -impl WholeStreamCommand for ToArray { - fn name(&self) -> &str { - "to-array" - } - - fn signature(&self) -> Signature { - Signature::build("to-array") - } - - fn usage(&self) -> &str { - "Collapse rows into a single list." 
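
The `tags` command reworked above now reads the span and anchor directly off each value's Tag instead of going through a separate source map. A simplified sketch of that flattening, using hypothetical stand-ins for Span, AnchorLocation, and Tag rather than the real nushell types:

#[derive(Clone, Debug)]
enum AnchorLocation {
    File(String),
    Url(String),
}

#[derive(Clone, Copy, Debug)]
struct Span {
    start: usize,
    end: usize,
}

#[derive(Clone, Debug)]
struct Tag {
    span: Span,
    anchor: Option<AnchorLocation>,
}

// Turn a tag into plain (column, value) pairs: span start/end plus an optional anchor.
fn describe(tag: &Tag) -> Vec<(&'static str, String)> {
    let mut row = vec![
        ("start", tag.span.start.to_string()),
        ("end", tag.span.end.to_string()),
    ];
    match &tag.anchor {
        Some(AnchorLocation::File(f)) => row.push(("anchor", f.clone())),
        Some(AnchorLocation::Url(u)) => row.push(("anchor", u.clone())),
        None => {}
    }
    row
}

fn main() {
    let tag = Tag {
        span: Span { start: 0, end: 4 },
        anchor: Some(AnchorLocation::Url("https://example.com".into())),
    };
    println!("{:?}", describe(&tag));
}
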
- } - - fn run( - &self, - args: CommandArgs, - registry: &CommandRegistry, - ) -> Result { - to_array(args, registry) - } -} - -fn to_array(args: CommandArgs, registry: &CommandRegistry) -> Result { - let args = args.evaluate_once(registry)?; - let span = args.call_info.name_span; - let out = args.input.values.collect(); - - Ok(out - .map(move |vec: Vec<_>| stream![Value::List(vec).simple_spanned(span)]) - .flatten_stream() - .from_input_stream()) -} diff --git a/src/commands/to_bson.rs b/src/commands/to_bson.rs index 35fc9839d4..eabf8381ec 100644 --- a/src/commands/to_bson.rs +++ b/src/commands/to_bson.rs @@ -1,5 +1,5 @@ use crate::commands::WholeStreamCommand; -use crate::object::{Dictionary, Primitive, Value}; +use crate::data::{Dictionary, Primitive, Value}; use crate::prelude::*; use bson::{encode_document, oid::ObjectId, spec::BinarySubtype, Bson, Document}; use std::convert::TryInto; @@ -26,6 +26,10 @@ impl WholeStreamCommand for ToBSON { ) -> Result { to_bson(args, registry) } + + fn is_binary(&self) -> bool { + true + } } pub fn value_to_bson_value(v: &Tagged) -> Result { @@ -42,19 +46,21 @@ pub fn value_to_bson_value(v: &Tagged) -> Result { Value::Primitive(Primitive::BeginningOfStream) => Bson::Null, Value::Primitive(Primitive::Decimal(d)) => Bson::FloatingPoint(d.to_f64().unwrap()), Value::Primitive(Primitive::Int(i)) => { - Bson::I64(i.tagged(v.tag).coerce_into("converting to BSON")?) + Bson::I64(i.tagged(&v.tag).coerce_into("converting to BSON")?) } Value::Primitive(Primitive::Nothing) => Bson::Null, Value::Primitive(Primitive::String(s)) => Bson::String(s.clone()), + Value::Primitive(Primitive::Pattern(p)) => Bson::String(p.clone()), Value::Primitive(Primitive::Path(s)) => Bson::String(s.display().to_string()), - Value::List(l) => Bson::Array( + Value::Table(l) => Bson::Array( l.iter() .map(|x| value_to_bson_value(x)) .collect::>()?, ), Value::Block(_) => Bson::Null, - Value::Binary(b) => Bson::Binary(BinarySubtype::Generic, b.clone()), - Value::Object(o) => object_value_to_bson(o)?, + Value::Error(e) => return Err(e.clone()), + Value::Primitive(Primitive::Binary(b)) => Bson::Binary(BinarySubtype::Generic, b.clone()), + Value::Row(o) => object_value_to_bson(o)?, }) } @@ -165,7 +171,7 @@ fn get_binary_subtype<'a>(tagged_value: &'a Tagged) -> Result unreachable!(), }), Value::Primitive(Primitive::Int(i)) => Ok(BinarySubtype::UserDefined( - i.tagged(tagged_value.tag) + i.tagged(&tagged_value.tag) .coerce_into("converting to BSON binary subtype")?, )), _ => Err(ShellError::type_error( @@ -185,44 +191,40 @@ fn generic_object_value_to_bson(o: &Dictionary) -> Result { Ok(Bson::Document(doc)) } -fn shell_encode_document( - writer: &mut Vec, - doc: Document, - span: Span, -) -> Result<(), ShellError> { +fn shell_encode_document(writer: &mut Vec, doc: Document, tag: Tag) -> Result<(), ShellError> { match encode_document(writer, &doc) { Err(e) => Err(ShellError::labeled_error( format!("Failed to encode document due to: {:?}", e), "requires BSON-compatible document", - span, + tag, )), _ => Ok(()), } } -fn bson_value_to_bytes(bson: Bson, span: Span) -> Result, ShellError> { +fn bson_value_to_bytes(bson: Bson, tag: Tag) -> Result, ShellError> { let mut out = Vec::new(); match bson { Bson::Array(a) => { for v in a.into_iter() { match v { - Bson::Document(d) => shell_encode_document(&mut out, d, span)?, + Bson::Document(d) => shell_encode_document(&mut out, d, tag.clone())?, _ => { return Err(ShellError::labeled_error( format!("All top level values must be Documents, got {:?}", v), 
"requires BSON-compatible document", - span, + &tag, )) } } } } - Bson::Document(d) => shell_encode_document(&mut out, d, span)?, + Bson::Document(d) => shell_encode_document(&mut out, d, tag)?, _ => { return Err(ShellError::labeled_error( format!("All top level values must be Documents, got {:?}", bson), "requires BSON-compatible document", - span, + tag, )) } } @@ -231,22 +233,42 @@ fn bson_value_to_bytes(bson: Bson, span: Span) -> Result, ShellError> { fn to_bson(args: CommandArgs, registry: &CommandRegistry) -> Result { let args = args.evaluate_once(registry)?; - let name_span = args.name_span(); - let out = args.input; + let name_tag = args.name_tag(); + let stream = async_stream! { + let input: Vec> = args.input.values.collect().await; - Ok(out - .values - .map( - move |a| match bson_value_to_bytes(value_to_bson_value(&a)?, name_span) { - Ok(x) => ReturnSuccess::value(Value::Binary(x).simple_spanned(name_span)), - _ => Err(ShellError::labeled_error_with_secondary( - "Expected an object with BSON-compatible structure from pipeline", - "requires BSON-compatible input: Must be Array or Object", - name_span, - format!("{} originates from here", a.item.type_name()), - a.span(), - )), - }, - ) - .to_output_stream()) + let to_process_input = if input.len() > 1 { + let tag = input[0].tag.clone(); + vec![Tagged { item: Value::Table(input), tag } ] + } else if input.len() == 1 { + input + } else { + vec![] + }; + + for value in to_process_input { + match value_to_bson_value(&value) { + Ok(bson_value) => { + match bson_value_to_bytes(bson_value, name_tag.clone()) { + Ok(x) => yield ReturnSuccess::value( + Value::binary(x).tagged(&name_tag), + ), + _ => yield Err(ShellError::labeled_error_with_secondary( + "Expected a table with BSON-compatible structure.tag() from pipeline", + "requires BSON-compatible input", + &name_tag, + "originates from here".to_string(), + value.tag(), + )), + } + } + _ => yield Err(ShellError::labeled_error( + "Expected a table with BSON-compatible structure from pipeline", + "requires BSON-compatible input", + &name_tag)) + } + } + }; + + Ok(stream.to_output_stream()) } diff --git a/src/commands/to_csv.rs b/src/commands/to_csv.rs index 58ad208192..d2b46d9f88 100644 --- a/src/commands/to_csv.rs +++ b/src/commands/to_csv.rs @@ -1,5 +1,5 @@ use crate::commands::WholeStreamCommand; -use crate::object::{Primitive, Value}; +use crate::data::{Primitive, Value}; use crate::prelude::*; use csv::WriterBuilder; @@ -16,8 +16,10 @@ impl WholeStreamCommand for ToCSV { } fn signature(&self) -> Signature { - Signature::build("to-csv") - .switch("headerless") + Signature::build("to-csv").switch( + "headerless", + "do not output the columns names as the first row", + ) } fn usage(&self) -> &str { @@ -33,50 +35,128 @@ impl WholeStreamCommand for ToCSV { } } -pub fn value_to_csv_value(v: &Value) -> Value { - match v { +pub fn value_to_csv_value(v: &Tagged) -> Tagged { + match &v.item { Value::Primitive(Primitive::String(s)) => Value::Primitive(Primitive::String(s.clone())), Value::Primitive(Primitive::Nothing) => Value::Primitive(Primitive::Nothing), Value::Primitive(Primitive::Boolean(b)) => Value::Primitive(Primitive::Boolean(b.clone())), + Value::Primitive(Primitive::Decimal(f)) => Value::Primitive(Primitive::Decimal(f.clone())), + Value::Primitive(Primitive::Int(i)) => Value::Primitive(Primitive::Int(i.clone())), + Value::Primitive(Primitive::Path(x)) => Value::Primitive(Primitive::Path(x.clone())), Value::Primitive(Primitive::Bytes(b)) => Value::Primitive(Primitive::Bytes(b.clone())), 
Value::Primitive(Primitive::Date(d)) => Value::Primitive(Primitive::Date(d.clone())), - Value::Object(o) => Value::Object(o.clone()), - Value::List(l) => Value::List(l.clone()), + Value::Row(o) => Value::Row(o.clone()), + Value::Table(l) => Value::Table(l.clone()), Value::Block(_) => Value::Primitive(Primitive::Nothing), _ => Value::Primitive(Primitive::Nothing), } + .tagged(v.tag.clone()) } -fn to_string_helper(v: &Value) -> Result> { - match v { +fn to_string_helper(v: &Tagged) -> Result { + match &v.item { Value::Primitive(Primitive::Date(d)) => Ok(d.to_string()), Value::Primitive(Primitive::Bytes(b)) => Ok(format!("{}", b)), Value::Primitive(Primitive::Boolean(_)) => Ok(v.as_string()?), - Value::List(_) => return Ok(String::from("[list list]")), - Value::Object(_) => return Ok(String::from("[object]")), + Value::Primitive(Primitive::Decimal(_)) => Ok(v.as_string()?), + Value::Primitive(Primitive::Int(_)) => Ok(v.as_string()?), + Value::Primitive(Primitive::Path(_)) => Ok(v.as_string()?), + Value::Table(_) => return Ok(String::from("[Table]")), + Value::Row(_) => return Ok(String::from("[Row]")), Value::Primitive(Primitive::String(s)) => return Ok(s.to_string()), - _ => return Err("Bad input".into()), + _ => { + return Err(ShellError::labeled_error( + "Unexpected value", + "", + v.tag.clone(), + )) + } } } -pub fn to_string(v: &Value) -> Result> { +fn merge_descriptors(values: &[Tagged]) -> Vec { + let mut ret = vec![]; + for value in values { + for desc in value.data_descriptors() { + if !ret.contains(&desc) { + ret.push(desc); + } + } + } + ret +} + +pub fn to_string(tagged_value: &Tagged) -> Result { + let v = &tagged_value.item; + match v { - Value::Object(o) => { + Value::Row(o) => { let mut wtr = WriterBuilder::new().from_writer(vec![]); let mut fields: VecDeque = VecDeque::new(); let mut values: VecDeque = VecDeque::new(); for (k, v) in o.entries.iter() { fields.push_back(k.clone()); + values.push_back(to_string_helper(&v)?); } wtr.write_record(fields).expect("can not write."); wtr.write_record(values).expect("can not write."); - return Ok(String::from_utf8(wtr.into_inner()?)?); + return Ok(String::from_utf8(wtr.into_inner().map_err(|_| { + ShellError::labeled_error( + "Could not convert record", + "original value", + &tagged_value.tag, + ) + })?) + .map_err(|_| { + ShellError::labeled_error( + "Could not convert record", + "original value", + &tagged_value.tag, + ) + })?); } - _ => return to_string_helper(&v), + Value::Table(list) => { + let mut wtr = WriterBuilder::new().from_writer(vec![]); + + let merged_descriptors = merge_descriptors(&list); + wtr.write_record(&merged_descriptors) + .expect("can not write."); + + for l in list { + let mut row = vec![]; + for desc in &merged_descriptors { + match l.item.get_data_by_key(&desc) { + Some(s) => { + row.push(to_string_helper(s)?); + } + None => { + row.push(String::new()); + } + } + } + wtr.write_record(&row).expect("can not write"); + } + + return Ok(String::from_utf8(wtr.into_inner().map_err(|_| { + ShellError::labeled_error( + "Could not convert record", + "original value", + &tagged_value.tag, + ) + })?) + .map_err(|_| { + ShellError::labeled_error( + "Could not convert record", + "original value", + &tagged_value.tag, + ) + })?); + } + _ => return to_string_helper(tagged_value), } } @@ -84,30 +164,41 @@ fn to_csv( ToCSVArgs { headerless }: ToCSVArgs, RunnableContext { input, name, .. }: RunnableContext, ) -> Result { - let name_span = name; - let out = input; + let name_tag = name; + let stream = async_stream! 
{ + let input: Vec> = input.values.collect().await; - Ok(out - .values - .map(move |a| match to_string(&value_to_csv_value(&a.item)) { - Ok(x) => { - let converted = if headerless { - x.lines().skip(1).collect() - } else { - x - }; + let to_process_input = if input.len() > 1 { + let tag = input[0].tag.clone(); + vec![Tagged { item: Value::Table(input), tag } ] + } else if input.len() == 1 { + input + } else { + vec![] + }; - ReturnSuccess::value( - Value::Primitive(Primitive::String(converted)).simple_spanned(name_span), - ) - } - _ => Err(ShellError::labeled_error_with_secondary( - "Expected an object with CSV-compatible structure from pipeline", - "requires CSV-compatible input", - name_span, - format!("{} originates from here", a.item.type_name()), - a.span(), - )), - }) - .to_output_stream()) + for value in to_process_input { + match to_string(&value_to_csv_value(&value)) { + Ok(x) => { + let converted = if headerless { + x.lines().skip(1).collect() + } else { + x + }; + yield ReturnSuccess::value(Value::Primitive(Primitive::String(converted)).tagged(&name_tag)) + } + _ => { + yield Err(ShellError::labeled_error_with_secondary( + "Expected a table with CSV-compatible structure.tag() from pipeline", + "requires CSV-compatible input", + &name_tag, + "originates from here".to_string(), + value.tag(), + )) + } + } + } + }; + + Ok(stream.to_output_stream()) } diff --git a/src/commands/to_json.rs b/src/commands/to_json.rs index 9849e691e3..40edc5aeb8 100644 --- a/src/commands/to_json.rs +++ b/src/commands/to_json.rs @@ -1,5 +1,5 @@ use crate::commands::WholeStreamCommand; -use crate::object::{Primitive, Value}; +use crate::data::{Primitive, Value}; use crate::prelude::*; pub struct ToJSON; @@ -42,22 +42,24 @@ pub fn value_to_json_value(v: &Tagged) -> Result serde_json::Value::Number(serde_json::Number::from( - CoerceInto::::coerce_into(i.tagged(v.tag), "converting to JSON number")?, + CoerceInto::::coerce_into(i.tagged(&v.tag), "converting to JSON number")?, )), Value::Primitive(Primitive::Nothing) => serde_json::Value::Null, + Value::Primitive(Primitive::Pattern(s)) => serde_json::Value::String(s.clone()), Value::Primitive(Primitive::String(s)) => serde_json::Value::String(s.clone()), Value::Primitive(Primitive::Path(s)) => serde_json::Value::String(s.display().to_string()), - Value::List(l) => serde_json::Value::Array(json_list(l)?), + Value::Table(l) => serde_json::Value::Array(json_list(l)?), + Value::Error(e) => return Err(e.clone()), Value::Block(_) => serde_json::Value::Null, - Value::Binary(b) => serde_json::Value::Array( + Value::Primitive(Primitive::Binary(b)) => serde_json::Value::Array( b.iter() .map(|x| { serde_json::Value::Number(serde_json::Number::from_f64(*x as f64).unwrap()) }) .collect(), ), - Value::Object(o) => { + Value::Row(o) => { let mut m = serde_json::Map::new(); for (k, v) in o.entries.iter() { m.insert(k.clone(), value_to_json_value(v)?); @@ -79,24 +81,42 @@ fn json_list(input: &Vec>) -> Result, Shell fn to_json(args: CommandArgs, registry: &CommandRegistry) -> Result { let args = args.evaluate_once(registry)?; - let name_span = args.name_span(); - let out = args.input; + let name_tag = args.name_tag(); + let stream = async_stream! { + let input: Vec> = args.input.values.collect().await; - Ok(out - .values - .map( - move |a| match serde_json::to_string(&value_to_json_value(&a)?) 
{ - Ok(x) => ReturnSuccess::value( - Value::Primitive(Primitive::String(x)).simple_spanned(name_span), - ), - _ => Err(ShellError::labeled_error_with_secondary( - "Expected an object with JSON-compatible structure from pipeline", + let to_process_input = if input.len() > 1 { + let tag = input[0].tag.clone(); + vec![Tagged { item: Value::Table(input), tag } ] + } else if input.len() == 1 { + input + } else { + vec![] + }; + + for value in to_process_input { + match value_to_json_value(&value) { + Ok(json_value) => { + match serde_json::to_string(&json_value) { + Ok(x) => yield ReturnSuccess::value( + Value::Primitive(Primitive::String(x)).tagged(&name_tag), + ), + _ => yield Err(ShellError::labeled_error_with_secondary( + "Expected a table with JSON-compatible structure.tag() from pipeline", + "requires JSON-compatible input", + &name_tag, + "originates from here".to_string(), + value.tag(), + )), + } + } + _ => yield Err(ShellError::labeled_error( + "Expected a table with JSON-compatible structure from pipeline", "requires JSON-compatible input", - name_span, - format!("{} originates from here", a.item.type_name()), - a.span(), - )), - }, - ) - .to_output_stream()) + &name_tag)) + } + } + }; + + Ok(stream.to_output_stream()) } diff --git a/src/commands/to_sqlite.rs b/src/commands/to_sqlite.rs index dca953c235..4f9181ec7c 100644 --- a/src/commands/to_sqlite.rs +++ b/src/commands/to_sqlite.rs @@ -1,5 +1,5 @@ use crate::commands::WholeStreamCommand; -use crate::object::{Dictionary, Primitive, Value}; +use crate::data::{Dictionary, Primitive, Value}; use crate::prelude::*; use hex::encode; use rusqlite::{Connection, NO_PARAMS}; @@ -27,6 +27,10 @@ impl WholeStreamCommand for ToSQLite { ) -> Result { to_sqlite(args, registry) } + + fn is_binary(&self) -> bool { + true + } } pub struct ToDB; @@ -51,6 +55,10 @@ impl WholeStreamCommand for ToDB { ) -> Result { to_sqlite(args, registry) } + + fn is_binary(&self) -> bool { + true + } } fn comma_concat(acc: String, current: String) -> String { @@ -63,7 +71,7 @@ fn comma_concat(acc: String, current: String) -> String { fn get_columns(rows: &Vec>) -> Result { match &rows[0].item { - Value::Object(d) => Ok(d + Value::Row(d) => Ok(d .entries .iter() .map(|(k, _v)| k.clone()) @@ -77,17 +85,18 @@ fn get_columns(rows: &Vec>) -> Result { fn nu_value_to_sqlite_string(v: Value) -> String { match v { - Value::Binary(u) => format!("x'{}'", encode(u)), Value::Primitive(p) => match p { Primitive::Nothing => "NULL".into(), Primitive::Int(i) => format!("{}", i), Primitive::Decimal(f) => format!("{}", f), Primitive::Bytes(u) => format!("{}", u), + Primitive::Pattern(s) => format!("'{}'", s.replace("'", "''")), Primitive::String(s) => format!("'{}'", s.replace("'", "''")), Primitive::Boolean(true) => "1".into(), Primitive::Boolean(_) => "0".into(), Primitive::Date(d) => format!("'{}'", d), Primitive::Path(p) => format!("'{}'", p.display().to_string().replace("'", "''")), + Primitive::Binary(u) => format!("x'{}'", encode(u)), Primitive::BeginningOfStream => "NULL".into(), Primitive::EndOfStream => "NULL".into(), }, @@ -99,7 +108,7 @@ fn get_insert_values(rows: Vec>) -> Result let values: Result, _> = rows .into_iter() .map(|value| match value.item { - Value::Object(d) => Ok(format!( + Value::Row(d) => Ok(format!( "({})", d.entries .iter() @@ -131,7 +140,7 @@ fn generate_statements(table: Dictionary) -> Result<(String, String), std::io::E }; let (columns, insert_values) = match table.entries.get("table_values") { Some(Tagged { - item: Value::List(l), + item: 
Value::Table(l), .. }) => (get_columns(l), get_insert_values(l.to_vec())), _ => { @@ -161,7 +170,7 @@ fn sqlite_input_stream_to_bytes( let tag = values[0].tag.clone(); for value in values.into_iter() { match value.item() { - Value::Object(d) => { + Value::Row(d) => { let (create, insert) = generate_statements(d.to_owned())?; match conn .execute(&create, NO_PARAMS) @@ -179,33 +188,33 @@ fn sqlite_input_stream_to_bytes( other => { return Err(std::io::Error::new( std::io::ErrorKind::Other, - format!("Expected object, found {:?}", other), + format!("Expected row, found {:?}", other), )) } } } let mut out = Vec::new(); tempfile.read_to_end(&mut out)?; - Ok(Value::Binary(out).tagged(tag)) + Ok(Value::binary(out).tagged(tag)) } fn to_sqlite(args: CommandArgs, registry: &CommandRegistry) -> Result { let args = args.evaluate_once(registry)?; - let name_span = args.name_span(); - let stream = async_stream_block! { - let values: Vec<_> = args.input.into_vec().await; - match sqlite_input_stream_to_bytes(values) { - Ok(out) => { - yield ReturnSuccess::value(out) - } - Err(_) => { + let name_tag = args.name_tag(); + let stream = async_stream! { + let input: Vec> = args.input.values.collect().await; + + match sqlite_input_stream_to_bytes(input) { + Ok(out) => yield ReturnSuccess::value(out), + _ => { yield Err(ShellError::labeled_error( - "Expected an object with SQLite-compatible structure from pipeline", + "Expected a table with SQLite-compatible structure.tag() from pipeline", "requires SQLite-compatible input", - name_span, - )) - } - }; + name_tag, + )) + }, + } }; + Ok(stream.to_output_stream()) } diff --git a/src/commands/to_toml.rs b/src/commands/to_toml.rs index 8c44e21b2c..778fdd2561 100644 --- a/src/commands/to_toml.rs +++ b/src/commands/to_toml.rs @@ -1,5 +1,5 @@ use crate::commands::WholeStreamCommand; -use crate::object::{Primitive, Value}; +use crate::data::{Primitive, Value}; use crate::prelude::*; pub struct ToTOML; @@ -38,21 +38,23 @@ pub fn value_to_toml_value(v: &Tagged) -> Result toml::Value::String("".to_string()) } Value::Primitive(Primitive::Decimal(f)) => { - toml::Value::Float(f.tagged(v.tag).coerce_into("converting to TOML float")?) + toml::Value::Float(f.tagged(&v.tag).coerce_into("converting to TOML float")?) } Value::Primitive(Primitive::Int(i)) => { - toml::Value::Integer(i.tagged(v.tag).coerce_into("converting to TOML integer")?) + toml::Value::Integer(i.tagged(&v.tag).coerce_into("converting to TOML integer")?) 
} Value::Primitive(Primitive::Nothing) => toml::Value::String("".to_string()), + Value::Primitive(Primitive::Pattern(s)) => toml::Value::String(s.clone()), Value::Primitive(Primitive::String(s)) => toml::Value::String(s.clone()), Value::Primitive(Primitive::Path(s)) => toml::Value::String(s.display().to_string()), - Value::List(l) => toml::Value::Array(collect_values(l)?), + Value::Table(l) => toml::Value::Array(collect_values(l)?), + Value::Error(e) => return Err(e.clone()), Value::Block(_) => toml::Value::String("".to_string()), - Value::Binary(b) => { + Value::Primitive(Primitive::Binary(b)) => { toml::Value::Array(b.iter().map(|x| toml::Value::Integer(*x as i64)).collect()) } - Value::Object(o) => { + Value::Row(o) => { let mut m = toml::map::Map::new(); for (k, v) in o.entries.iter() { m.insert(k.clone(), value_to_toml_value(v)?); @@ -74,24 +76,42 @@ fn collect_values(input: &Vec>) -> Result, ShellE fn to_toml(args: CommandArgs, registry: &CommandRegistry) -> Result { let args = args.evaluate_once(registry)?; - let name_span = args.name_span(); - let out = args.input; + let name_tag = args.name_tag(); + let stream = async_stream! { + let input: Vec> = args.input.values.collect().await; - Ok(out - .values - .map(move |a| match toml::to_string(&value_to_toml_value(&a)?) { - Ok(val) => { - return ReturnSuccess::value( - Value::Primitive(Primitive::String(val)).simple_spanned(name_span), - ) + let to_process_input = if input.len() > 1 { + let tag = input[0].tag.clone(); + vec![Tagged { item: Value::Table(input), tag } ] + } else if input.len() == 1 { + input + } else { + vec![] + }; + + for value in to_process_input { + match value_to_toml_value(&value) { + Ok(toml_value) => { + match toml::to_string(&toml_value) { + Ok(x) => yield ReturnSuccess::value( + Value::Primitive(Primitive::String(x)).tagged(&name_tag), + ), + _ => yield Err(ShellError::labeled_error_with_secondary( + "Expected a table with TOML-compatible structure.tag() from pipeline", + "requires TOML-compatible input", + &name_tag, + "originates from here".to_string(), + value.tag(), + )), + } + } + _ => yield Err(ShellError::labeled_error( + "Expected a table with TOML-compatible structure from pipeline", + "requires TOML-compatible input", + &name_tag)) } - _ => Err(ShellError::labeled_error_with_secondary( - "Expected an object with TOML-compatible structure from pipeline", - "requires TOML-compatible input", - name_span, - format!("{} originates from here", a.item.type_name()), - a.span(), - )), - }) - .to_output_stream()) + } + }; + + Ok(stream.to_output_stream()) } diff --git a/src/commands/to_tsv.rs b/src/commands/to_tsv.rs index 1a229d768e..7857d1eeec 100644 --- a/src/commands/to_tsv.rs +++ b/src/commands/to_tsv.rs @@ -1,5 +1,5 @@ use crate::commands::WholeStreamCommand; -use crate::object::{Primitive, Value}; +use crate::data::{Primitive, Value}; use crate::prelude::*; use csv::WriterBuilder; @@ -16,8 +16,10 @@ impl WholeStreamCommand for ToTSV { } fn signature(&self) -> Signature { - Signature::build("to-tsv") - .switch("headerless") + Signature::build("to-tsv").switch( + "headerless", + "do not output the column names as the first row", + ) } fn usage(&self) -> &str { @@ -33,35 +35,65 @@ impl WholeStreamCommand for ToTSV { } } -pub fn value_to_tsv_value(v: &Value) -> Value { +pub fn value_to_tsv_value(tagged_value: &Tagged) -> Tagged { + let v = &tagged_value.item; + match v { Value::Primitive(Primitive::String(s)) => Value::Primitive(Primitive::String(s.clone())), Value::Primitive(Primitive::Nothing) => 
Value::Primitive(Primitive::Nothing), Value::Primitive(Primitive::Boolean(b)) => Value::Primitive(Primitive::Boolean(b.clone())), + Value::Primitive(Primitive::Decimal(f)) => Value::Primitive(Primitive::Decimal(f.clone())), + Value::Primitive(Primitive::Int(i)) => Value::Primitive(Primitive::Int(i.clone())), + Value::Primitive(Primitive::Path(x)) => Value::Primitive(Primitive::Path(x.clone())), Value::Primitive(Primitive::Bytes(b)) => Value::Primitive(Primitive::Bytes(b.clone())), Value::Primitive(Primitive::Date(d)) => Value::Primitive(Primitive::Date(d.clone())), - Value::Object(o) => Value::Object(o.clone()), - Value::List(l) => Value::List(l.clone()), + Value::Row(o) => Value::Row(o.clone()), + Value::Table(l) => Value::Table(l.clone()), Value::Block(_) => Value::Primitive(Primitive::Nothing), _ => Value::Primitive(Primitive::Nothing), } + .tagged(&tagged_value.tag) } -fn to_string_helper(v: &Value) -> Result> { +fn to_string_helper(tagged_value: &Tagged) -> Result { + let v = &tagged_value.item; match v { Value::Primitive(Primitive::Date(d)) => Ok(d.to_string()), Value::Primitive(Primitive::Bytes(b)) => Ok(format!("{}", b)), - Value::Primitive(Primitive::Boolean(_)) => Ok(v.as_string()?), - Value::List(_) => return Ok(String::from("[list list]")), - Value::Object(_) => return Ok(String::from("[object]")), + Value::Primitive(Primitive::Boolean(_)) => Ok(tagged_value.as_string()?), + Value::Primitive(Primitive::Decimal(_)) => Ok(tagged_value.as_string()?), + Value::Primitive(Primitive::Int(_)) => Ok(tagged_value.as_string()?), + Value::Primitive(Primitive::Path(_)) => Ok(tagged_value.as_string()?), + Value::Table(_) => return Ok(String::from("[table]")), + Value::Row(_) => return Ok(String::from("[row]")), Value::Primitive(Primitive::String(s)) => return Ok(s.to_string()), - _ => return Err("Bad input".into()), + _ => { + return Err(ShellError::labeled_error( + "Unexpected value", + "original value", + &tagged_value.tag, + )) + } } } -pub fn to_string(v: &Value) -> Result> { +fn merge_descriptors(values: &[Tagged]) -> Vec { + let mut ret = vec![]; + for value in values { + for desc in value.data_descriptors() { + if !ret.contains(&desc) { + ret.push(desc); + } + } + } + ret +} + +pub fn to_string(tagged_value: &Tagged) -> Result { + let v = &tagged_value.item; + match v { - Value::Object(o) => { + Value::Row(o) => { let mut wtr = WriterBuilder::new().delimiter(b'\t').from_writer(vec![]); let mut fields: VecDeque = VecDeque::new(); let mut values: VecDeque = VecDeque::new(); @@ -74,9 +106,59 @@ pub fn to_string(v: &Value) -> Result> { wtr.write_record(fields).expect("can not write."); wtr.write_record(values).expect("can not write."); - return Ok(String::from_utf8(wtr.into_inner()?)?); + return Ok(String::from_utf8(wtr.into_inner().map_err(|_| { + ShellError::labeled_error( + "Could not convert record", + "original value", + &tagged_value.tag, + ) + })?) 
+ .map_err(|_| { + ShellError::labeled_error( + "Could not convert record", + "original value", + &tagged_value.tag, + ) + })?); } - _ => return to_string_helper(&v), + Value::Table(list) => { + let mut wtr = WriterBuilder::new().delimiter(b'\t').from_writer(vec![]); + + let merged_descriptors = merge_descriptors(&list); + wtr.write_record(&merged_descriptors) + .expect("can not write."); + + for l in list { + let mut row = vec![]; + for desc in &merged_descriptors { + match l.item.get_data_by_key(&desc) { + Some(s) => { + row.push(to_string_helper(s)?); + } + None => { + row.push(String::new()); + } + } + } + wtr.write_record(&row).expect("can not write"); + } + + return Ok(String::from_utf8(wtr.into_inner().map_err(|_| { + ShellError::labeled_error( + "Could not convert record", + "original value", + &tagged_value.tag, + ) + })?) + .map_err(|_| { + ShellError::labeled_error( + "Could not convert record", + "original value", + &tagged_value.tag, + ) + })?); + } + _ => return to_string_helper(tagged_value), } } @@ -84,30 +166,41 @@ fn to_tsv( ToTSVArgs { headerless }: ToTSVArgs, RunnableContext { input, name, .. }: RunnableContext, ) -> Result { - let name_span = name; - let out = input; + let name_tag = name; + let stream = async_stream! { + let input: Vec> = input.values.collect().await; - Ok(out - .values - .map(move |a| match to_string(&value_to_tsv_value(&a.item)) { - Ok(x) => { - let converted = if headerless { - x.lines().skip(1).collect() - } else { - x - }; + let to_process_input = if input.len() > 1 { + let tag = input[0].tag.clone(); + vec![Tagged { item: Value::Table(input), tag } ] + } else if input.len() == 1 { + input + } else { + vec![] + }; - ReturnSuccess::value( - Value::Primitive(Primitive::String(converted)).simple_spanned(name_span), - ) - } - _ => Err(ShellError::labeled_error_with_secondary( - "Expected an object with TSV-compatible structure from pipeline", - "requires TSV-compatible input", - name_span, - format!("{} originates from here", a.item.type_name()), - a.span(), - )), - }) - .to_output_stream()) + for value in to_process_input { + match to_string(&value_to_tsv_value(&value)) { + Ok(x) => { + let converted = if headerless { + x.lines().skip(1).collect() + } else { + x + }; + yield ReturnSuccess::value(Value::Primitive(Primitive::String(converted)).tagged(&name_tag)) + } + _ => { + yield Err(ShellError::labeled_error_with_secondary( + "Expected a table with TSV-compatible structure.tag() from pipeline", + "requires TSV-compatible input", + &name_tag, + "originates from here".to_string(), + value.tag(), + )) + } + } + } + }; + + Ok(stream.to_output_stream()) } diff --git a/src/commands/to_url.rs b/src/commands/to_url.rs new file mode 100644 index 0000000000..8dee0a87d5 --- /dev/null +++ b/src/commands/to_url.rs @@ -0,0 +1,85 @@ +use crate::commands::WholeStreamCommand; +use crate::data::Value; +use crate::prelude::*; + +pub struct ToURL; + +impl WholeStreamCommand for ToURL { + fn name(&self) -> &str { + "to-url" + } + + fn signature(&self) -> Signature { + Signature::build("to-url") + } + + fn usage(&self) -> &str { + "Convert table into url-encoded text" + } + + fn run( + &self, + args: CommandArgs, + registry: &CommandRegistry, + ) -> Result { + to_url(args, registry) + } +} + +fn to_url(args: CommandArgs, registry: &CommandRegistry) -> Result { + let args = args.evaluate_once(registry)?; + let tag = args.name_tag(); + let input = args.input; + + let stream = async_stream! 
{ + let input: Vec> = input.values.collect().await; + + for value in input { + match value { + Tagged { item: Value::Row(row), .. } => { + let mut row_vec = vec![]; + for (k,v) in row.entries { + match v.as_string() { + Ok(s) => { + row_vec.push((k.clone(), s)); + } + _ => { + yield Err(ShellError::labeled_error_with_secondary( + "Expected table with string values", + "requires table with strings", + &tag, + "value originates from here", + v.tag, + )) + } + } + } + + match serde_urlencoded::to_string(row_vec) { + Ok(s) => { + yield ReturnSuccess::value(Value::string(s).tagged(&tag)); + } + _ => { + yield Err(ShellError::labeled_error( + "Failed to convert to url-encoded", + "cannot url-encode", + &tag, + )) + } + } + } + Tagged { tag: value_tag, .. } => { + yield Err(ShellError::labeled_error_with_secondary( + "Expected a table from pipeline", + "requires table input", + &tag, + "value originates from here", + value_tag, + )) + } + } + } + }; + + Ok(stream.to_output_stream()) +} diff --git a/src/commands/to_yaml.rs b/src/commands/to_yaml.rs index 8ef6e90da2..6019561d66 100644 --- a/src/commands/to_yaml.rs +++ b/src/commands/to_yaml.rs @@ -1,5 +1,5 @@ use crate::commands::WholeStreamCommand; -use crate::object::{Primitive, Value}; +use crate::data::{Primitive, Value}; use crate::prelude::*; pub struct ToYAML; @@ -39,13 +39,14 @@ pub fn value_to_yaml_value(v: &Tagged) -> Result serde_yaml::Value::Number(serde_yaml::Number::from( - CoerceInto::::coerce_into(i.tagged(v.tag), "converting to YAML number")?, + CoerceInto::::coerce_into(i.tagged(&v.tag), "converting to YAML number")?, )), Value::Primitive(Primitive::Nothing) => serde_yaml::Value::Null, + Value::Primitive(Primitive::Pattern(s)) => serde_yaml::Value::String(s.clone()), Value::Primitive(Primitive::String(s)) => serde_yaml::Value::String(s.clone()), Value::Primitive(Primitive::Path(s)) => serde_yaml::Value::String(s.display().to_string()), - Value::List(l) => { + Value::Table(l) => { let mut out = vec![]; for value in l { @@ -54,13 +55,14 @@ pub fn value_to_yaml_value(v: &Tagged) -> Result return Err(e.clone()), Value::Block(_) => serde_yaml::Value::Null, - Value::Binary(b) => serde_yaml::Value::Sequence( + Value::Primitive(Primitive::Binary(b)) => serde_yaml::Value::Sequence( b.iter() .map(|x| serde_yaml::Value::Number(serde_yaml::Number::from(*x))) .collect(), ), - Value::Object(o) => { + Value::Row(o) => { let mut m = serde_yaml::Mapping::new(); for (k, v) in o.entries.iter() { m.insert( @@ -75,23 +77,42 @@ pub fn value_to_yaml_value(v: &Tagged) -> Result Result { let args = args.evaluate_once(registry)?; - let name_span = args.name_span(); - let out = args.input; - Ok(out - .values - .map( - move |a| match serde_yaml::to_string(&value_to_yaml_value(&a)?) { - Ok(x) => ReturnSuccess::value( - Value::Primitive(Primitive::String(x)).simple_spanned(name_span), - ), - _ => Err(ShellError::labeled_error_with_secondary( - "Expected an object with YAML-compatible structure from pipeline", + let name_tag = args.name_tag(); + let stream = async_stream! 
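// Aside -- an illustrative sketch, not part of the patch: `to-url` above collects
// a row's entries as (key, value) string pairs and lets serde_urlencoded do the
// actual encoding. The keys and values below are made up for demonstration.
fn encode_row_sketch() -> Result<String, Box<dyn std::error::Error>> {
    let pairs = vec![
        ("name".to_string(), "nu".to_string()),
        ("version".to_string(), "0.4.0".to_string()),
    ];
    // Produces "name=nu&version=0.4.0".
    Ok(serde_urlencoded::to_string(pairs)?)
}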
{ + let input: Vec> = args.input.values.collect().await; + + let to_process_input = if input.len() > 1 { + let tag = input[0].tag.clone(); + vec![Tagged { item: Value::Table(input), tag } ] + } else if input.len() == 1 { + input + } else { + vec![] + }; + + for value in to_process_input { + match value_to_yaml_value(&value) { + Ok(yaml_value) => { + match serde_yaml::to_string(&yaml_value) { + Ok(x) => yield ReturnSuccess::value( + Value::Primitive(Primitive::String(x)).tagged(&name_tag), + ), + _ => yield Err(ShellError::labeled_error_with_secondary( + "Expected a table with YAML-compatible structure.tag() from pipeline", + "requires YAML-compatible input", + &name_tag, + "originates from here".to_string(), + value.tag(), + )), + } + } + _ => yield Err(ShellError::labeled_error( + "Expected a table with YAML-compatible structure from pipeline", "requires YAML-compatible input", - name_span, - format!("{} originates from here", a.item.type_name()), - a.span(), - )), - }, - ) - .to_output_stream()) + &name_tag)) + } + } + }; + + Ok(stream.to_output_stream()) } diff --git a/src/commands/trim.rs b/src/commands/trim.rs index 66152843f0..11ed025394 100644 --- a/src/commands/trim.rs +++ b/src/commands/trim.rs @@ -1,6 +1,6 @@ use crate::commands::WholeStreamCommand; +use crate::data::Value; use crate::errors::ShellError; -use crate::object::Value; use crate::prelude::*; pub struct Trim; @@ -34,7 +34,7 @@ fn trim(args: CommandArgs, _registry: &CommandRegistry) -> Result Result { let args = args.evaluate_once(registry)?; - let span = args.call_info.name_span; + let tag = args.call_info.name_tag.clone(); let mut indexmap = IndexMap::new(); indexmap.insert( "version".to_string(), - Tagged::from_simple_spanned_item(Value::string(clap::crate_version!()), span), + Value::string(clap::crate_version!()).tagged(&tag), ); - let value = Tagged::from_simple_spanned_item(Value::Object(Dictionary::from(indexmap)), span); + let value = Value::Row(Dictionary::from(indexmap)).tagged(&tag); Ok(OutputStream::one(value)) } diff --git a/src/commands/vtable.rs b/src/commands/vtable.rs deleted file mode 100644 index 5abd4c6d1f..0000000000 --- a/src/commands/vtable.rs +++ /dev/null @@ -1,47 +0,0 @@ -use crate::commands::WholeStreamCommand; -use crate::errors::ShellError; -use crate::format::VTableView; -use crate::prelude::*; - -pub struct VTable; - -#[derive(Deserialize)] -pub struct VTableArgs {} - -impl WholeStreamCommand for VTable { - fn name(&self) -> &str { - "vtable" - } - - fn signature(&self) -> Signature { - Signature::build("vtable") - } - - fn usage(&self) -> &str { - "View the contents of the pipeline as a vertical (rotated) table." - } - - fn run( - &self, - args: CommandArgs, - registry: &CommandRegistry, - ) -> Result { - args.process(registry, vtable)?.run() - } -} - -pub fn vtable(_args: VTableArgs, context: RunnableContext) -> Result { - let stream = async_stream_block! 
{ - let input = context.input.into_vec().await; - - if input.len() > 0 { - let mut host = context.host.lock().unwrap(); - let view = VTableView::from_list(&input); - if let Some(view) = view { - handle_unexpected(&mut *host, |host| crate::format::print_view(&view, host)); - } - } - }; - - Ok(OutputStream::new(stream)) -} diff --git a/src/commands/where_.rs b/src/commands/where_.rs index b09c341b4a..ce7367b1a6 100644 --- a/src/commands/where_.rs +++ b/src/commands/where_.rs @@ -1,6 +1,6 @@ use crate::commands::PerItemCommand; use crate::errors::ShellError; -use crate::parser::hir::SyntaxType; +use crate::parser::hir::SyntaxShape; use crate::parser::registry; use crate::prelude::*; @@ -12,8 +12,11 @@ impl PerItemCommand for Where { } fn signature(&self) -> registry::Signature { - Signature::build("where") - .required("condition", SyntaxType::Block) + Signature::build("where").required( + "condition", + SyntaxShape::Block, + "the condition that must match", + ) } fn usage(&self) -> &str { @@ -43,16 +46,14 @@ impl PerItemCommand for Where { VecDeque::new() } } - Err(e) => { - return Err(e) - } + Err(e) => return Err(e), } } Tagged { tag, .. } => { return Err(ShellError::labeled_error( "Expected a condition", "where needs a condition", - tag.span, + tag, )) } }; diff --git a/src/commands/which_.rs b/src/commands/which_.rs index fdd722d892..405efe7dca 100644 --- a/src/commands/which_.rs +++ b/src/commands/which_.rs @@ -1,5 +1,5 @@ +use crate::data::Value; use crate::errors::ShellError; -use crate::object::Value; use crate::prelude::*; use crate::commands::WholeStreamCommand; @@ -13,8 +13,11 @@ impl WholeStreamCommand for Which { } fn signature(&self) -> Signature { - Signature::build("which") - .required("name", SyntaxType::Any) + Signature::build("which").required( + "name", + SyntaxShape::Any, + "the name of the command to find the path to", + ) } fn usage(&self) -> &str { @@ -34,7 +37,7 @@ pub fn which(args: CommandArgs, registry: &CommandRegistry) -> Result 0 { @@ -53,7 +56,7 @@ pub fn which(args: CommandArgs, registry: &CommandRegistry) -> Result Result); - -impl SourceMap { - pub fn insert(&mut self, uuid: Uuid, span_source: SpanSource) { - self.0.insert(uuid, span_source); - } - - pub fn get(&self, uuid: &Uuid) -> Option<&SpanSource> { - self.0.get(uuid) - } - - pub fn new() -> SourceMap { - SourceMap(HashMap::new()) - } -} - #[derive(Clone, new)] pub struct CommandRegistry { #[new(value = "Arc::new(Mutex::new(IndexMap::default()))")] @@ -53,13 +34,17 @@ impl CommandRegistry { registry.get(name).map(|c| c.clone()) } + pub(crate) fn expect_command(&self, name: &str) -> Arc { + self.get_command(name).unwrap() + } + pub(crate) fn has(&self, name: &str) -> bool { let registry = self.registry.lock().unwrap(); registry.contains_key(name) } - fn insert(&mut self, name: impl Into, command: Arc) { + pub(crate) fn insert(&mut self, name: impl Into, command: Arc) { let mut registry = self.registry.lock().unwrap(); registry.insert(name.into(), command); } @@ -73,8 +58,8 @@ impl CommandRegistry { #[derive(Clone)] pub struct Context { registry: CommandRegistry, - pub(crate) source_map: SourceMap, host: Arc>, + pub ctrl_c: Arc, pub(crate) shell_manager: ShellManager, } @@ -83,12 +68,20 @@ impl Context { &self.registry } + pub(crate) fn expand_context<'context>( + &'context self, + source: &'context Text, + span: Span, + ) -> ExpandContext<'context> { + ExpandContext::new(&self.registry, span, source, self.shell_manager.homedir()) + } + pub(crate) fn basic() -> Result> { let registry = 
CommandRegistry::new(); Ok(Context { registry: registry.clone(), - source_map: SourceMap::new(), host: Arc::new(Mutex::new(crate::env::host::BasicHost)), + ctrl_c: Arc::new(AtomicBool::new(false)), shell_manager: ShellManager::basic(registry)?, }) } @@ -105,43 +98,32 @@ impl Context { } } - pub fn add_span_source(&mut self, uuid: Uuid, span_source: SpanSource) { - self.source_map.insert(uuid, span_source); + pub(crate) fn get_command(&self, name: &str) -> Option> { + self.registry.get_command(name) } - pub(crate) fn has_command(&self, name: &str) -> bool { - self.registry.has(name) - } - - pub(crate) fn get_command(&self, name: &str) -> Arc { - self.registry.get_command(name).unwrap() + pub(crate) fn expect_command(&self, name: &str) -> Arc { + self.registry.expect_command(name) } pub(crate) fn run_command<'a>( &mut self, command: Arc, - name_span: Span, - source_map: SourceMap, + name_tag: Tag, args: hir::Call, source: &Text, input: InputStream, + is_first_command: bool, ) -> OutputStream { - let command_args = self.command_args(args, input, source, source_map, name_span); - command.run(command_args, self.registry()) + let command_args = self.command_args(args, input, source, name_tag); + command.run(command_args, self.registry(), is_first_command) } - fn call_info( - &self, - args: hir::Call, - source: &Text, - source_map: SourceMap, - name_span: Span, - ) -> UnevaluatedCallInfo { + fn call_info(&self, args: hir::Call, source: &Text, name_tag: Tag) -> UnevaluatedCallInfo { UnevaluatedCallInfo { args, source: source.clone(), - source_map, - name_span, + name_tag, } } @@ -150,13 +132,13 @@ impl Context { args: hir::Call, input: InputStream, source: &Text, - source_map: SourceMap, - name_span: Span, + name_tag: Tag, ) -> CommandArgs { CommandArgs { host: self.host.clone(), + ctrl_c: self.ctrl_c.clone(), shell_manager: self.shell_manager.clone(), - call_info: self.call_info(args, source, source_map, name_span), + call_info: self.call_info(args, source, name_tag), input, } } diff --git a/src/object.rs b/src/data.rs similarity index 100% rename from src/object.rs rename to src/data.rs diff --git a/src/object/base.rs b/src/data/base.rs similarity index 53% rename from src/object/base.rs rename to src/data/base.rs index 552d2c86de..bc567f0dfe 100644 --- a/src/object/base.rs +++ b/src/data/base.rs @@ -1,28 +1,87 @@ use crate::context::CommandRegistry; +use crate::data::TaggedDictBuilder; use crate::errors::ShellError; use crate::evaluate::{evaluate_baseline_expr, Scope}; -use crate::object::TaggedDictBuilder; use crate::parser::{hir, Operator}; use crate::prelude::*; use crate::Text; use chrono::{DateTime, Utc}; use chrono_humanize::Humanize; use derive_new::new; +use indexmap::IndexMap; +use log::trace; use serde::{Deserialize, Serialize}; use std::fmt; use std::path::PathBuf; use std::time::SystemTime; +mod serde_bigint { + use num_traits::cast::FromPrimitive; + use num_traits::cast::ToPrimitive; + + pub fn serialize(big_int: &super::BigInt, serializer: S) -> Result + where + S: serde::Serializer, + { + serde::Serialize::serialize( + &big_int + .to_i64() + .ok_or(serde::ser::Error::custom("expected a i64-sized bignum"))?, + serializer, + ) + } + + pub fn deserialize<'de, D>(deserializer: D) -> Result + where + D: serde::Deserializer<'de>, + { + let x: i64 = serde::Deserialize::deserialize(deserializer)?; + Ok(super::BigInt::from_i64(x) + .ok_or(serde::de::Error::custom("expected a i64-sized bignum"))?) 
+ } +} + +mod serde_bigdecimal { + use num_traits::cast::FromPrimitive; + use num_traits::cast::ToPrimitive; + + pub fn serialize(big_decimal: &super::BigDecimal, serializer: S) -> Result + where + S: serde::Serializer, + { + serde::Serialize::serialize( + &big_decimal + .to_f64() + .ok_or(serde::ser::Error::custom("expected a f64-sized bignum"))?, + serializer, + ) + } + + pub fn deserialize<'de, D>(deserializer: D) -> Result + where + D: serde::Deserializer<'de>, + { + let x: f64 = serde::Deserialize::deserialize(deserializer)?; + Ok(super::BigDecimal::from_f64(x) + .ok_or(serde::de::Error::custom("expected a f64-sized bigdecimal"))?) + } +} + #[derive(Debug, Clone, Ord, PartialOrd, Eq, PartialEq, Deserialize, Serialize)] pub enum Primitive { Nothing, + #[serde(with = "serde_bigint")] Int(BigInt), + #[serde(with = "serde_bigdecimal")] Decimal(BigDecimal), Bytes(u64), String(String), + Pattern(String), Boolean(bool), Date(DateTime), Path(PathBuf), + #[serde(with = "serde_bytes")] + Binary(Vec), // Stream markers (used as bookend markers rather than actual values) BeginningOfStream, @@ -53,9 +112,11 @@ impl Primitive { Int(_) => "int", Decimal(_) => "decimal", Bytes(_) => "bytes", + Pattern(_) => "pattern", String(_) => "string", Boolean(_) => "boolean", Date(_) => "date", + Binary(_) => "binary", } .to_string() } @@ -71,9 +132,11 @@ impl Primitive { Path(path) => write!(f, "{}", path.display()), Decimal(decimal) => write!(f, "{}", decimal), Bytes(bytes) => write!(f, "{}", bytes), + Pattern(string) => write!(f, "{:?}", string), String(string) => write!(f, "{:?}", string), Boolean(boolean) => write!(f, "{}", boolean), Date(date) => write!(f, "{}", date), + Binary(binary) => write!(f, "{:?}", binary), } } @@ -108,6 +171,7 @@ impl Primitive { } Primitive::Int(i) => format!("{}", i), Primitive::Decimal(decimal) => format!("{}", decimal), + Primitive::Pattern(s) => format!("{}", s), Primitive::String(s) => format!("{}", s), Primitive::Boolean(b) => match (b, field_name) { (true, None) => format!("Yes"), @@ -117,6 +181,7 @@ impl Primitive { (true, Some(_)) => format!("Yes"), (false, Some(_)) => format!("No"), }, + Primitive::Binary(_) => format!(""), Primitive::Date(d) => format!("{}", d.humanize()), } } @@ -141,7 +206,7 @@ pub struct Operation { pub struct Block { pub(crate) expressions: Vec, pub(crate) source: Text, - pub(crate) span: Span, + pub(crate) tag: Tag, } impl Block { @@ -149,11 +214,19 @@ impl Block { let scope = Scope::new(value.clone()); if self.expressions.len() == 0 { - return Ok(Value::nothing().simple_spanned(self.span)); + return Ok(Value::nothing().tagged(&self.tag)); } let mut last = None; + trace!( + "EXPRS = {:?}", + self.expressions + .iter() + .map(|e| format!("{}", e)) + .collect::>() + ); + for expr in self.expressions.iter() { last = Some(evaluate_baseline_expr( &expr, @@ -170,10 +243,11 @@ impl Block { #[derive(Debug, Eq, PartialEq, Ord, PartialOrd, Clone, Serialize, Deserialize)] pub enum Value { Primitive(Primitive), - Object(crate::object::Dictionary), - #[serde(with = "serde_bytes")] - Binary(Vec), - List(Vec>), + Row(crate::data::Dictionary), + Table(Vec>), + + // Errors are a type of value too + Error(ShellError), Block(Block), } @@ -220,18 +294,18 @@ impl fmt::Debug for ValueDebug<'_> { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match self.value.item() { Value::Primitive(p) => p.debug(f), - Value::Object(o) => o.debug(f), - Value::List(l) => debug_list(l).fmt(f), + Value::Row(o) => o.debug(f), + Value::Table(l) => debug_list(l).fmt(f), 
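// Aside -- an illustrative sketch, not part of the patch: the serde_bigint and
// serde_bigdecimal helper modules above exist so the new BigInt/BigDecimal
// primitives can still derive Serialize/Deserialize; a field opts in with
// #[serde(with = "...")]. Hypothetical struct, assuming `BigInt` here is
// num_bigint::BigInt as used by the surrounding module and that `serde_bigint`
// is in scope.
#[derive(serde::Serialize, serde::Deserialize)]
struct BigIntOnTheWire {
    #[serde(with = "serde_bigint")]
    value: num_bigint::BigInt, // travels as a plain i64, per the helpers above
}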
Value::Block(_) => write!(f, "[[block]]"), - Value::Binary(_) => write!(f, "[[binary]]"), + Value::Error(_) => write!(f, "[[error]]"), } } } impl Tagged { - pub(crate) fn tagged_type_name(&self) -> Tagged { + pub fn tagged_type_name(&self) -> Tagged { let name = self.type_name(); - Tagged::from_item(name, self.tag()) + name.tagged(self.tag()) } } @@ -243,7 +317,7 @@ impl std::convert::TryFrom<&Tagged> for Block { Value::Block(block) => Ok(block.clone()), v => Err(ShellError::type_error( "Block", - value.copy_span(v.type_name()), + v.type_name().tagged(value.tag()), )), } } @@ -255,11 +329,11 @@ impl std::convert::TryFrom<&Tagged> for i64 { fn try_from(value: &Tagged) -> Result { match value.item() { Value::Primitive(Primitive::Int(int)) => { - int.tagged(value.tag).coerce_into("converting to i64") + int.tagged(&value.tag).coerce_into("converting to i64") } v => Err(ShellError::type_error( "Integer", - value.copy_span(v.type_name()), + v.type_name().tagged(value.tag()), )), } } @@ -273,7 +347,7 @@ impl std::convert::TryFrom<&Tagged> for String { Value::Primitive(Primitive::String(s)) => Ok(s.clone()), v => Err(ShellError::type_error( "String", - value.copy_span(v.type_name()), + v.type_name().tagged(value.tag()), )), } } @@ -284,24 +358,24 @@ impl std::convert::TryFrom<&Tagged> for Vec { fn try_from(value: &Tagged) -> Result, ShellError> { match value.item() { - Value::Binary(b) => Ok(b.clone()), + Value::Primitive(Primitive::Binary(b)) => Ok(b.clone()), v => Err(ShellError::type_error( "Binary", - value.copy_span(v.type_name()), + v.type_name().tagged(value.tag()), )), } } } -impl<'a> std::convert::TryFrom<&'a Tagged> for &'a crate::object::Dictionary { +impl<'a> std::convert::TryFrom<&'a Tagged> for &'a crate::data::Dictionary { type Error = ShellError; - fn try_from(value: &'a Tagged) -> Result<&'a crate::object::Dictionary, ShellError> { + fn try_from(value: &'a Tagged) -> Result<&'a crate::data::Dictionary, ShellError> { match value.item() { - Value::Object(d) => Ok(d), + Value::Row(d) => Ok(d), v => Err(ShellError::type_error( "Dictionary", - value.copy_span(v.type_name()), + v.type_name().tagged(value.tag()), )), } } @@ -323,7 +397,7 @@ impl std::convert::TryFrom>> for Switch { Value::Primitive(Primitive::Boolean(true)) => Ok(Switch::Present), v => Err(ShellError::type_error( "Boolean", - value.copy_span(v.type_name()), + v.type_name().tagged(value.tag()), )), }, } @@ -334,16 +408,54 @@ impl Tagged { pub(crate) fn debug(&self) -> ValueDebug<'_> { ValueDebug { value: self } } + + pub fn as_column_path(&self) -> Result>>, ShellError> { + let mut out: Vec> = vec![]; + + match &self.item { + Value::Table(table) => { + for item in table { + out.push(item.as_string()?.tagged(&item.tag)); + } + } + + other => { + return Err(ShellError::type_error( + "column name", + other.type_name().tagged(&self.tag), + )) + } + } + + Ok(out.tagged(&self.tag)) + } + + pub(crate) fn as_string(&self) -> Result { + match &self.item { + Value::Primitive(Primitive::String(s)) => Ok(s.clone()), + Value::Primitive(Primitive::Boolean(x)) => Ok(format!("{}", x)), + Value::Primitive(Primitive::Decimal(x)) => Ok(format!("{}", x)), + Value::Primitive(Primitive::Int(x)) => Ok(format!("{}", x)), + Value::Primitive(Primitive::Bytes(x)) => Ok(format!("{}", x)), + Value::Primitive(Primitive::Path(x)) => Ok(format!("{}", x.display())), + // TODO: this should definitely be more general with better errors + other => Err(ShellError::labeled_error( + "Expected string", + other.type_name(), + &self.tag, + )), + } + } } impl 
Value { - pub(crate) fn type_name(&self) -> String { + pub fn type_name(&self) -> String { match self { Value::Primitive(p) => p.type_name(), - Value::Object(_) => format!("object"), - Value::List(_) => format!("list"), + Value::Row(_) => format!("row"), + Value::Table(_) => format!("table"), Value::Block(_) => format!("block"), - Value::Binary(_) => format!("binary"), + Value::Error(_) => format!("error"), } } @@ -351,26 +463,26 @@ impl Value { pub fn data_descriptors(&self) -> Vec { match self { Value::Primitive(_) => vec![], - Value::Object(o) => o + Value::Row(o) => o .entries .keys() .into_iter() .map(|x| x.to_string()) .collect(), Value::Block(_) => vec![], - Value::List(_) => vec![], - Value::Binary(_) => vec![], + Value::Table(_) => vec![], + Value::Error(_) => vec![], } } pub(crate) fn get_data_by_key(&self, name: &str) -> Option<&Tagged> { match self { - Value::Object(o) => o.get_data_by_key(name), - Value::List(l) => { + Value::Row(o) => o.get_data_by_key(name), + Value::Table(l) => { for item in l { match item { Tagged { - item: Value::Object(o), + item: Value::Row(o), .. } => match o.get_data_by_key(name) { Some(v) => return Some(v), @@ -385,16 +497,42 @@ impl Value { } } - pub fn get_data_by_path(&self, tag: Tag, path: &str) -> Option> { + pub(crate) fn get_mut_data_by_key(&mut self, name: &str) -> Option<&mut Tagged> { + match self { + Value::Row(ref mut o) => o.get_mut_data_by_key(name), + Value::Table(ref mut l) => { + for item in l { + match item { + Tagged { + item: Value::Row(ref mut o), + .. + } => match o.get_mut_data_by_key(name) { + Some(v) => return Some(v), + None => {} + }, + _ => {} + } + } + None + } + _ => None, + } + } + + pub fn get_data_by_column_path( + &self, + tag: Tag, + path: &Vec>, + ) -> Option> { let mut current = self; - for p in path.split(".") { + for p in path { match current.get_data_by_key(p) { Some(v) => current = v, None => return None, } } - Some(Tagged::from_item(current, tag)) + Some(current.tagged(tag)) } pub fn insert_data_at_path( @@ -407,15 +545,15 @@ impl Value { let split_path: Vec<_> = path.split(".").collect(); - if let Value::Object(ref mut o) = new_obj { + if let Value::Row(ref mut o) = new_obj { let mut current = o; if split_path.len() == 1 { // Special case for inserting at the top level current .entries - .insert(path.to_string(), Tagged::from_item(new_value, tag)); - return Some(Tagged::from_item(new_obj, tag)); + .insert(path.to_string(), new_value.tagged(&tag)); + return Some(new_obj.tagged(&tag)); } for idx in 0..split_path.len() { @@ -423,19 +561,19 @@ impl Value { Some(next) => { if idx == (split_path.len() - 2) { match &mut next.item { - Value::Object(o) => { + Value::Row(o) => { o.entries.insert( split_path[idx + 1].to_string(), - Tagged::from_item(new_value, tag), + new_value.tagged(&tag), ); } _ => {} } - return Some(Tagged::from_item(new_obj, tag)); + return Some(new_obj.tagged(&tag)); } else { match next.item { - Value::Object(ref mut o) => { + Value::Row(ref mut o) => { current = o; } _ => return None, @@ -450,27 +588,43 @@ impl Value { None } - pub fn replace_data_at_path( + pub fn insert_data_at_column_path( &self, tag: Tag, - path: &str, - replaced_value: Value, + split_path: &Vec>, + new_value: Value, ) -> Option> { let mut new_obj = self.clone(); - let split_path: Vec<_> = path.split(".").collect(); - - if let Value::Object(ref mut o) = new_obj { + if let Value::Row(ref mut o) = new_obj { let mut current = o; + + if split_path.len() == 1 { + // Special case for inserting at the top level + current + .entries 
+ .insert(split_path[0].item.clone(), new_value.tagged(&tag)); + return Some(new_obj.tagged(&tag)); + } + for idx in 0..split_path.len() { - match current.entries.get_mut(split_path[idx]) { + match current.entries.get_mut(&split_path[idx].item) { Some(next) => { - if idx == (split_path.len() - 1) { - *next = Tagged::from_item(replaced_value, tag); - return Some(Tagged::from_item(new_obj, tag)); + if idx == (split_path.len() - 2) { + match &mut next.item { + Value::Row(o) => { + o.entries.insert( + split_path[idx + 1].to_string(), + new_value.tagged(&tag), + ); + } + _ => {} + } + + return Some(new_obj.tagged(&tag)); } else { match next.item { - Value::Object(ref mut o) => { + Value::Row(ref mut o) => { current = o; } _ => return None, @@ -485,13 +639,41 @@ impl Value { None } + pub fn replace_data_at_column_path( + &self, + tag: Tag, + split_path: &Vec>, + replaced_value: Value, + ) -> Option> { + let mut new_obj = self.clone(); + let mut current = &mut new_obj; + + for idx in 0..split_path.len() { + match current.get_mut_data_by_key(&split_path[idx].item) { + Some(next) => { + if idx == (split_path.len() - 1) { + *next = replaced_value.tagged(&tag); + return Some(new_obj.tagged(&tag)); + } else { + current = &mut next.item; + } + } + None => { + return None; + } + } + } + + None + } + pub fn get_data(&self, desc: &String) -> MaybeOwned<'_, Value> { match self { p @ Value::Primitive(_) => MaybeOwned::Borrowed(p), - Value::Object(o) => o.get_data(desc), + Value::Row(o) => o.get_data(desc), Value::Block(_) => MaybeOwned::Owned(Value::nothing()), - Value::List(_) => MaybeOwned::Owned(Value::nothing()), - Value::Binary(_) => MaybeOwned::Owned(Value::nothing()), + Value::Table(_) => MaybeOwned::Owned(Value::nothing()), + Value::Error(_) => MaybeOwned::Owned(Value::nothing()), } } @@ -501,16 +683,16 @@ impl Value { Value::Block(b) => itertools::join( b.expressions .iter() - .map(|e| e.source(&b.source).to_string()), + .map(|e| e.span.slice(&b.source).to_string()), "; ", ), - Value::Object(_) => format!("[{}]", self.type_name()), - Value::List(l) => format!( - "[{} {}]", + Value::Row(_) => format!("[table: 1 row]"), + Value::Table(l) => format!( + "[table: {} {}]", l.len(), - if l.len() == 1 { "item" } else { "items" } + if l.len() == 1 { "row" } else { "rows" } ), - Value::Binary(_) => format!(""), + Value::Error(_) => format!("[error]"), } } @@ -551,21 +733,6 @@ impl Value { } } - pub(crate) fn as_string(&self) -> Result { - match self { - Value::Primitive(Primitive::String(s)) => Ok(s.clone()), - Value::Primitive(Primitive::Boolean(x)) => Ok(format!("{}", x)), - Value::Primitive(Primitive::Decimal(x)) => Ok(format!("{}", x)), - Value::Primitive(Primitive::Int(x)) => Ok(format!("{}", x)), - Value::Primitive(Primitive::Bytes(x)) => Ok(format!("{}", x)), - // TODO: this should definitely be more general with better errors - other => Err(ShellError::string(format!( - "Expected string, got {:?}", - other - ))), - } - } - pub(crate) fn is_true(&self) -> bool { match self { Value::Primitive(Primitive::Boolean(true)) => true, @@ -573,10 +740,23 @@ impl Value { } } + #[allow(unused)] + pub fn row(entries: IndexMap>) -> Value { + Value::Row(entries.into()) + } + + pub fn table(list: &Vec>) -> Value { + Value::Table(list.to_vec()) + } + pub fn string(s: impl Into) -> Value { Value::Primitive(Primitive::String(s.into())) } + pub fn pattern(s: impl Into) -> Value { + Value::Primitive(Primitive::String(s.into())) + } + pub fn path(s: impl Into) -> Value { Value::Primitive(Primitive::Path(s.into())) } @@ 
-593,6 +773,10 @@ impl Value { Value::Primitive(Primitive::Decimal(s.into())) } + pub fn binary(binary: Vec) -> Value { + Value::Primitive(Primitive::Binary(binary)) + } + pub fn number(s: impl Into) -> Value { let num = s.into(); @@ -610,9 +794,14 @@ impl Value { Value::Primitive(Primitive::Date(s.into())) } - pub fn date_from_str(s: &str) -> Result { - let date = DateTime::parse_from_rfc3339(s) - .map_err(|err| ShellError::string(&format!("Date parse error: {}", err)))?; + pub fn date_from_str(s: Tagged<&str>) -> Result { + let date = DateTime::parse_from_rfc3339(s.item).map_err(|err| { + ShellError::labeled_error( + &format!("Date parse error: {}", err), + "original value", + s.tag, + ) + })?; let date = date.with_timezone(&chrono::offset::Utc); @@ -628,9 +817,10 @@ impl Tagged { pub(crate) fn as_path(&self) -> Result { match self.item() { Value::Primitive(Primitive::Path(path)) => Ok(path.clone()), + Value::Primitive(Primitive::String(path_str)) => Ok(PathBuf::from(&path_str).clone()), other => Err(ShellError::type_error( "Path", - other.type_name().tagged(self.span()), + other.type_name().tagged(self.tag()), )), } } @@ -718,3 +908,213 @@ fn coerce_compare_primitive( _ => return Err((left.type_name(), right.type_name())), }) } +#[cfg(test)] +mod tests { + + use crate::data::meta::*; + use crate::Value; + use indexmap::IndexMap; + + fn string(input: impl Into) -> Tagged { + Value::string(input.into()).tagged_unknown() + } + + fn row(entries: IndexMap>) -> Tagged { + Value::row(entries).tagged_unknown() + } + + fn table(list: &Vec>) -> Tagged { + Value::table(list).tagged_unknown() + } + + fn column_path(paths: &Vec>) -> Tagged>> { + table( + &paths + .iter() + .map(|p| string(p.as_string().unwrap())) + .collect(), + ) + .as_column_path() + .unwrap() + } + + #[test] + fn gets_matching_field_from_a_row() { + let row = Value::row(indexmap! { + "amigos".into() => table(&vec![string("andres"),string("jonathan"),string("yehuda")]) + }); + + assert_eq!( + *row.get_data_by_key("amigos").unwrap(), + table(&vec![ + string("andres"), + string("jonathan"), + string("yehuda") + ]) + ); + } + + #[test] + fn gets_matching_field_from_nested_rows_inside_a_row() { + let field_path = column_path(&vec![string("package"), string("version")]); + + let (version, tag) = string("0.4.0").into_parts(); + + let row = Value::row(indexmap! { + "package".into() => + row(indexmap! { + "name".into() => string("nu"), + "version".into() => string("0.4.0") + }) + }); + + assert_eq!( + **row.get_data_by_column_path(tag, &field_path).unwrap(), + version + ) + } + + #[test] + fn gets_first_matching_field_from_rows_with_same_field_inside_a_table() { + let field_path = column_path(&vec![string("package"), string("authors"), string("name")]); + + let (name, tag) = string("Andrés N. Robalino").into_parts(); + + let row = Value::row(indexmap! { + "package".into() => row(indexmap! { + "name".into() => string("nu"), + "version".into() => string("0.4.0"), + "authors".into() => table(&vec![ + row(indexmap!{"name".into() => string("Andrés N. Robalino")}), + row(indexmap!{"name".into() => string("Jonathan Turner")}), + row(indexmap!{"name".into() => string("Yehuda Katz")}) + ]) + }) + }); + + assert_eq!( + **row.get_data_by_column_path(tag, &field_path).unwrap(), + name + ) + } + + #[test] + fn replaces_matching_field_from_a_row() { + let field_path = column_path(&vec![string("amigos")]); + + let sample = Value::row(indexmap! 
{ + "amigos".into() => table(&vec![ + string("andres"), + string("jonathan"), + string("yehuda"), + ]), + }); + + let (replacement, tag) = string("jonas").into_parts(); + + let actual = sample + .replace_data_at_column_path(tag, &field_path, replacement) + .unwrap(); + + assert_eq!(actual, row(indexmap! {"amigos".into() => string("jonas")})); + } + + #[test] + fn replaces_matching_field_from_nested_rows_inside_a_row() { + let field_path = column_path(&vec![ + string("package"), + string("authors"), + string("los.3.caballeros"), + ]); + + let sample = Value::row(indexmap! { + "package".into() => row(indexmap! { + "authors".into() => row(indexmap! { + "los.3.mosqueteros".into() => table(&vec![string("andres::yehuda::jonathan")]), + "los.3.amigos".into() => table(&vec![string("andres::yehuda::jonathan")]), + "los.3.caballeros".into() => table(&vec![string("andres::yehuda::jonathan")]) + }) + }) + }); + + let (replacement, tag) = table(&vec![string("yehuda::jonathan::andres")]).into_parts(); + + let actual = sample + .replace_data_at_column_path(tag.clone(), &field_path, replacement.clone()) + .unwrap(); + + assert_eq!( + actual, + Value::row(indexmap! { + "package".into() => row(indexmap! { + "authors".into() => row(indexmap! { + "los.3.mosqueteros".into() => table(&vec![string("andres::yehuda::jonathan")]), + "los.3.amigos".into() => table(&vec![string("andres::yehuda::jonathan")]), + "los.3.caballeros".into() => replacement.tagged(&tag)})})}) + .tagged(tag) + ); + } + #[test] + fn replaces_matching_field_from_rows_inside_a_table() { + let field_path = column_path(&vec![ + string("shell_policy"), + string("releases"), + string("nu.version.arepa"), + ]); + + let sample = Value::row(indexmap! { + "shell_policy".into() => row(indexmap! { + "releases".into() => table(&vec![ + row(indexmap! { + "nu.version.arepa".into() => row(indexmap! { + "code".into() => string("0.4.0"), "tag_line".into() => string("GitHub-era") + }) + }), + row(indexmap! { + "nu.version.taco".into() => row(indexmap! { + "code".into() => string("0.3.0"), "tag_line".into() => string("GitHub-era") + }) + }), + row(indexmap! { + "nu.version.stable".into() => row(indexmap! { + "code".into() => string("0.2.0"), "tag_line".into() => string("GitHub-era") + }) + }) + ]) + }) + }); + + let (replacement, tag) = row(indexmap! { + "code".into() => string("0.5.0"), + "tag_line".into() => string("CABALLEROS") + }) + .into_parts(); + + let actual = sample + .replace_data_at_column_path(tag.clone(), &field_path, replacement.clone()) + .unwrap(); + + assert_eq!( + actual, + Value::row(indexmap! { + "shell_policy".into() => row(indexmap! { + "releases".into() => table(&vec![ + row(indexmap! { + "nu.version.arepa".into() => replacement.tagged(&tag) + }), + row(indexmap! { + "nu.version.taco".into() => row(indexmap! { + "code".into() => string("0.3.0"), "tag_line".into() => string("GitHub-era") + }) + }), + row(indexmap! { + "nu.version.stable".into() => row(indexmap! 
{ + "code".into() => string("0.2.0"), "tag_line".into() => string("GitHub-era") + }) + }) + ]) + }) + }).tagged(&tag) + ); + } +} diff --git a/src/object/command.rs b/src/data/command.rs similarity index 72% rename from src/object/command.rs rename to src/data/command.rs index 317fe5fa34..5993dda6f5 100644 --- a/src/object/command.rs +++ b/src/data/command.rs @@ -1,5 +1,5 @@ use crate::commands::command::Command; -use crate::object::{TaggedDictBuilder, TaggedListBuilder, Value}; +use crate::data::{TaggedDictBuilder, TaggedListBuilder, Value}; use crate::parser::registry::{NamedType, PositionalType, Signature}; use crate::prelude::*; use std::ops::Deref; @@ -7,7 +7,7 @@ use std::ops::Deref; pub(crate) fn command_dict(command: Arc, tag: impl Into) -> Tagged { let tag = tag.into(); - let mut cmd_dict = TaggedDictBuilder::new(tag); + let mut cmd_dict = TaggedDictBuilder::new(&tag); cmd_dict.insert("name", Value::string(command.name())); @@ -32,34 +32,37 @@ fn for_spec(name: &str, ty: &str, required: bool, tag: impl Into) -> Tagged spec.insert("name", Value::string(name)); spec.insert("type", Value::string(ty)); - spec.insert("required", Value::string(if required { "yes" } else { "no" })); + spec.insert( + "required", + Value::string(if required { "yes" } else { "no" }), + ); spec.into_tagged_value() } fn signature_dict(signature: Signature, tag: impl Into) -> Tagged { let tag = tag.into(); - let mut sig = TaggedListBuilder::new(tag); + let mut sig = TaggedListBuilder::new(&tag); for arg in signature.positional.iter() { - let is_required = match arg { - PositionalType::Mandatory(_,_) => true, - PositionalType::Optional(_,_) => false, + let is_required = match arg.0 { + PositionalType::Mandatory(_, _) => true, + PositionalType::Optional(_, _) => false, }; - sig.insert_tagged(for_spec(arg.name(), "argument", is_required, tag)); + sig.insert_tagged(for_spec(arg.0.name(), "argument", is_required, &tag)); } if let Some(_) = signature.rest_positional { let is_required = false; - sig.insert_tagged(for_spec("rest", "argument", is_required, tag)); + sig.insert_tagged(for_spec("rest", "argument", is_required, &tag)); } for (name, ty) in signature.named.iter() { - match ty { - NamedType::Mandatory(_) => sig.insert_tagged(for_spec(name, "flag", true, tag)), - NamedType::Optional(_) => sig.insert_tagged(for_spec(name, "flag", false, tag)), - NamedType::Switch => sig.insert_tagged(for_spec(name, "switch", false, tag)), + match ty.0 { + NamedType::Mandatory(_) => sig.insert_tagged(for_spec(name, "flag", true, &tag)), + NamedType::Optional(_) => sig.insert_tagged(for_spec(name, "flag", false, &tag)), + NamedType::Switch => sig.insert_tagged(for_spec(name, "switch", false, &tag)), } } diff --git a/src/data/config.rs b/src/data/config.rs new file mode 100644 index 0000000000..26e3e3c7d5 --- /dev/null +++ b/src/data/config.rs @@ -0,0 +1,140 @@ +use crate::commands::from_toml::convert_toml_value_to_nu_value; +use crate::commands::to_toml::value_to_toml_value; +use crate::data::{Dictionary, Value}; +use crate::errors::ShellError; +use crate::prelude::*; +use app_dirs::*; +use indexmap::IndexMap; +use log::trace; +use serde::{Deserialize, Serialize}; +use std::fs::{self, OpenOptions}; +use std::io; +use std::path::{Path, PathBuf}; + +#[derive(Deserialize, Serialize)] +struct Config { + #[serde(flatten)] + extra: IndexMap>, +} + +pub const APP_INFO: AppInfo = AppInfo { + name: "nu", + author: "nu shell developers", +}; + +pub fn config_path() -> Result { + app_path(AppDataType::UserConfig, "config") +} + +pub fn 
default_path() -> Result { + default_path_for(&None) +} + +pub fn default_path_for(file: &Option) -> Result { + let filename = &mut config_path()?; + let filename = match file { + None => { + filename.push("config.toml"); + filename + } + Some(file) => { + filename.push(file); + filename + } + }; + + Ok(filename.clone()) +} + +pub fn user_data() -> Result { + app_path(AppDataType::UserData, "user data") +} + +pub fn app_path(app_data_type: AppDataType, display: &str) -> Result { + let path = app_root(app_data_type, &APP_INFO).map_err(|err| { + ShellError::untagged_runtime_error(&format!("Couldn't open {} path:\n{}", display, err)) + })?; + + Ok(path) +} + +pub fn read( + tag: impl Into, + at: &Option, +) -> Result>, ShellError> { + let filename = default_path()?; + + let filename = match at { + None => filename, + Some(ref file) => file.clone(), + }; + + touch(&filename)?; + + trace!("config file = {}", filename.display()); + + let tag = tag.into(); + let contents = fs::read_to_string(filename) + .map(|v| v.tagged(&tag)) + .map_err(|err| { + ShellError::labeled_error( + &format!("Couldn't read config file:\n{}", err), + "file name", + &tag, + ) + })?; + + let parsed: toml::Value = toml::from_str(&contents).map_err(|err| { + ShellError::labeled_error( + &format!("Couldn't parse config file:\n{}", err), + "file name", + &tag, + ) + })?; + + let value = convert_toml_value_to_nu_value(&parsed, tag); + let tag = value.tag(); + match value.item { + Value::Row(Dictionary { entries }) => Ok(entries), + other => Err(ShellError::type_error( + "Dictionary", + other.type_name().tagged(&tag), + )), + } +} + +pub(crate) fn config(tag: impl Into) -> Result>, ShellError> { + read(tag, &None) +} + +pub fn write( + config: &IndexMap>, + at: &Option, +) -> Result<(), ShellError> { + let filename = &mut default_path()?; + let filename = match at { + None => filename, + Some(file) => { + filename.pop(); + filename.push(file); + filename + } + }; + + let contents = + value_to_toml_value(&Value::Row(Dictionary::new(config.clone())).tagged_unknown())?; + + let contents = toml::to_string(&contents)?; + + fs::write(&filename, &contents)?; + + Ok(()) +} + +// A simple implementation of `% touch path` (ignores existing files) +fn touch(path: &Path) -> io::Result<()> { + match OpenOptions::new().create(true).write(true).open(path) { + Ok(_) => Ok(()), + Err(e) => Err(e), + } +} diff --git a/src/object/dict.rs b/src/data/dict.rs similarity index 86% rename from src/object/dict.rs rename to src/data/dict.rs index 1f53d2ade5..432170f361 100644 --- a/src/object/dict.rs +++ b/src/data/dict.rs @@ -1,5 +1,5 @@ +use crate::data::{Primitive, Value}; use crate::prelude::*; -use crate::object::{Primitive, Value}; use derive_new::new; use indexmap::IndexMap; use serde::{Deserialize, Serialize}; @@ -64,7 +64,7 @@ impl PartialOrd for Dictionary { impl PartialEq for Dictionary { fn eq(&self, other: &Value) -> bool { match other { - Value::Object(d) => self == d, + Value::Row(d) => self == d, _ => false, } } @@ -89,6 +89,17 @@ impl Dictionary { } } + pub(crate) fn get_mut_data_by_key(&mut self, name: &str) -> Option<&mut Tagged> { + match self + .entries + .iter_mut() + .find(|(desc_name, _)| *desc_name == name) + { + Some((_, v)) => Some(v), + None => None, + } + } + pub(crate) fn debug(&self, f: &mut fmt::Formatter) -> fmt::Result { let mut debug = f.debug_struct("Dictionary"); @@ -102,7 +113,7 @@ impl Dictionary { #[derive(Debug)] pub struct TaggedListBuilder { - pub tag: Tag, + tag: Tag, list: Vec>, } @@ -115,7 +126,7 @@ impl 
TaggedListBuilder { } pub fn push(&mut self, value: impl Into) { - self.list.push(value.into().tagged(self.tag)); + self.list.push(value.into().tagged(&self.tag)); } pub fn insert_tagged(&mut self, value: impl Into>) { @@ -123,7 +134,7 @@ impl TaggedListBuilder { } pub fn into_tagged_value(self) -> Tagged { - Value::List(self.list).tagged(self.tag) + Value::Table(self.list).tagged(self.tag) } } @@ -155,7 +166,7 @@ impl TaggedDictBuilder { } pub fn insert(&mut self, key: impl Into, value: impl Into) { - self.dict.insert(key.into(), value.into().tagged(self.tag)); + self.dict.insert(key.into(), value.into().tagged(&self.tag)); } pub fn insert_tagged(&mut self, key: impl Into, value: impl Into>) { @@ -163,12 +174,16 @@ impl TaggedDictBuilder { } pub fn into_tagged_value(self) -> Tagged { - self.into_tagged_dict().map(Value::Object) + self.into_tagged_dict().map(Value::Row) } pub fn into_tagged_dict(self) -> Tagged { Dictionary { entries: self.dict }.tagged(self.tag) } + + pub fn is_empty(&self) -> bool { + self.dict.is_empty() + } } impl From for Tagged { diff --git a/src/object/files.rs b/src/data/files.rs similarity index 96% rename from src/object/files.rs rename to src/data/files.rs index ba48aeab9c..47c6ae093f 100644 --- a/src/object/files.rs +++ b/src/data/files.rs @@ -1,5 +1,5 @@ +use crate::data::{TaggedDictBuilder, Value}; use crate::errors::ShellError; -use crate::object::{TaggedDictBuilder, Value}; use crate::prelude::*; #[derive(Debug)] diff --git a/src/object/into.rs b/src/data/into.rs similarity index 77% rename from src/object/into.rs rename to src/data/into.rs index 1d2648a7ad..3d764fa0c1 100644 --- a/src/object/into.rs +++ b/src/data/into.rs @@ -1,4 +1,4 @@ -use crate::object::{Primitive, Value}; +use crate::data::{Primitive, Value}; use crate::prelude::*; impl From for Value { @@ -15,8 +15,8 @@ impl From for Value { impl> Tagged { pub fn into_tagged_value(self) -> Tagged { - let value_span = self.span(); + let value_tag = self.tag(); let value = self.item.into(); - value.simple_spanned(value_span) + value.tagged(value_tag) } } diff --git a/src/data/meta.rs b/src/data/meta.rs new file mode 100644 index 0000000000..2f3f0cc4c1 --- /dev/null +++ b/src/data/meta.rs @@ -0,0 +1,463 @@ +use crate::context::AnchorLocation; +use crate::parser::parse::parser::TracableContext; +use crate::prelude::*; +use derive_new::new; +use getset::Getters; +use serde::Deserialize; +use serde::Serialize; +use std::path::{Path, PathBuf}; + +#[derive(new, Debug, Clone, Copy, Eq, PartialEq, Ord, PartialOrd, Serialize, Deserialize, Hash)] +pub struct Spanned { + pub span: Span, + pub item: T, +} + +impl Spanned { + pub fn map(self, input: impl FnOnce(T) -> U) -> Spanned { + let span = self.span; + + let mapped = input(self.item); + mapped.spanned(span) + } +} + +pub trait SpannedItem: Sized { + fn spanned(self, span: impl Into) -> Spanned { + Spanned { + item: self, + span: span.into(), + } + } + + fn spanned_unknown(self) -> Spanned { + Spanned { + item: self, + span: Span::unknown(), + } + } +} +impl SpannedItem for T {} + +impl std::ops::Deref for Spanned { + type Target = T; + + fn deref(&self) -> &T { + &self.item + } +} +#[derive(new, Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Serialize, Deserialize, Hash)] +pub struct Tagged { + pub tag: Tag, + pub item: T, +} + +impl HasTag for Tagged { + fn tag(&self) -> Tag { + self.tag.clone() + } +} + +impl AsRef for Tagged { + fn as_ref(&self) -> &Path { + self.item.as_ref() + } +} + +pub trait TaggedItem: Sized { + fn tagged(self, tag: impl Into) -> 
Tagged { + Tagged { + item: self, + tag: tag.into(), + } + } + + // For now, this is a temporary facility. In many cases, there are other useful spans that we + // could be using, such as the original source spans of JSON or Toml files, but we don't yet + // have the infrastructure to make that work. + fn tagged_unknown(self) -> Tagged { + Tagged { + item: self, + tag: Tag { + span: Span::unknown(), + anchor: None, + }, + } + } +} + +impl TaggedItem for T {} + +impl std::ops::Deref for Tagged { + type Target = T; + + fn deref(&self) -> &T { + &self.item + } +} + +impl Tagged { + pub fn map(self, input: impl FnOnce(T) -> U) -> Tagged { + let tag = self.tag(); + + let mapped = input(self.item); + mapped.tagged(tag) + } + + pub fn tag(&self) -> Tag { + self.tag.clone() + } + + pub fn span(&self) -> Span { + self.tag.span + } + + pub fn anchor(&self) -> Option { + self.tag.anchor.clone() + } + + pub fn anchor_name(&self) -> Option { + match self.tag.anchor { + Some(AnchorLocation::File(ref file)) => Some(file.clone()), + Some(AnchorLocation::Url(ref url)) => Some(url.clone()), + _ => None, + } + } + + pub fn item(&self) -> &T { + &self.item + } + + pub fn into_parts(self) -> (T, Tag) { + (self.item, self.tag) + } +} + +impl From<&Tag> for Tag { + fn from(input: &Tag) -> Tag { + input.clone() + } +} + +impl From> for Span { + fn from(input: nom_locate::LocatedSpanEx<&str, TracableContext>) -> Span { + Span::new(input.offset, input.offset + input.fragment.len()) + } +} + +impl From> for Span { + fn from(input: nom_locate::LocatedSpanEx<&str, u64>) -> Span { + Span::new(input.offset, input.offset + input.fragment.len()) + } +} + +impl + From<( + nom_locate::LocatedSpanEx, + nom_locate::LocatedSpanEx, + )> for Span +{ + fn from( + input: ( + nom_locate::LocatedSpanEx, + nom_locate::LocatedSpanEx, + ), + ) -> Span { + Span { + start: input.0.offset, + end: input.1.offset, + } + } +} + +impl From<(usize, usize)> for Span { + fn from(input: (usize, usize)) -> Span { + Span::new(input.0, input.1) + } +} + +impl From<&std::ops::Range> for Span { + fn from(input: &std::ops::Range) -> Span { + Span { + start: input.start, + end: input.end, + } + } +} + +#[derive( + Debug, Clone, PartialEq, Eq, Ord, PartialOrd, Serialize, Deserialize, Hash, Getters, new, +)] +pub struct Tag { + pub anchor: Option, + pub span: Span, +} + +impl From for Tag { + fn from(span: Span) -> Self { + Tag { anchor: None, span } + } +} + +impl From<&Span> for Tag { + fn from(span: &Span) -> Self { + Tag { + anchor: None, + span: *span, + } + } +} + +impl From<(usize, usize, TracableContext)> for Tag { + fn from((start, end, _context): (usize, usize, TracableContext)) -> Self { + Tag { + anchor: None, + span: Span::new(start, end), + } + } +} + +impl From<(usize, usize, AnchorLocation)> for Tag { + fn from((start, end, anchor): (usize, usize, AnchorLocation)) -> Self { + Tag { + anchor: Some(anchor), + span: Span::new(start, end), + } + } +} + +impl From<(usize, usize, Option)> for Tag { + fn from((start, end, anchor): (usize, usize, Option)) -> Self { + Tag { + anchor, + span: Span::new(start, end), + } + } +} + +impl From> for Tag { + fn from(input: nom_locate::LocatedSpanEx<&str, TracableContext>) -> Tag { + Tag { + anchor: None, + span: Span::new(input.offset, input.offset + input.fragment.len()), + } + } +} + +impl From for Span { + fn from(tag: Tag) -> Self { + tag.span + } +} + +impl From<&Tag> for Span { + fn from(tag: &Tag) -> Self { + tag.span + } +} + +impl Tag { + pub fn unknown_anchor(span: Span) -> Tag { + Tag { anchor: 
None, span } + } + + pub fn for_char(pos: usize, anchor: AnchorLocation) -> Tag { + Tag { + anchor: Some(anchor), + span: Span { + start: pos, + end: pos + 1, + }, + } + } + + pub fn unknown_span(anchor: AnchorLocation) -> Tag { + Tag { + anchor: Some(anchor), + span: Span::unknown(), + } + } + + pub fn unknown() -> Tag { + Tag { + anchor: None, + span: Span::unknown(), + } + } + + pub fn until(&self, other: impl Into) -> Tag { + let other = other.into(); + debug_assert!( + self.anchor == other.anchor, + "Can only merge two tags with the same anchor" + ); + + Tag { + span: Span::new(self.span.start, other.span.end), + anchor: self.anchor.clone(), + } + } + + pub fn until_option(&self, other: Option>) -> Tag { + match other { + Some(other) => { + let other = other.into(); + debug_assert!( + self.anchor == other.anchor, + "Can only merge two tags with the same anchor" + ); + + Tag { + span: Span::new(self.span.start, other.span.end), + anchor: self.anchor.clone(), + } + } + None => self.clone(), + } + } + + pub fn slice<'a>(&self, source: &'a str) -> &'a str { + self.span.slice(source) + } + + pub fn string<'a>(&self, source: &'a str) -> String { + self.span.slice(source).to_string() + } + + pub fn tagged_slice<'a>(&self, source: &'a str) -> Tagged<&'a str> { + self.span.slice(source).tagged(self) + } + + pub fn tagged_string<'a>(&self, source: &'a str) -> Tagged { + self.span.slice(source).to_string().tagged(self) + } +} + +#[allow(unused)] +pub fn tag_for_tagged_list(mut iter: impl Iterator) -> Tag { + let first = iter.next(); + + let first = match first { + None => return Tag::unknown(), + Some(first) => first, + }; + + let last = iter.last(); + + match last { + None => first, + Some(last) => first.until(last), + } +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Ord, PartialOrd, Serialize, Deserialize, Hash)] +pub struct Span { + start: usize, + end: usize, +} + +impl From> for Span { + fn from(input: Option) -> Span { + match input { + None => Span::new(0, 0), + Some(span) => span, + } + } +} + +impl Span { + pub fn unknown() -> Span { + Span::new(0, 0) + } + + pub fn new(start: usize, end: usize) -> Span { + assert!( + end >= start, + "Can't create a Span whose end < start, start={}, end={}", + start, + end + ); + + Span { start, end } + } + + pub fn for_char(pos: usize) -> Span { + Span { + start: pos, + end: pos + 1, + } + } + + pub fn until(&self, other: impl Into) -> Span { + let other = other.into(); + + Span::new(self.start, other.end) + } + + pub fn until_option(&self, other: Option>) -> Span { + match other { + Some(other) => { + let other = other.into(); + + Span::new(self.start, other.end) + } + None => *self, + } + } + + pub fn string<'a>(&self, source: &'a str) -> String { + self.slice(source).to_string() + } + + pub fn spanned_slice<'a>(&self, source: &'a str) -> Spanned<&'a str> { + self.slice(source).spanned(*self) + } + + pub fn spanned_string<'a>(&self, source: &'a str) -> Spanned { + self.slice(source).to_string().spanned(*self) + } + + /* + pub fn unknown_with_uuid(uuid: Uuid) -> Span { + Span { + start: 0, + end: 0, + source: Some(uuid), + } + } + */ + + pub fn start(&self) -> usize { + self.start + } + + pub fn end(&self) -> usize { + self.end + } + + pub fn is_unknown(&self) -> bool { + self.start == 0 && self.end == 0 + } + + pub fn slice<'a>(&self, source: &'a str) -> &'a str { + &source[self.start..self.end] + } +} + +impl language_reporting::ReportingSpan for Span { + fn with_start(&self, start: usize) -> Self { + Span::new(start, self.end) + } + + fn 
with_end(&self, end: usize) -> Self { + Span::new(self.start, end) + } + + fn start(&self) -> usize { + self.start + } + + fn end(&self) -> usize { + self.end + } +} diff --git a/src/object/operators.rs b/src/data/operators.rs similarity index 100% rename from src/object/operators.rs rename to src/data/operators.rs diff --git a/src/object/process.rs b/src/data/process.rs similarity index 94% rename from src/object/process.rs rename to src/data/process.rs index 337f731b58..cf6a2fb148 100644 --- a/src/object/process.rs +++ b/src/data/process.rs @@ -1,4 +1,4 @@ -use crate::object::{TaggedDictBuilder, Value}; +use crate::data::{TaggedDictBuilder, Value}; use crate::prelude::*; use itertools::join; use sysinfo::ProcessExt; diff --git a/src/object/types.rs b/src/data/types.rs similarity index 94% rename from src/object/types.rs rename to src/data/types.rs index 8dca43d878..b4ff545deb 100644 --- a/src/object/types.rs +++ b/src/data/types.rs @@ -54,7 +54,7 @@ impl ExtractType for i64 { &Tagged { item: Value::Primitive(Primitive::Int(int)), .. - } => Ok(int.tagged(value.tag).coerce_into("converting to i64")?), + } => Ok(int.tagged(&value.tag).coerce_into("converting to i64")?), other => Err(ShellError::type_error("Integer", other.tagged_type_name())), } } @@ -68,7 +68,7 @@ impl ExtractType for u64 { &Tagged { item: Value::Primitive(Primitive::Int(int)), .. - } => Ok(int.tagged(value.tag).coerce_into("converting to u64")?), + } => Ok(int.tagged(&value.tag).coerce_into("converting to u64")?), other => Err(ShellError::type_error("Integer", other.tagged_type_name())), } } diff --git a/src/errors.rs b/src/errors.rs index 3eb8e33e9c..dfad5692a1 100644 --- a/src/errors.rs +++ b/src/errors.rs @@ -1,5 +1,6 @@ use crate::prelude::*; +use crate::parser::parse::parser::TracableContext; use ansi_term::Color; use derive_new::new; use language_reporting::{Diagnostic, Label, Severity}; @@ -12,18 +13,6 @@ pub enum Description { Synthetic(String), } -impl Description { - pub fn from(value: Tagged>) -> Description { - let value_span = value.span(); - let value_tag = value.tag(); - - match value_span { - Span { start: 0, end: 0 } => Description::Synthetic(value.item.into()), - _ => Description::Source(Tagged::from_item(value.item.into(), value_tag)), - } - } -} - impl Description { fn into_label(self) -> Result, String> { match self { @@ -31,6 +20,14 @@ impl Description { Description::Synthetic(s) => Err(s), } } + + #[allow(unused)] + fn tag(&self) -> Tag { + match self { + Description::Source(tagged) => tagged.tag.clone(), + Description::Synthetic(_) => Tag::unknown(), + } + } } #[derive(Debug, Eq, PartialEq, Clone, Ord, PartialOrd, Serialize, Deserialize)] @@ -38,6 +35,7 @@ pub enum ArgumentError { MissingMandatoryFlag(String), MissingMandatoryPositional(String), MissingValueForName(String), + InvalidExternalWord, } #[derive(Debug, Eq, PartialEq, Clone, Ord, PartialOrd, Serialize, Deserialize)] @@ -46,6 +44,13 @@ pub struct ShellError { cause: Option>, } +impl ShellError { + #[allow(unused)] + pub(crate) fn tag(&self) -> Option { + self.error.tag() + } +} + impl ToDebug for ShellError { fn fmt_debug(&self, f: &mut fmt::Formatter, source: &str) -> fmt::Result { self.error.fmt_debug(f, source) @@ -57,12 +62,12 @@ impl serde::de::Error for ShellError { where T: std::fmt::Display, { - ShellError::string(msg.to_string()) + ShellError::untagged_runtime_error(msg.to_string()) } } impl ShellError { - pub(crate) fn type_error( + pub fn type_error( expected: impl Into, actual: Tagged>, ) -> ShellError { @@ -73,6 +78,21 @@ 
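// Aside -- an illustrative sketch, not part of the patch: src/data/meta.rs above
// replaces bare Spans with Tags (a Span plus an optional AnchorLocation), and
// TaggedItem::tagged attaches that metadata to any value. Offsets are made up.
fn tag_first_two_bytes(source: &str) -> Tagged<String> {
    let span = Span::new(0, 2);           // byte range into `source`
    let tag = Tag { anchor: None, span };
    // `slice` pulls the text back out of the source; `tagged` keeps the metadata.
    span.slice(source).to_string().tagged(&tag)
}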
impl ShellError { .start() } + pub fn untagged_runtime_error(error: impl Into) -> ShellError { + ProximateShellError::UntaggedRuntimeError { + reason: error.into(), + } + .start() + } + + pub(crate) fn unexpected_eof(expected: impl Into, tag: impl Into) -> ShellError { + ProximateShellError::UnexpectedEof { + expected: expected.into(), + tag: tag.into(), + } + .start() + } + pub(crate) fn range_error( expected: impl Into, actual: &Tagged, @@ -80,7 +100,7 @@ impl ShellError { ) -> ShellError { ProximateShellError::RangeError { kind: expected.into(), - actual_kind: actual.copy_span(format!("{:?}", actual.item)), + actual_kind: format!("{:?}", actual.item).tagged(actual.tag()), operation, } .start() @@ -93,6 +113,7 @@ impl ShellError { .start() } + #[allow(unused)] pub(crate) fn invalid_command(problem: impl Into) -> ShellError { ProximateShellError::InvalidCommand { command: problem.into(), @@ -111,13 +132,9 @@ impl ShellError { .start() } - pub(crate) fn missing_property(subpath: Description, expr: Description) -> ShellError { - ProximateShellError::MissingProperty { subpath, expr }.start() - } - - pub(crate) fn missing_value(span: Option, reason: impl Into) -> ShellError { + pub(crate) fn missing_value(tag: Option, reason: impl Into) -> ShellError { ProximateShellError::MissingValue { - span, + tag, reason: reason.into(), } .start() @@ -126,18 +143,31 @@ impl ShellError { pub(crate) fn argument_error( command: impl Into, kind: ArgumentError, - span: Span, + tag: impl Into, ) -> ShellError { ProximateShellError::ArgumentError { command: command.into(), error: kind, - span, + tag: tag.into(), + } + .start() + } + + pub(crate) fn invalid_external_word(tag: impl Into) -> ShellError { + ProximateShellError::ArgumentError { + command: "Invalid argument to Nu command (did you mean to call an external command?)" + .into(), + error: ArgumentError::InvalidExternalWord, + tag: tag.into(), } .start() } pub(crate) fn parse_error( - error: nom::Err<(nom5_locate::LocatedSpan<&str>, nom::error::ErrorKind)>, + error: nom::Err<( + nom_locate::LocatedSpanEx<&str, TracableContext>, + nom::error::ErrorKind, + )>, ) -> ShellError { use language_reporting::*; @@ -166,21 +196,18 @@ impl ShellError { pub(crate) fn to_diagnostic(self) -> Diagnostic { match self.error { - ProximateShellError::String(StringError { title, .. 
}) => { - Diagnostic::new(Severity::Error, title) - } ProximateShellError::InvalidCommand { command } => { Diagnostic::new(Severity::Error, "Invalid command") .with_label(Label::new_primary(command.span)) } - ProximateShellError::MissingValue { span, reason } => { + ProximateShellError::MissingValue { tag, reason } => { let mut d = Diagnostic::new( Severity::Bug, format!("Internal Error (missing value) :: {}", reason), ); - if let Some(span) = span { - d = d.with_label(Label::new_primary(span)); + if let Some(tag) = tag { + d = d.with_label(Label::new_primary(tag.span)); } d @@ -188,8 +215,12 @@ impl ShellError { ProximateShellError::ArgumentError { command, error, - span, + tag, } => match error { + ArgumentError::InvalidExternalWord => Diagnostic::new( + Severity::Error, + format!("Invalid bare word for Nu command (did you intend to invoke an external command?)")) + .with_label(Label::new_primary(tag.span)), ArgumentError::MissingMandatoryFlag(name) => Diagnostic::new( Severity::Error, format!( @@ -199,7 +230,7 @@ impl ShellError { Color::Black.bold().paint(name) ), ) - .with_label(Label::new_primary(span)), + .with_label(Label::new_primary(tag.span)), ArgumentError::MissingMandatoryPositional(name) => Diagnostic::new( Severity::Error, format!( @@ -209,7 +240,7 @@ impl ShellError { ), ) .with_label( - Label::new_primary(span).with_message(format!("requires {} parameter", name)), + Label::new_primary(tag.span).with_message(format!("requires {} parameter", name)), ), ArgumentError::MissingValueForName(name) => Diagnostic::new( Severity::Error, @@ -220,29 +251,33 @@ impl ShellError { Color::Black.bold().paint(name) ), ) - .with_label(Label::new_primary(span)), + .with_label(Label::new_primary(tag.span)), }, ProximateShellError::TypeError { expected, actual: Tagged { item: Some(actual), - tag: Tag { span, .. }, + tag, }, } => Diagnostic::new(Severity::Error, "Type Error").with_label( - Label::new_primary(span) + Label::new_primary(tag.span) .with_message(format!("Expected {}, found {}", expected, actual)), ), - ProximateShellError::TypeError { expected, actual: Tagged { item: None, - tag: Tag { span, .. }, + tag }, } => Diagnostic::new(Severity::Error, "Type Error") - .with_label(Label::new_primary(span).with_message(expected)), + .with_label(Label::new_primary(tag.span).with_message(expected)), + + ProximateShellError::UnexpectedEof { + expected, tag + } => Diagnostic::new(Severity::Error, format!("Unexpected end of input")) + .with_label(Label::new_primary(tag.span).with_message(format!("Expected {}", expected))), ProximateShellError::RangeError { kind, @@ -250,10 +285,10 @@ impl ShellError { actual_kind: Tagged { item, - tag: Tag { span, .. }, + tag }, } => Diagnostic::new(Severity::Error, "Range Error").with_label( - Label::new_primary(span).with_message(format!( + Label::new_primary(tag.span).with_message(format!( "Expected to convert {} to {} while {}, but it was out of range", item, kind.desc(), @@ -264,13 +299,13 @@ impl ShellError { ProximateShellError::SyntaxError { problem: Tagged { - tag: Tag { span, .. }, - .. + tag, + item }, } => Diagnostic::new(Severity::Error, "Syntax Error") - .with_label(Label::new_primary(span).with_message("Unexpected external command")), + .with_label(Label::new_primary(tag.span).with_message(item)), - ProximateShellError::MissingProperty { subpath, expr } => { + ProximateShellError::MissingProperty { subpath, expr, .. 
} => { let subpath = subpath.into_label(); let expr = expr.into_label(); @@ -291,49 +326,58 @@ impl ShellError { ProximateShellError::Diagnostic(diag) => diag.diagnostic, ProximateShellError::CoerceError { left, right } => { Diagnostic::new(Severity::Error, "Coercion error") - .with_label(Label::new_primary(left.span()).with_message(left.item)) - .with_label(Label::new_secondary(right.span()).with_message(right.item)) + .with_label(Label::new_primary(left.tag().span).with_message(left.item)) + .with_label(Label::new_secondary(right.tag().span).with_message(right.item)) } + + ProximateShellError::UntaggedRuntimeError { reason } => Diagnostic::new(Severity::Error, format!("Error: {}", reason)) } } pub fn labeled_error( msg: impl Into, label: impl Into, - span: impl Into, + tag: impl Into, ) -> ShellError { ShellError::diagnostic( Diagnostic::new(Severity::Error, msg.into()) - .with_label(Label::new_primary(span.into()).with_message(label.into())), + .with_label(Label::new_primary(tag.into().span).with_message(label.into())), ) } pub fn labeled_error_with_secondary( msg: impl Into, primary_label: impl Into, - primary_span: Span, + primary_span: impl Into, secondary_label: impl Into, - secondary_span: Span, + secondary_span: impl Into, ) -> ShellError { ShellError::diagnostic( Diagnostic::new_error(msg.into()) - .with_label(Label::new_primary(primary_span).with_message(primary_label.into())) .with_label( - Label::new_secondary(secondary_span).with_message(secondary_label.into()), + Label::new_primary(primary_span.into().span).with_message(primary_label.into()), + ) + .with_label( + Label::new_secondary(secondary_span.into().span) + .with_message(secondary_label.into()), ), ) } - pub fn string(title: impl Into) -> ShellError { - ProximateShellError::String(StringError::new(title.into(), Value::nothing())).start() - } + // pub fn string(title: impl Into) -> ShellError { + // ProximateShellError::String(StringError::new(title.into(), String::new())).start() + // } + // + // pub(crate) fn unreachable(title: impl Into) -> ShellError { + // ShellError::untagged_runtime_error(&format!("BUG: Unreachable: {}", title.into())) + // } pub(crate) fn unimplemented(title: impl Into) -> ShellError { - ShellError::string(&format!("Unimplemented: {}", title.into())) + ShellError::untagged_runtime_error(&format!("Unimplemented: {}", title.into())) } pub(crate) fn unexpected(title: impl Into) -> ShellError { - ShellError::string(&format!("Unexpected: {}", title.into())) + ShellError::untagged_runtime_error(&format!("Unexpected: {}", title.into())) } } @@ -378,10 +422,13 @@ impl ExpectedRange { #[derive(Debug, Eq, PartialEq, Clone, Ord, PartialOrd, Serialize, Deserialize)] pub enum ProximateShellError { - String(StringError), SyntaxError { problem: Tagged, }, + UnexpectedEof { + expected: String, + tag: Tag, + }, InvalidCommand { command: Tag, }, @@ -392,15 +439,16 @@ pub enum ProximateShellError { MissingProperty { subpath: Description, expr: Description, + tag: Tag, }, MissingValue { - span: Option, + tag: Option, reason: String, }, ArgumentError { command: String, error: ArgumentError, - span: Span, + tag: Tag, }, RangeError { kind: ExpectedRange, @@ -412,6 +460,9 @@ pub enum ProximateShellError { left: Tagged, right: Tagged, }, + UntaggedRuntimeError { + reason: String, + }, } impl ProximateShellError { @@ -421,6 +472,22 @@ impl ProximateShellError { error: self, } } + + pub(crate) fn tag(&self) -> Option { + Some(match self { + ProximateShellError::SyntaxError { problem } => problem.tag(), + 
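// Illustrative sketch, not part of this patch: labeled_error and
// labeled_error_with_secondary now accept anything convertible into a Tag
// (a Tag or &Tag) instead of a raw Span, so callers no longer pull spans out
// by hand. The message strings and parameter names below are hypothetical.
fn row_not_found(row_tag: Tag, table_tag: &Tag) -> ShellError {
    ShellError::labeled_error_with_secondary(
        "Row not found",
        "no row with this index",
        row_tag,
        "table produced here",
        table_tag,
    )
}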
ProximateShellError::UnexpectedEof { tag, .. } => tag.clone(), + ProximateShellError::InvalidCommand { command } => command.clone(), + ProximateShellError::TypeError { actual, .. } => actual.tag.clone(), + ProximateShellError::MissingProperty { tag, .. } => tag.clone(), + ProximateShellError::MissingValue { tag, .. } => return tag.clone(), + ProximateShellError::ArgumentError { tag, .. } => tag.clone(), + ProximateShellError::RangeError { actual_kind, .. } => actual_kind.tag.clone(), + ProximateShellError::Diagnostic(..) => return None, + ProximateShellError::UntaggedRuntimeError { .. } => return None, + ProximateShellError::CoerceError { left, right } => left.tag.until(&right.tag), + }) + } } impl ToDebug for ProximateShellError { @@ -458,22 +525,23 @@ impl std::cmp::Ord for ShellDiagnostic { #[derive(Debug, Ord, PartialOrd, Eq, PartialEq, new, Clone, Serialize, Deserialize)] pub struct StringError { title: String, - error: Value, + error: String, } impl std::fmt::Display for ShellError { fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { match &self.error { - ProximateShellError::String(s) => write!(f, "{}", &s.title), ProximateShellError::MissingValue { .. } => write!(f, "MissingValue"), ProximateShellError::InvalidCommand { .. } => write!(f, "InvalidCommand"), ProximateShellError::TypeError { .. } => write!(f, "TypeError"), + ProximateShellError::UnexpectedEof { .. } => write!(f, "UnexpectedEof"), ProximateShellError::RangeError { .. } => write!(f, "RangeError"), ProximateShellError::SyntaxError { .. } => write!(f, "SyntaxError"), ProximateShellError::MissingProperty { .. } => write!(f, "MissingProperty"), ProximateShellError::ArgumentError { .. } => write!(f, "ArgumentError"), ProximateShellError::Diagnostic(_) => write!(f, ""), ProximateShellError::CoerceError { .. } => write!(f, "CoerceError"), + ProximateShellError::UntaggedRuntimeError { .. 
} => write!(f, "UntaggedRuntimeError"), } } } @@ -482,81 +550,43 @@ impl std::error::Error for ShellError {} impl std::convert::From> for ShellError { fn from(input: Box) -> ShellError { - ProximateShellError::String(StringError { - title: format!("{}", input), - error: Value::nothing(), - }) - .start() + ShellError::untagged_runtime_error(format!("{}", input)) } } impl std::convert::From for ShellError { fn from(input: std::io::Error) -> ShellError { - ProximateShellError::String(StringError { - title: format!("{}", input), - error: Value::nothing(), - }) - .start() + ShellError::untagged_runtime_error(format!("{}", input)) } } impl std::convert::From for ShellError { fn from(input: subprocess::PopenError) -> ShellError { - ProximateShellError::String(StringError { - title: format!("{}", input), - error: Value::nothing(), - }) - .start() + ShellError::untagged_runtime_error(format!("{}", input)) } } impl std::convert::From for ShellError { fn from(input: serde_yaml::Error) -> ShellError { - ProximateShellError::String(StringError { - title: format!("{:?}", input), - error: Value::nothing(), - }) - .start() + ShellError::untagged_runtime_error(format!("{:?}", input)) } } impl std::convert::From for ShellError { fn from(input: toml::ser::Error) -> ShellError { - ProximateShellError::String(StringError { - title: format!("{:?}", input), - error: Value::nothing(), - }) - .start() + ShellError::untagged_runtime_error(format!("{:?}", input)) } } impl std::convert::From for ShellError { fn from(input: serde_json::Error) -> ShellError { - ProximateShellError::String(StringError { - title: format!("{:?}", input), - error: Value::nothing(), - }) - .start() - } -} - -impl std::convert::From for ShellError { - fn from(input: regex::Error) -> ShellError { - ProximateShellError::String(StringError { - title: format!("{:?}", input), - error: Value::nothing(), - }) - .start() + ShellError::untagged_runtime_error(format!("{:?}", input)) } } impl std::convert::From> for ShellError { fn from(input: Box) -> ShellError { - ProximateShellError::String(StringError { - title: format!("{:?}", input), - error: Value::nothing(), - }) - .start() + ShellError::untagged_runtime_error(format!("{:?}", input)) } } @@ -572,7 +602,6 @@ impl ShellErrorUtils> for Option> { } } } - pub trait CoerceInto { fn coerce_into(self, operation: impl Into) -> Result; } diff --git a/src/evaluate/evaluator.rs b/src/evaluate/evaluator.rs index 0bf31b5e71..9313d0fe5c 100644 --- a/src/evaluate/evaluator.rs +++ b/src/evaluate/evaluator.rs @@ -1,12 +1,15 @@ -use crate::errors::Description; -use crate::object::base::Block; +use crate::data::base::Block; +use crate::errors::ArgumentError; use crate::parser::{ hir::{self, Expression, RawExpression}, CommandRegistry, Text, }; use crate::prelude::*; +use crate::TaggedDictBuilder; use derive_new::new; use indexmap::IndexMap; +use log::trace; +use std::fmt; #[derive(new)] pub struct Scope { @@ -15,6 +18,15 @@ pub struct Scope { vars: IndexMap>, } +impl fmt::Display for Scope { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.debug_map() + .entry(&"$it", &format!("{:?}", self.it.item)) + .entries(self.vars.iter().map(|(k, v)| (k, &v.item))) + .finish() + } +} + impl Scope { pub(crate) fn empty() -> Scope { Scope { @@ -37,24 +49,41 @@ pub(crate) fn evaluate_baseline_expr( scope: &Scope, source: &Text, ) -> Result, ShellError> { + let tag = Tag { + span: expr.span, + anchor: None, + }; match &expr.item { - RawExpression::Literal(literal) => Ok(evaluate_literal(expr.copy_span(literal), 
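// Illustrative sketch, not part of this patch: with the From impls above all
// routing through untagged_runtime_error, a plain `?` on a std error inside a
// function returning Result<_, ShellError> now produces an
// UntaggedRuntimeError carrying only the formatted message (no Tag, so no
// source label in the diagnostic). The helper below is hypothetical.
fn read_config_text(path: &std::path::Path) -> Result<String, ShellError> {
    let contents = std::fs::read_to_string(path)?; // std::io::Error -> ShellError
    Ok(contents)
}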
source)), - RawExpression::FilePath(path) => Ok(Value::path(path.clone()).tagged(expr.span())), - RawExpression::Synthetic(hir::Synthetic::String(s)) => Ok(Value::string(s).tagged_unknown()), - RawExpression::Variable(var) => evaluate_reference(var, scope, source), + RawExpression::Literal(literal) => Ok(evaluate_literal(literal.tagged(tag), source)), + RawExpression::ExternalWord => Err(ShellError::argument_error( + "Invalid external word", + ArgumentError::InvalidExternalWord, + tag, + )), + RawExpression::FilePath(path) => Ok(Value::path(path.clone()).tagged(tag)), + RawExpression::Synthetic(hir::Synthetic::String(s)) => { + Ok(Value::string(s).tagged_unknown()) + } + RawExpression::Variable(var) => evaluate_reference(var, scope, source, tag), + RawExpression::Command(_) => evaluate_command(tag, scope, source), RawExpression::ExternalCommand(external) => evaluate_external(external, scope, source), RawExpression::Binary(binary) => { let left = evaluate_baseline_expr(binary.left(), registry, scope, source)?; let right = evaluate_baseline_expr(binary.right(), registry, scope, source)?; + trace!("left={:?} right={:?}", left.item, right.item); + match left.compare(binary.op(), &*right) { - Ok(result) => Ok(Tagged::from_simple_spanned_item( - Value::boolean(result), - expr.span(), - )), + Ok(result) => Ok(Value::boolean(result).tagged(tag)), Err((left_type, right_type)) => Err(ShellError::coerce_error( - binary.left().copy_span(left_type), - binary.right().copy_span(right_type), + left_type.tagged(Tag { + span: binary.left().span, + anchor: None, + }), + right_type.tagged(Tag { + span: binary.right().span, + anchor: None, + }), )), } } @@ -66,12 +95,11 @@ pub(crate) fn evaluate_baseline_expr( exprs.push(expr); } - Ok(Value::List(exprs).tagged(Tag::unknown_origin(expr.span()))) + Ok(Value::Table(exprs).tagged(tag)) + } + RawExpression::Block(block) => { + Ok(Value::Block(Block::new(block.clone(), source.clone(), tag.clone())).tagged(&tag)) } - RawExpression::Block(block) => Ok(Tagged::from_simple_spanned_item( - Value::Block(Block::new(block.clone(), source.clone(), expr.span())), - expr.span(), - )), RawExpression::Path(path) => { let value = evaluate_baseline_expr(path.head(), registry, scope, source)?; let mut item = value; @@ -81,24 +109,36 @@ pub(crate) fn evaluate_baseline_expr( match next { None => { - return Err(ShellError::missing_property( - Description::from(item.tagged_type_name()), - Description::from(name.clone()), - )) + let possibilities = item.data_descriptors(); + + let mut possible_matches: Vec<_> = possibilities + .iter() + .map(|x| (natural::distance::levenshtein_distance(x, &name), x)) + .collect(); + + possible_matches.sort(); + + if possible_matches.len() > 0 { + return Err(ShellError::labeled_error( + "Unknown column", + format!("did you mean '{}'?", possible_matches[0].1), + &tag, + )); + } else { + return Err(ShellError::labeled_error( + "Unknown column", + "row does not have this column", + &tag, + )); + } } Some(next) => { - item = Tagged::from_simple_spanned_item( - next.clone().item, - (expr.span().start, name.span().end), - ) + item = next.clone().item.tagged(&tag); } }; } - Ok(Tagged::from_simple_spanned_item( - item.item().clone(), - expr.span(), - )) + Ok(item.item().clone().tagged(tag)) } RawExpression::Boolean(_boolean) => unimplemented!(), } @@ -108,8 +148,9 @@ fn evaluate_literal(literal: Tagged<&hir::Literal>, source: &Text) -> Tagged int.into(), hir::Literal::Size(int, unit) => unit.compute(int), - hir::Literal::String(span) => 
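// Illustrative sketch, not part of this patch: the "Unknown column" branch
// above ranks a row's existing column names by Levenshtein distance and
// suggests the closest one. The helper below is a hypothetical distillation
// of that logic, reusing the same natural::distance::levenshtein_distance
// call the evaluator uses.
fn closest_column(possibilities: &[String], wanted: &str) -> Option<String> {
    let mut scored: Vec<_> = possibilities
        .iter()
        .map(|name| (natural::distance::levenshtein_distance(name, wanted), name))
        .collect();
    scored.sort();
    // smallest edit distance first; None when the row has no columns at all
    scored.first().map(|(_, name)| name.to_string())
}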
Value::string(span.slice(source)), - hir::Literal::Bare => Value::string(literal.span().slice(source)), + hir::Literal::String(tag) => Value::string(tag.slice(source)), + hir::Literal::GlobPattern(pattern) => Value::pattern(pattern), + hir::Literal::Bare => Value::string(literal.tag().slice(source)), }; literal.map(|_| result) @@ -119,14 +160,41 @@ fn evaluate_reference( name: &hir::Variable, scope: &Scope, source: &Text, + tag: Tag, ) -> Result, ShellError> { + trace!("Evaluating {} with Scope {}", name, scope); match name { - hir::Variable::It(span) => Ok(scope.it.item.clone().simple_spanned(span)), - hir::Variable::Other(span) => Ok(scope - .vars - .get(span.slice(source)) - .map(|v| v.clone()) - .unwrap_or_else(|| Value::nothing().simple_spanned(span))), + hir::Variable::It(_) => Ok(scope.it.item.clone().tagged(tag)), + hir::Variable::Other(inner) => match inner.slice(source) { + x if x == "nu:env" => { + let mut dict = TaggedDictBuilder::new(&tag); + for v in std::env::vars() { + dict.insert(v.0, Value::string(v.1)); + } + Ok(dict.into_tagged_value()) + } + x if x == "nu:config" => { + let config = crate::data::config::read(tag.clone(), &None)?; + Ok(Value::row(config).tagged(tag)) + } + x if x == "nu:path" => { + let mut table = vec![]; + match std::env::var_os("PATH") { + Some(paths) => { + for path in std::env::split_paths(&paths) { + table.push(Value::path(path).tagged(&tag)); + } + } + _ => {} + } + Ok(Value::table(&table).tagged(tag)) + } + x => Ok(scope + .vars + .get(x) + .map(|v| v.clone()) + .unwrap_or_else(|| Value::nothing().tagged(tag))), + }, } } @@ -136,6 +204,10 @@ fn evaluate_external( _source: &Text, ) -> Result, ShellError> { Err(ShellError::syntax_error( - "Unexpected external command".tagged(external.name()), + "Unexpected external command".tagged(*external.name()), )) } + +fn evaluate_command(tag: Tag, _scope: &Scope, _source: &Text) -> Result, ShellError> { + Err(ShellError::syntax_error("Unexpected command".tagged(tag))) +} diff --git a/src/format.rs b/src/format.rs index 10b92000b9..6cdd5b256e 100644 --- a/src/format.rs +++ b/src/format.rs @@ -2,14 +2,12 @@ pub(crate) mod entries; pub(crate) mod generic; pub(crate) mod list; pub(crate) mod table; -pub(crate) mod vtable; use crate::prelude::*; pub(crate) use entries::EntriesView; pub(crate) use table::TableView; -pub(crate) use vtable::VTableView; pub(crate) trait RenderView { fn render_view(&self, host: &mut dyn Host) -> Result<(), ShellError>; diff --git a/src/format/generic.rs b/src/format/generic.rs index 6142b1122b..fd058f31fc 100644 --- a/src/format/generic.rs +++ b/src/format/generic.rs @@ -1,5 +1,5 @@ +use crate::data::Value; use crate::format::{EntriesView, RenderView, TableView}; -use crate::object::Value; use crate::prelude::*; use derive_new::new; @@ -13,8 +13,8 @@ impl RenderView for GenericView<'_> { fn render_view(&self, host: &mut dyn Host) -> Result<(), ShellError> { match self.value { Value::Primitive(p) => Ok(host.stdout(&p.format(None))), - Value::List(l) => { - let view = TableView::from_list(l); + Value::Table(l) => { + let view = TableView::from_list(l, 0); if let Some(view) = view { view.render_view(host)?; @@ -23,7 +23,7 @@ impl RenderView for GenericView<'_> { Ok(()) } - o @ Value::Object(_) => { + o @ Value::Row(_) => { let view = EntriesView::from_value(o); view.render_view(host)?; Ok(()) @@ -36,10 +36,7 @@ impl RenderView for GenericView<'_> { Ok(()) } - Value::Binary(_) => { - host.stdout(""); - Ok(()) - } + Value::Error(e) => Err(e.clone()), } } } diff --git a/src/format/table.rs 
b/src/format/table.rs index 37ed516686..a59e1adafb 100644 --- a/src/format/table.rs +++ b/src/format/table.rs @@ -1,5 +1,5 @@ +use crate::data::Value; use crate::format::RenderView; -use crate::object::Value; use crate::prelude::*; use derive_new::new; use textwrap::fill; @@ -16,6 +16,11 @@ pub struct TableView { entries: Vec>, } +enum TableMode { + Light, + Normal, +} + impl TableView { fn merge_descriptors(values: &[Tagged]) -> Vec { let mut ret = vec![]; @@ -29,7 +34,7 @@ impl TableView { ret } - pub fn from_list(values: &[Tagged]) -> Option { + pub fn from_list(values: &[Tagged], starting_idx: usize) -> Option { if values.len() == 0 { return None; } @@ -37,7 +42,7 @@ impl TableView { let mut headers = TableView::merge_descriptors(values); if headers.len() == 0 { - headers.push("value".to_string()); + headers.push("".to_string()); } let mut entries = vec![]; @@ -45,7 +50,7 @@ impl TableView { for (idx, value) in values.iter().enumerate() { let mut row: Vec<(String, &'static str)> = match value { Tagged { - item: Value::Object(..), + item: Value::Row(..), .. } => headers .iter() @@ -63,7 +68,7 @@ impl TableView { if values.len() > 1 { // Indices are black, bold, right-aligned: - row.insert(0, (format!("{}", idx.to_string()), "Fdbr")); + row.insert(0, (format!("{}", (starting_idx + idx).to_string()), "Fdbr")); } entries.push(row); @@ -198,15 +203,36 @@ impl RenderView for TableView { } let mut table = Table::new(); - table.set_format( - FormatBuilder::new() - .column_separator('│') - .separator(LinePosition::Top, LineSeparator::new('━', '┯', ' ', ' ')) - .separator(LinePosition::Title, LineSeparator::new('─', '┼', ' ', ' ')) - .separator(LinePosition::Bottom, LineSeparator::new('━', '┷', ' ', ' ')) - .padding(1, 1) - .build(), - ); + + let table_mode = crate::data::config::config(Tag::unknown())? 
+ .get("table_mode") + .map(|s| match s.as_string().unwrap().as_ref() { + "light" => TableMode::Light, + _ => TableMode::Normal, + }) + .unwrap_or(TableMode::Normal); + + match table_mode { + TableMode::Light => { + table.set_format( + FormatBuilder::new() + .separator(LinePosition::Title, LineSeparator::new('─', '─', ' ', ' ')) + .padding(1, 1) + .build(), + ); + } + _ => { + table.set_format( + FormatBuilder::new() + .column_separator('│') + .separator(LinePosition::Top, LineSeparator::new('━', '┯', ' ', ' ')) + .separator(LinePosition::Title, LineSeparator::new('─', '┼', ' ', ' ')) + .separator(LinePosition::Bottom, LineSeparator::new('━', '┷', ' ', ' ')) + .padding(1, 1) + .build(), + ); + } + } let header: Vec = self .headers diff --git a/src/format/vtable.rs b/src/format/vtable.rs deleted file mode 100644 index 08c4a72d0d..0000000000 --- a/src/format/vtable.rs +++ /dev/null @@ -1,81 +0,0 @@ -use crate::format::RenderView; -use crate::object::Value; -use crate::prelude::*; -use derive_new::new; - -use prettytable::format::{FormatBuilder, LinePosition, LineSeparator}; -use prettytable::{color, Attr, Cell, Row, Table}; - -#[derive(new)] -pub struct VTableView { - entries: Vec>, -} - -impl VTableView { - pub fn from_list(values: &[Tagged]) -> Option { - if values.len() == 0 { - return None; - } - - let item = &values[0]; - let headers = item.data_descriptors(); - - if headers.len() == 0 { - return None; - } - - let mut entries = vec![]; - - for header in headers { - let mut row = vec![]; - - row.push(header.clone()); - for value in values { - row.push(value.get_data(&header).borrow().format_leaf(Some(&header))); - } - entries.push(row); - } - - Some(VTableView { entries }) - } -} - -impl RenderView for VTableView { - fn render_view(&self, host: &mut dyn Host) -> Result<(), ShellError> { - if self.entries.len() == 0 { - return Ok(()); - } - - let mut table = Table::new(); - table.set_format( - FormatBuilder::new() - .column_separator('│') - .separator(LinePosition::Top, LineSeparator::new('━', '┯', ' ', ' ')) - .separator(LinePosition::Title, LineSeparator::new('─', '┼', ' ', ' ')) - .separator(LinePosition::Bottom, LineSeparator::new('━', '┷', ' ', ' ')) - .padding(1, 1) - .build(), - ); - - for row in &self.entries { - table.add_row(Row::new( - row.iter() - .enumerate() - .map(|(idx, h)| { - if idx == 0 { - Cell::new(h) - .with_style(Attr::ForegroundColor(color::GREEN)) - .with_style(Attr::Bold) - } else { - Cell::new(h) - } - }) - .collect(), - )); - } - - table.print_term(&mut *host.out_terminal()).unwrap(); - - Ok(()) - } -} diff --git a/src/fuzzysearch.rs b/src/fuzzysearch.rs new file mode 100644 index 0000000000..c7d58ed632 --- /dev/null +++ b/src/fuzzysearch.rs @@ -0,0 +1,187 @@ +use ansi_term::{ANSIString, ANSIStrings, Colour, Style}; +#[cfg(feature = "crossterm")] +use crossterm::{cursor, terminal, ClearType, InputEvent, KeyEvent, RawScreen}; +use std::io::Write; +use sublime_fuzzy::best_match; + +pub enum SelectionResult { + Selected(String), + Edit(String), + NoSelection, +} + +pub fn interactive_fuzzy_search(lines: &Vec<&str>, max_results: usize) -> SelectionResult { + #[derive(PartialEq)] + enum State { + Selecting, + Quit, + Selected(String), + Edit(String), + } + let mut state = State::Selecting; + #[cfg(feature = "crossterm")] + { + if let Ok(_raw) = RawScreen::into_raw_mode() { + // User input for search + let mut searchinput = String::new(); + let mut selected = 0; + + let mut cursor = cursor(); + let _ = cursor.hide(); + let input = crossterm::input(); + let mut 
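// Illustrative sketch, not part of this patch: the table renderer above now
// reads a "table_mode" key from the user config and only recognizes "light";
// any other value (or a missing key) keeps the existing boxed style. A
// self-contained distillation of that decision, with `config_value` standing
// in for crate::data::config::config(Tag::unknown())?.get("table_mode"):
enum TableMode {
    Light,
    Normal,
}

fn pick_table_mode(config_value: Option<&str>) -> TableMode {
    match config_value {
        Some("light") => TableMode::Light,
        _ => TableMode::Normal,
    }
}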
sync_stdin = input.read_sync(); + + while state == State::Selecting { + let mut selected_lines = fuzzy_search(&searchinput, &lines, max_results); + let num_lines = selected_lines.len(); + paint_selection_list(&selected_lines, selected); + if let Some(ev) = sync_stdin.next() { + match ev { + InputEvent::Keyboard(k) => match k { + KeyEvent::Esc | KeyEvent::Ctrl('c') => { + state = State::Quit; + } + KeyEvent::Up => { + if selected > 0 { + selected -= 1; + } + } + KeyEvent::Down => { + if selected + 1 < selected_lines.len() { + selected += 1; + } + } + KeyEvent::Char('\n') => { + state = if selected_lines.len() > 0 { + State::Selected(selected_lines.remove(selected).text) + } else { + State::Edit("".to_string()) + }; + } + KeyEvent::Char('\t') | KeyEvent::Right => { + state = if selected_lines.len() > 0 { + State::Edit(selected_lines.remove(selected).text) + } else { + State::Edit("".to_string()) + }; + } + KeyEvent::Char(ch) => { + searchinput.push(ch); + selected = 0; + } + KeyEvent::Backspace => { + searchinput.pop(); + selected = 0; + } + _ => {} + }, + _ => {} + } + } + if num_lines > 0 { + cursor.move_up(num_lines as u16); + } + } + let (_x, y) = cursor.pos(); + let _ = cursor.goto(0, y - 1); + let _ = cursor.show(); + let _ = RawScreen::disable_raw_mode(); + } + terminal().clear(ClearType::FromCursorDown).unwrap(); + } + match state { + State::Selected(line) => SelectionResult::Selected(line), + State::Edit(line) => SelectionResult::Edit(line), + _ => SelectionResult::NoSelection, + } +} + +pub struct Match { + text: String, + char_matches: Vec<(usize, usize)>, +} + +pub fn fuzzy_search(searchstr: &str, lines: &Vec<&str>, max_results: usize) -> Vec { + if searchstr.is_empty() { + return lines + .iter() + .take(max_results) + .map(|line| Match { + text: line.to_string(), + char_matches: Vec::new(), + }) + .collect(); + } + + let mut matches = lines + .iter() + .enumerate() + .map(|(idx, line)| (idx, best_match(&searchstr, line))) + .filter(|(_i, m)| m.is_some()) + .map(|(i, m)| (i, m.unwrap())) + .collect::>(); + matches.sort_by(|a, b| b.1.score().cmp(&a.1.score())); + + let results: Vec = matches + .iter() + .take(max_results) + .map(|(i, m)| Match { + text: lines[*i].to_string(), + char_matches: m.continuous_matches(), + }) + .collect(); + results +} + +#[cfg(feature = "crossterm")] +fn highlight(textmatch: &Match, normal: Style, highlighted: Style) -> Vec { + let text = &textmatch.text; + let mut ansi_strings = vec![]; + let mut idx = 0; + for (match_idx, len) in &textmatch.char_matches { + ansi_strings.push(normal.paint(&text[idx..*match_idx])); + idx = match_idx + len; + ansi_strings.push(highlighted.paint(&text[*match_idx..idx])); + } + if idx < text.len() { + ansi_strings.push(normal.paint(&text[idx..text.len()])); + } + ansi_strings +} + +#[cfg(feature = "crossterm")] +fn paint_selection_list(lines: &Vec, selected: usize) { + let terminal = terminal(); + let size = terminal.terminal_size(); + let width = size.0 as usize; + let cursor = cursor(); + let (_x, y) = cursor.pos(); + for (i, line) in lines.iter().enumerate() { + let _ = cursor.goto(0, y + (i as u16)); + let (style, highlighted) = if selected == i { + (Colour::White.normal(), Colour::Cyan.normal()) + } else { + (Colour::White.dimmed(), Colour::Cyan.normal()) + }; + let mut ansi_strings = highlight(line, style, highlighted); + for _ in line.text.len()..width { + ansi_strings.push(style.paint(' '.to_string())); + } + println!("{}", ANSIStrings(&ansi_strings)); + } + let _ = cursor.goto(0, y + (lines.len() as u16)); + 
print!( + "{}", + Colour::Blue.paint("[ESC to quit, Enter to execute, Tab to edit]") + ); + + let _ = std::io::stdout().flush(); + // Clear additional lines from previous selection + terminal.clear(ClearType::FromCursorDown).unwrap(); +} + +#[test] +fn fuzzy_match() { + let matches = fuzzy_search("cb", &vec!["abc", "cargo build"], 1); + assert_eq!(matches[0].text, "cargo build"); +} diff --git a/src/lib.rs b/src/lib.rs index 2e9b8fb061..520e08a136 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -1,5 +1,8 @@ -#![feature(generators)] -#![feature(proc_macro_hygiene)] +#![recursion_limit = "1024"] + +#[cfg(test)] +#[macro_use] +extern crate indexmap; #[macro_use] mod prelude; @@ -7,12 +10,13 @@ mod prelude; mod cli; mod commands; mod context; +mod data; mod env; mod errors; mod evaluate; mod format; +mod fuzzysearch; mod git; -mod object; mod parser; mod plugin; mod shell; @@ -21,17 +25,18 @@ mod traits; mod utils; pub use crate::commands::command::{CallInfo, ReturnSuccess, ReturnValue}; -pub use crate::context::{SourceMap, SpanSource}; +pub use crate::context::AnchorLocation; pub use crate::env::host::BasicHost; -pub use crate::parser::hir::SyntaxType; +pub use crate::parser::hir::SyntaxShape; pub use crate::parser::parse::token_tree_builder::TokenTreeBuilder; pub use crate::plugin::{serve_plugin, Plugin}; pub use crate::utils::{AbsoluteFile, AbsolutePath, RelativePath}; pub use cli::cli; +pub use data::base::{Primitive, Value}; +pub use data::config::{config_path, APP_INFO}; +pub use data::dict::{Dictionary, TaggedDictBuilder}; +pub use data::meta::{Span, Spanned, SpannedItem, Tag, Tagged, TaggedItem}; pub use errors::{CoerceInto, ShellError}; pub use num_traits::cast::ToPrimitive; -pub use object::base::{Primitive, Value}; -pub use object::dict::{Dictionary, TaggedDictBuilder}; -pub use object::meta::{Span, Tag, Tagged, TaggedItem}; pub use parser::parse::text::Text; pub use parser::registry::{EvaluatedArgs, NamedType, PositionalType, Signature}; diff --git a/src/object/config.rs b/src/object/config.rs deleted file mode 100644 index 53a1c446f8..0000000000 --- a/src/object/config.rs +++ /dev/null @@ -1,84 +0,0 @@ -use crate::commands::from_toml::convert_toml_value_to_nu_value; -use crate::commands::to_toml::value_to_toml_value; -use crate::errors::ShellError; -use crate::object::{Dictionary, Value}; -use crate::prelude::*; -use app_dirs::*; -use indexmap::IndexMap; -use log::trace; -use serde::{Deserialize, Serialize}; -use std::fs::{self, OpenOptions}; -use std::io; -use std::path::{Path, PathBuf}; - -const APP_INFO: AppInfo = AppInfo { - name: "nu", - author: "nu shell developers", -}; - -#[derive(Deserialize, Serialize)] -struct Config { - #[serde(flatten)] - extra: IndexMap>, -} - -pub(crate) fn config_path() -> Result { - let location = app_root(AppDataType::UserConfig, &APP_INFO) - .map_err(|err| ShellError::string(&format!("Couldn't open config file:\n{}", err)))?; - - Ok(location.join("config.toml")) -} - -pub(crate) fn write_config(config: &IndexMap>) -> Result<(), ShellError> { - let location = app_root(AppDataType::UserConfig, &APP_INFO) - .map_err(|err| ShellError::string(&format!("Couldn't open config file:\n{}", err)))?; - - let filename = location.join("config.toml"); - touch(&filename)?; - - let contents = - value_to_toml_value(&Value::Object(Dictionary::new(config.clone())).tagged_unknown())?; - - let contents = toml::to_string(&contents)?; - - fs::write(&filename, &contents)?; - - Ok(()) -} - -pub(crate) fn config(span: impl Into) -> Result>, ShellError> { - let span = 
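// Illustrative sketch, not part of this patch: an additional in-module test
// that could sit next to fuzzy_match above, covering the empty-query branch
// of fuzzy_search (the first max_results lines come back unranked and with
// no character matches).
#[test]
fn empty_query_returns_lines_in_order() {
    let lines = vec!["abc", "cargo build", "cargo test"];
    let matches = fuzzy_search("", &lines, 2);
    assert_eq!(matches.len(), 2);
    assert_eq!(matches[0].text, "abc");
    assert_eq!(matches[1].text, "cargo build");
}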
span.into(); - - let location = app_root(AppDataType::UserConfig, &APP_INFO) - .map_err(|err| ShellError::string(&format!("Couldn't open config file:\n{}", err)))?; - - let filename = location.join("config.toml"); - touch(&filename)?; - - trace!("config file = {}", filename.display()); - - let contents = fs::read_to_string(filename) - .map(|v| v.simple_spanned(span)) - .map_err(|err| ShellError::string(&format!("Couldn't read config file:\n{}", err)))?; - - let parsed: toml::Value = toml::from_str(&contents) - .map_err(|err| ShellError::string(&format!("Couldn't parse config file:\n{}", err)))?; - - let value = convert_toml_value_to_nu_value(&parsed, Tag::unknown_origin(span)); - let tag = value.tag(); - match value.item { - Value::Object(Dictionary { entries }) => Ok(entries), - other => Err(ShellError::type_error( - "Dictionary", - other.type_name().tagged(tag), - )), - } -} - -// A simple implementation of `% touch path` (ignores existing files) -fn touch(path: &Path) -> io::Result<()> { - match OpenOptions::new().create(true).write(true).open(path) { - Ok(_) => Ok(()), - Err(e) => Err(e), - } -} diff --git a/src/object/meta.rs b/src/object/meta.rs deleted file mode 100644 index f1d2b6713d..0000000000 --- a/src/object/meta.rs +++ /dev/null @@ -1,286 +0,0 @@ -use crate::context::{SourceMap, SpanSource}; -use crate::prelude::*; -use crate::Text; -use derive_new::new; -use getset::Getters; -use serde::Deserialize; -use serde::Serialize; -use uuid::Uuid; - -#[derive(new, Debug, Clone, Copy, Eq, PartialEq, Ord, PartialOrd, Serialize, Deserialize, Hash)] -pub struct Tagged { - pub tag: Tag, - pub item: T, -} - -impl HasSpan for Tagged { - fn span(&self) -> Span { - self.tag.span - } -} - -pub trait TaggedItem: Sized { - fn tagged(self, tag: impl Into) -> Tagged { - Tagged::from_item(self, tag.into()) - } - - fn simple_spanned(self, span: impl Into) -> Tagged { - Tagged::from_simple_spanned_item(self, span.into()) - } - - // For now, this is a temporary facility. In many cases, there are other useful spans that we - // could be using, such as the original source spans of JSON or Toml files, but we don't yet - // have the infrastructure to make that work. 
- fn tagged_unknown(self) -> Tagged { - Tagged::from_item( - self, - Tag { - span: Span::unknown(), - origin: None, - }, - ) - } -} - -impl TaggedItem for T {} - -impl std::ops::Deref for Tagged { - type Target = T; - - fn deref(&self) -> &T { - &self.item - } -} - -impl Tagged { - pub fn spanned(self, span: impl Into) -> Tagged { - Tagged::from_item( - self.item, - Tag { - span: span.into(), - origin: None, - }, - ) - } - - pub fn from_item(item: T, tag: impl Into) -> Tagged { - Tagged { - item, - tag: tag.into(), - } - } - - pub fn from_simple_spanned_item(item: T, span: impl Into) -> Tagged { - Tagged::from_item( - item, - Tag { - span: span.into(), - origin: None, - }, - ) - } - - pub fn map(self, input: impl FnOnce(T) -> U) -> Tagged { - let tag = self.tag(); - - let mapped = input(self.item); - Tagged::from_item(mapped, tag.clone()) - } - - pub(crate) fn copy_span(&self, output: U) -> Tagged { - let span = self.span(); - - Tagged::from_simple_spanned_item(output, span) - } - - pub fn source(&self, source: &Text) -> Text { - Text::from(self.span().slice(source)) - } - - pub fn span(&self) -> Span { - self.tag.span - } - - pub fn tag(&self) -> Tag { - self.tag - } - - pub fn origin(&self) -> Option { - self.tag.origin - } - - pub fn origin_name(&self, source_map: &SourceMap) -> Option { - match self.tag.origin.map(|x| source_map.get(&x)) { - Some(Some(SpanSource::File(file))) => Some(file.clone()), - Some(Some(SpanSource::Url(url))) => Some(url.clone()), - _ => None, - } - } - - pub fn item(&self) -> &T { - &self.item - } - - pub fn into_parts(self) -> (T, Tag) { - (self.item, self.tag) - } -} - -impl From<&Tagged> for Span { - fn from(input: &Tagged) -> Span { - input.span() - } -} - -impl From<&Span> for Span { - fn from(input: &Span) -> Span { - *input - } -} - -impl From> for Span { - fn from(input: nom5_locate::LocatedSpan<&str>) -> Span { - Span { - start: input.offset, - end: input.offset + input.fragment.len(), - } - } -} - -impl From<(nom5_locate::LocatedSpan, nom5_locate::LocatedSpan)> for Span { - fn from(input: (nom5_locate::LocatedSpan, nom5_locate::LocatedSpan)) -> Span { - Span { - start: input.0.offset, - end: input.1.offset, - } - } -} - -impl From<(usize, usize)> for Span { - fn from(input: (usize, usize)) -> Span { - Span { - start: input.0, - end: input.1, - } - } -} - -impl From<&std::ops::Range> for Span { - fn from(input: &std::ops::Range) -> Span { - Span { - start: input.start, - end: input.end, - } - } -} - -#[derive( - Debug, Clone, Copy, PartialEq, Eq, Ord, PartialOrd, Serialize, Deserialize, Hash, Getters, -)] -pub struct Tag { - pub origin: Option, - pub span: Span, -} - -impl From for Tag { - fn from(span: Span) -> Self { - Tag { origin: None, span } - } -} - -impl From<&Span> for Tag { - fn from(span: &Span) -> Self { - Tag { - origin: None, - span: *span, - } - } -} - -impl From for Span { - fn from(tag: Tag) -> Self { - tag.span - } -} - -impl From<&Tag> for Span { - fn from(tag: &Tag) -> Self { - tag.span - } -} - -impl Tag { - pub fn unknown_origin(span: Span) -> Tag { - Tag { origin: None, span } - } - - pub fn unknown() -> Tag { - Tag { - origin: None, - span: Span::unknown(), - } - } -} - -#[derive(Debug, Clone, Copy, PartialEq, Eq, Ord, PartialOrd, Serialize, Deserialize, Hash)] -pub struct Span { - pub(crate) start: usize, - pub(crate) end: usize, -} - -impl From> for Span { - fn from(input: Option) -> Span { - match input { - None => Span { start: 0, end: 0 }, - Some(span) => span, - } - } -} - -impl Span { - pub fn unknown() -> Span { - Span { 
start: 0, end: 0 } - } - - /* - pub fn unknown_with_uuid(uuid: Uuid) -> Span { - Span { - start: 0, - end: 0, - source: Some(uuid), - } - } - */ - - pub fn is_unknown(&self) -> bool { - self.start == 0 && self.end == 0 - } - - pub fn slice<'a>(&self, source: &'a str) -> &'a str { - &source[self.start..self.end] - } -} - -impl language_reporting::ReportingSpan for Span { - fn with_start(&self, start: usize) -> Self { - Span { - start, - end: self.end, - } - } - - fn with_end(&self, end: usize) -> Self { - Span { - start: self.start, - end, - } - } - - fn start(&self) -> usize { - self.start - } - - fn end(&self) -> usize { - self.end - } -} diff --git a/src/parser.rs b/src/parser.rs index 2fd891efb0..7acdf6e6bf 100644 --- a/src/parser.rs +++ b/src/parser.rs @@ -7,18 +7,17 @@ pub(crate) mod registry; use crate::errors::ShellError; pub(crate) use deserializer::ConfigDeserializer; -pub(crate) use hir::baseline_parse_tokens::baseline_parse_tokens; +pub(crate) use hir::syntax_shape::flat_shape::FlatShape; +pub(crate) use hir::TokensIterator; pub(crate) use parse::call_node::CallNode; pub(crate) use parse::files::Files; -pub(crate) use parse::flag::Flag; +pub(crate) use parse::flag::{Flag, FlagKind}; pub(crate) use parse::operator::Operator; pub(crate) use parse::parser::{nom_input, pipeline}; -pub(crate) use parse::pipeline::{Pipeline, PipelineElement}; pub(crate) use parse::text::Text; -pub(crate) use parse::token_tree::{DelimitedNode, Delimiter, PathNode, TokenNode}; -pub(crate) use parse::tokens::{RawToken, Token}; +pub(crate) use parse::token_tree::{DelimitedNode, Delimiter, TokenNode}; +pub(crate) use parse::tokens::{RawNumber, RawToken}; pub(crate) use parse::unit::Unit; -pub(crate) use parse_command::parse_command; pub(crate) use registry::CommandRegistry; pub fn parse(input: &str) -> Result { diff --git a/src/parser/deserializer.rs b/src/parser/deserializer.rs index d37df05157..4b8bf913d5 100644 --- a/src/parser/deserializer.rs +++ b/src/parser/deserializer.rs @@ -37,7 +37,7 @@ impl<'de> ConfigDeserializer<'de> { let value: Option> = if name == "rest" { let positional = self.call.args.slice_from(self.position); self.position += positional.len(); - Some(Value::List(positional).tagged_unknown()) // TODO: correct span + Some(Value::Table(positional).tagged_unknown()) // TODO: correct tag } else { if self.call.args.has(name) { self.call.args.get(name).map(|x| x.clone()) @@ -52,9 +52,7 @@ impl<'de> ConfigDeserializer<'de> { self.stack.push(DeserializerItem { key_struct_field: Some((name.to_string(), name)), - val: value.unwrap_or_else(|| { - Value::nothing().tagged(Tag::unknown_origin(self.call.name_span)) - }), + val: value.unwrap_or_else(|| Value::nothing().tagged(&self.call.name_tag)), }); Ok(()) @@ -240,14 +238,11 @@ impl<'de, 'a> de::Deserializer<'de> for &'a mut ConfigDeserializer<'de> { trace!(" Extracting {:?} for vec", value.val); match value.val.into_parts() { - (Value::List(items), _) => { + (Value::Table(items), _) => { let de = SeqDeserializer::new(&mut self, items.into_iter()); visitor.visit_seq(de) } - (other, tag) => Err(ShellError::type_error( - "Vec", - other.type_name().tagged(tag), - )), + (other, tag) => Err(ShellError::type_error("Vec", other.type_name().tagged(tag))), } } fn deserialize_tuple(mut self, len: usize, visitor: V) -> Result @@ -255,10 +250,14 @@ impl<'de, 'a> de::Deserializer<'de> for &'a mut ConfigDeserializer<'de> { V: Visitor<'de>, { let value = self.pop(); - trace!(" Extracting {:?} for tuple with {} elements", value.val, len); + trace!( + " Extracting 
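// Illustrative sketch, not part of this patch: src/object/meta.rs is removed
// above because Span, Tag and Tagged now live in src/data/meta.rs, with
// `origin` renamed to `anchor` and explicit constructors. Based only on the
// calls visible in this diff (Span::new, Tag { span, anchor }, Tag::until);
// the helper names are hypothetical.
fn tag_at(start: usize, end: usize) -> Tag {
    Tag {
        span: Span::new(start, end),
        anchor: None, // no originating file or URL
    }
}

fn spanning(left: &Tag, right: &Tag) -> Tag {
    // joins two tags into one covering both, as ProximateShellError::tag()
    // does for CoerceError above
    left.until(right)
}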
{:?} for tuple with {} elements", + value.val, + len + ); match value.val.into_parts() { - (Value::List(items), _) => { + (Value::Table(items), _) => { let de = SeqDeserializer::new(&mut self, items.into_iter()); visitor.visit_seq(de) } @@ -298,7 +297,7 @@ impl<'de, 'a> de::Deserializer<'de> for &'a mut ConfigDeserializer<'de> { val: T, name: &'static str, fields: &'static [&'static str], - visitor: V + visitor: V, ) -> Result where T: serde::Serialize, @@ -311,9 +310,10 @@ impl<'de, 'a> de::Deserializer<'de> for &'a mut ConfigDeserializer<'de> { return Ok(r); } trace!( - "deserializing struct {:?} {:?} (stack={:?})", + "deserializing struct {:?} {:?} (saw_root={} stack={:?})", name, fields, + self.saw_root, self.stack ); @@ -327,7 +327,13 @@ impl<'de, 'a> de::Deserializer<'de> for &'a mut ConfigDeserializer<'de> { let type_name = std::any::type_name::(); let tagged_val_name = std::any::type_name::>(); - if name == tagged_val_name { + trace!( + "type_name={} tagged_val_name={}", + type_name, + tagged_val_name + ); + + if type_name == tagged_val_name { return visit::, _>(value.val, name, fields, visitor); } @@ -364,7 +370,7 @@ impl<'de, 'a> de::Deserializer<'de> for &'a mut ConfigDeserializer<'de> { } => { let i: i64 = int.tagged(value.val.tag).coerce_into("converting to i64")?; visit::, _>(i.tagged(tag), name, fields, visitor) - }, + } Tagged { item: Value::Primitive(Primitive::String(string)), .. @@ -398,21 +404,20 @@ impl<'de, 'a> de::Deserializer<'de> for &'a mut ConfigDeserializer<'de> { } } -struct SeqDeserializer<'a, 'de: 'a, I: Iterator>> { +struct SeqDeserializer<'a, 'de: 'a, I: Iterator>> { de: &'a mut ConfigDeserializer<'de>, vals: I, } -impl<'a, 'de: 'a, I: Iterator>> SeqDeserializer<'a, 'de, I> { +impl<'a, 'de: 'a, I: Iterator>> SeqDeserializer<'a, 'de, I> { fn new(de: &'a mut ConfigDeserializer<'de>, vals: I) -> Self { - SeqDeserializer { - de, - vals, - } + SeqDeserializer { de, vals } } } -impl<'a, 'de: 'a, I: Iterator>> de::SeqAccess<'de> for SeqDeserializer<'a, 'de, I> { +impl<'a, 'de: 'a, I: Iterator>> de::SeqAccess<'de> + for SeqDeserializer<'a, 'de, I> +{ type Error = ShellError; fn next_element_seed(&mut self, seed: T) -> Result, Self::Error> @@ -441,10 +446,7 @@ struct StructDeserializer<'a, 'de: 'a> { impl<'a, 'de: 'a> StructDeserializer<'a, 'de> { fn new(de: &'a mut ConfigDeserializer<'de>, fields: &'static [&'static str]) -> Self { - StructDeserializer { - de, - fields, - } + StructDeserializer { de, fields } } } @@ -470,3 +472,27 @@ impl<'a, 'de: 'a> de::SeqAccess<'de> for StructDeserializer<'a, 'de> { return Some(self.fields.len()); } } + +#[cfg(test)] +mod tests { + use super::*; + use std::any::type_name; + #[test] + fn check_type_name_properties() { + // This ensures that certain properties for the + // std::any::type_name function hold, that + // this code relies on. The type_name docs explicitly + // mention that the actual format of the output + // is unspecified and change is likely. + // This test makes sure that such change is detected + // by this test failing, and not things silently breaking. + // Specifically, we rely on this behaviour further above + // in the file to special case Tagged parsing. 
+ let tuple = type_name::<()>(); + let tagged_tuple = type_name::>(); + let tagged_value = type_name::>(); + assert!(tuple != tagged_tuple); + assert!(tuple != tagged_value); + assert!(tagged_tuple != tagged_value); + } +} diff --git a/src/parser/hir.rs b/src/parser/hir.rs index 3e155cc059..7108b0f7f9 100644 --- a/src/parser/hir.rs +++ b/src/parser/hir.rs @@ -1,11 +1,13 @@ pub(crate) mod baseline_parse; -pub(crate) mod baseline_parse_tokens; pub(crate) mod binary; +pub(crate) mod expand_external_tokens; pub(crate) mod external_command; pub(crate) mod named; pub(crate) mod path; +pub(crate) mod syntax_shape; +pub(crate) mod tokens_iterator; -use crate::parser::{registry, Unit}; +use crate::parser::{registry, Operator, Unit}; use crate::prelude::*; use derive_new::new; use getset::Getters; @@ -14,27 +16,18 @@ use std::fmt; use std::path::PathBuf; use crate::evaluate::Scope; +use crate::parser::parse::tokens::RawNumber; +use crate::traits::ToDebug; -pub(crate) use self::baseline_parse::{ - baseline_parse_single_token, baseline_parse_token_as_number, baseline_parse_token_as_path, - baseline_parse_token_as_string, -}; -pub(crate) use self::baseline_parse_tokens::{baseline_parse_next_expr, TokensIterator}; pub(crate) use self::binary::Binary; pub(crate) use self::external_command::ExternalCommand; pub(crate) use self::named::NamedArguments; pub(crate) use self::path::Path; +pub(crate) use self::syntax_shape::ExpandContext; +pub(crate) use self::tokens_iterator::debug::debug_tokens; +pub(crate) use self::tokens_iterator::TokensIterator; -pub use self::baseline_parse_tokens::SyntaxType; - -pub fn path(head: impl Into, tail: Vec>>) -> Path { - Path::new( - head.into(), - tail.into_iter() - .map(|item| item.map(|string| string.into())) - .collect(), - ) -} +pub use self::syntax_shape::SyntaxShape; #[derive(Debug, Clone, Eq, PartialEq, Getters, Serialize, Deserialize, new)] pub struct Call { @@ -83,14 +76,17 @@ impl ToDebug for Call { #[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)] pub enum RawExpression { Literal(Literal), + ExternalWord, Synthetic(Synthetic), Variable(Variable), Binary(Box), Block(Vec), List(Vec), Path(Box), + FilePath(PathBuf), ExternalCommand(ExternalCommand), + Command(Span), Boolean(bool), } @@ -113,23 +109,58 @@ impl RawExpression { match self { RawExpression::Literal(literal) => literal.type_name(), RawExpression::Synthetic(synthetic) => synthetic.type_name(), - RawExpression::FilePath(..) => "filepath", + RawExpression::Command(..) => "command", + RawExpression::ExternalWord => "external word", + RawExpression::FilePath(..) => "file path", RawExpression::Variable(..) => "variable", RawExpression::List(..) => "list", RawExpression::Binary(..) => "binary", RawExpression::Block(..) => "block", - RawExpression::Path(..) => "path", + RawExpression::Path(..) => "variable path", RawExpression::Boolean(..) => "boolean", RawExpression::ExternalCommand(..) 
=> "external", } } } -pub type Expression = Tagged; +pub type Expression = Spanned; + +impl std::fmt::Display for Expression { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + let span = self.span; + + match &self.item { + RawExpression::Literal(literal) => write!(f, "{}", literal.tagged(self.span)), + RawExpression::Synthetic(Synthetic::String(s)) => write!(f, "{}", s), + RawExpression::Command(_) => write!(f, "Command{{ {}..{} }}", span.start(), span.end()), + RawExpression::ExternalWord => { + write!(f, "ExternalWord{{ {}..{} }}", span.start(), span.end()) + } + RawExpression::FilePath(file) => write!(f, "Path{{ {} }}", file.display()), + RawExpression::Variable(variable) => write!(f, "{}", variable), + RawExpression::List(list) => f + .debug_list() + .entries(list.iter().map(|e| format!("{}", e))) + .finish(), + RawExpression::Binary(binary) => write!(f, "{}", binary), + RawExpression::Block(items) => { + write!(f, "Block")?; + f.debug_set() + .entries(items.iter().map(|i| format!("{}", i))) + .finish() + } + RawExpression::Path(path) => write!(f, "{}", path), + RawExpression::Boolean(b) => write!(f, "${}", b), + RawExpression::ExternalCommand(..) => { + write!(f, "ExternalComment{{ {}..{} }}", span.start(), span.end()) + } + } + } +} impl Expression { pub(crate) fn number(i: impl Into, span: impl Into) -> Expression { - Tagged::from_simple_spanned_item(RawExpression::Literal(Literal::Number(i.into())), span) + RawExpression::Literal(Literal::Number(i.into())).spanned(span.into()) } pub(crate) fn size( @@ -137,58 +168,89 @@ impl Expression { unit: impl Into, span: impl Into, ) -> Expression { - Tagged::from_simple_spanned_item( - RawExpression::Literal(Literal::Size(i.into(), unit.into())), - span, - ) + RawExpression::Literal(Literal::Size(i.into(), unit.into())).spanned(span.into()) } pub(crate) fn synthetic_string(s: impl Into) -> Expression { - RawExpression::Synthetic(Synthetic::String(s.into())).tagged_unknown() + RawExpression::Synthetic(Synthetic::String(s.into())).spanned_unknown() } pub(crate) fn string(inner: impl Into, outer: impl Into) -> Expression { - Tagged::from_simple_spanned_item( - RawExpression::Literal(Literal::String(inner.into())), - outer.into(), - ) + RawExpression::Literal(Literal::String(inner.into())).spanned(outer.into()) + } + + pub(crate) fn path( + head: Expression, + tail: Vec>>, + span: impl Into, + ) -> Expression { + let tail = tail.into_iter().map(|t| t.map(|s| s.into())).collect(); + RawExpression::Path(Box::new(Path::new(head, tail))).spanned(span.into()) + } + + pub(crate) fn dot_member(head: Expression, next: Spanned>) -> Expression { + let Spanned { item, span } = head; + let new_span = head.span.until(next.span); + + match item { + RawExpression::Path(path) => { + let (head, mut tail) = path.parts(); + + tail.push(next.map(|i| i.into())); + Expression::path(head, tail, new_span) + } + + other => Expression::path(other.spanned(span), vec![next], new_span), + } + } + + pub(crate) fn infix( + left: Expression, + op: Spanned>, + right: Expression, + ) -> Expression { + let new_span = left.span.until(right.span); + + RawExpression::Binary(Box::new(Binary::new(left, op.map(|o| o.into()), right))) + .spanned(new_span) } pub(crate) fn file_path(path: impl Into, outer: impl Into) -> Expression { - Tagged::from_simple_spanned_item(RawExpression::FilePath(path.into()), outer.into()) + RawExpression::FilePath(path.into()).spanned(outer) + } + + pub(crate) fn list(list: Vec, span: impl Into) -> Expression { + 
RawExpression::List(list).spanned(span) } pub(crate) fn bare(span: impl Into) -> Expression { - Tagged::from_simple_spanned_item(RawExpression::Literal(Literal::Bare), span.into()) + RawExpression::Literal(Literal::Bare).spanned(span) + } + + pub(crate) fn pattern(inner: impl Into, outer: impl Into) -> Expression { + RawExpression::Literal(Literal::GlobPattern(inner.into())).spanned(outer.into()) } pub(crate) fn variable(inner: impl Into, outer: impl Into) -> Expression { - Tagged::from_simple_spanned_item( - RawExpression::Variable(Variable::Other(inner.into())), - outer.into(), - ) + RawExpression::Variable(Variable::Other(inner.into())).spanned(outer) } pub(crate) fn external_command(inner: impl Into, outer: impl Into) -> Expression { - Tagged::from_simple_spanned_item( - RawExpression::ExternalCommand(ExternalCommand::new(inner.into())), - outer.into(), - ) + RawExpression::ExternalCommand(ExternalCommand::new(inner.into())).spanned(outer) } pub(crate) fn it_variable(inner: impl Into, outer: impl Into) -> Expression { - Tagged::from_simple_spanned_item( - RawExpression::Variable(Variable::It(inner.into())), - outer.into(), - ) + RawExpression::Variable(Variable::It(inner.into())).spanned(outer) } } impl ToDebug for Expression { fn fmt_debug(&self, f: &mut fmt::Formatter, source: &str) -> fmt::Result { - match self.item() { - RawExpression::Literal(l) => l.tagged(self.span()).fmt_debug(f, source), + match &self.item { + RawExpression::Literal(l) => l.spanned(self.span).fmt_debug(f, source), RawExpression::FilePath(p) => write!(f, "{}", p.display()), + RawExpression::ExternalWord => write!(f, "{}", self.span.slice(source)), + RawExpression::Command(tag) => write!(f, "{}", tag.slice(source)), RawExpression::Synthetic(Synthetic::String(s)) => write!(f, "{:?}", s), RawExpression::Variable(Variable::It(_)) => write!(f, "$it"), RawExpression::Variable(Variable::Other(s)) => write!(f, "${}", s.slice(source)), @@ -219,27 +281,54 @@ impl ToDebug for Expression { } } -impl From> for Expression { - fn from(path: Tagged) -> Expression { +impl From> for Expression { + fn from(path: Spanned) -> Expression { path.map(|p| RawExpression::Path(Box::new(p))) } } +/// Literals are expressions that are: +/// +/// 1. Copy +/// 2. Can be evaluated without additional context +/// 3. 
Evaluation cannot produce an error #[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)] pub enum Literal { Number(Number), Size(Number, Unit), String(Span), + GlobPattern(String), Bare, } -impl ToDebug for Tagged<&Literal> { +impl std::fmt::Display for Tagged { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, "{}", Tagged::new(self.tag.clone(), &self.item)) + } +} + +impl std::fmt::Display for Tagged<&Literal> { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + let span = self.tag.span; + + match &self.item { + Literal::Number(number) => write!(f, "{}", number), + Literal::Size(number, unit) => write!(f, "{}{}", number, unit.as_str()), + Literal::String(_) => write!(f, "String{{ {}..{} }}", span.start(), span.end()), + Literal::GlobPattern(_) => write!(f, "Glob{{ {}..{} }}", span.start(), span.end()), + Literal::Bare => write!(f, "Bare{{ {}..{} }}", span.start(), span.end()), + } + } +} + +impl ToDebug for Spanned<&Literal> { fn fmt_debug(&self, f: &mut fmt::Formatter, source: &str) -> fmt::Result { - match self.item() { - Literal::Number(number) => write!(f, "{:?}", *number), + match self.item { + Literal::Number(number) => write!(f, "{:?}", number), Literal::Size(number, unit) => write!(f, "{:?}{:?}", *number, unit), - Literal::String(span) => write!(f, "{}", span.slice(source)), - Literal::Bare => write!(f, "{}", self.span().slice(source)), + Literal::String(tag) => write!(f, "{}", tag.slice(source)), + Literal::GlobPattern(_) => write!(f, "{}", self.span.slice(source)), + Literal::Bare => write!(f, "{}", self.span.slice(source)), } } } @@ -251,6 +340,7 @@ impl Literal { Literal::Size(..) => "size", Literal::String(..) => "string", Literal::Bare => "string", + Literal::GlobPattern(_) => "pattern", } } } @@ -260,3 +350,12 @@ pub enum Variable { It(Span), Other(Span), } + +impl std::fmt::Display for Variable { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + Variable::It(_) => write!(f, "$it"), + Variable::Other(span) => write!(f, "${{ {}..{} }}", span.start(), span.end()), + } + } +} diff --git a/src/parser/hir/baseline_parse.rs b/src/parser/hir/baseline_parse.rs index d76a88d510..87c2771955 100644 --- a/src/parser/hir/baseline_parse.rs +++ b/src/parser/hir/baseline_parse.rs @@ -1,79 +1,2 @@ -use crate::context::Context; -use crate::parser::{hir, RawToken, Token}; -use crate::Text; -use std::path::PathBuf; - -pub fn baseline_parse_single_token(token: &Token, source: &Text) -> hir::Expression { - match *token.item() { - RawToken::Number(number) => hir::Expression::number(number.to_number(source), token.span()), - RawToken::Size(int, unit) => { - hir::Expression::size(int.to_number(source), unit, token.span()) - } - RawToken::String(span) => hir::Expression::string(span, token.span()), - RawToken::Variable(span) if span.slice(source) == "it" => { - hir::Expression::it_variable(span, token.span()) - } - RawToken::Variable(span) => hir::Expression::variable(span, token.span()), - RawToken::External(span) => hir::Expression::external_command(span, token.span()), - RawToken::Bare => hir::Expression::bare(token.span()), - } -} - -pub fn baseline_parse_token_as_number(token: &Token, source: &Text) -> hir::Expression { - match *token.item() { - RawToken::Variable(span) if span.slice(source) == "it" => { - hir::Expression::it_variable(span, token.span()) - } - RawToken::External(span) => hir::Expression::external_command(span, token.span()), - RawToken::Variable(span) => 
hir::Expression::variable(span, token.span()), - RawToken::Number(number) => hir::Expression::number(number.to_number(source), token.span()), - RawToken::Size(number, unit) => { - hir::Expression::size(number.to_number(source), unit, token.span()) - } - RawToken::Bare => hir::Expression::bare(token.span()), - RawToken::String(span) => hir::Expression::string(span, token.span()), - } -} - -pub fn baseline_parse_token_as_string(token: &Token, source: &Text) -> hir::Expression { - match *token.item() { - RawToken::Variable(span) if span.slice(source) == "it" => { - hir::Expression::it_variable(span, token.span()) - } - RawToken::External(span) => hir::Expression::external_command(span, token.span()), - RawToken::Variable(span) => hir::Expression::variable(span, token.span()), - RawToken::Number(_) => hir::Expression::bare(token.span()), - RawToken::Size(_, _) => hir::Expression::bare(token.span()), - RawToken::Bare => hir::Expression::bare(token.span()), - RawToken::String(span) => hir::Expression::string(span, token.span()), - } -} - -pub fn baseline_parse_token_as_path( - token: &Token, - context: &Context, - source: &Text, -) -> hir::Expression { - match *token.item() { - RawToken::Variable(span) if span.slice(source) == "it" => { - hir::Expression::it_variable(span, token.span()) - } - RawToken::External(span) => hir::Expression::external_command(span, token.span()), - RawToken::Variable(span) => hir::Expression::variable(span, token.span()), - RawToken::Number(_) => hir::Expression::bare(token.span()), - RawToken::Size(_, _) => hir::Expression::bare(token.span()), - RawToken::Bare => hir::Expression::file_path( - expand_path(token.span().slice(source), context), - token.span(), - ), - RawToken::String(span) => { - hir::Expression::file_path(expand_path(span.slice(source), context), token.span()) - } - } -} - -pub fn expand_path(string: &str, context: &Context) -> PathBuf { - let expanded = shellexpand::tilde_with_context(string, || context.shell_manager.homedir()); - - PathBuf::from(expanded.as_ref()) -} +#[cfg(test)] +mod tests; diff --git a/src/parser/hir/baseline_parse/tests.rs b/src/parser/hir/baseline_parse/tests.rs new file mode 100644 index 0000000000..ddd4af4930 --- /dev/null +++ b/src/parser/hir/baseline_parse/tests.rs @@ -0,0 +1,117 @@ +use crate::commands::classified::InternalCommand; +use crate::commands::ClassifiedCommand; +use crate::env::host::BasicHost; +use crate::parser::hir; +use crate::parser::hir::syntax_shape::*; +use crate::parser::hir::TokensIterator; +use crate::parser::parse::token_tree_builder::{CurriedToken, TokenTreeBuilder as b}; +use crate::parser::TokenNode; +use crate::{Span, SpannedItem, Tag, Text}; +use pretty_assertions::assert_eq; +use std::fmt::Debug; + +#[test] +fn test_parse_string() { + parse_tokens(StringShape, vec![b::string("hello")], |tokens| { + hir::Expression::string(inner_string_span(tokens[0].span()), tokens[0].span()) + }); +} + +#[test] +fn test_parse_path() { + parse_tokens( + VariablePathShape, + vec![b::var("it"), b::op("."), b::bare("cpu")], + |tokens| { + let (outer_var, inner_var) = tokens[0].expect_var(); + let bare = tokens[2].expect_bare(); + hir::Expression::path( + hir::Expression::it_variable(inner_var, outer_var), + vec!["cpu".spanned(bare)], + outer_var.until(bare), + ) + }, + ); + + parse_tokens( + VariablePathShape, + vec![ + b::var("cpu"), + b::op("."), + b::bare("amount"), + b::op("."), + b::string("max ghz"), + ], + |tokens| { + let (outer_var, inner_var) = tokens[0].expect_var(); + let amount = 
tokens[2].expect_bare(); + let (outer_max_ghz, _) = tokens[4].expect_string(); + + hir::Expression::path( + hir::Expression::variable(inner_var, outer_var), + vec!["amount".spanned(amount), "max ghz".spanned(outer_max_ghz)], + outer_var.until(outer_max_ghz), + ) + }, + ); +} + +#[test] +fn test_parse_command() { + parse_tokens( + ClassifiedCommandShape, + vec![b::bare("ls"), b::sp(), b::pattern("*.txt")], + |tokens| { + let bare = tokens[0].expect_bare(); + let pattern = tokens[2].expect_pattern(); + + ClassifiedCommand::Internal(InternalCommand::new( + "ls".to_string(), + Tag { + span: bare, + anchor: None, + }, + hir::Call { + head: Box::new(hir::RawExpression::Command(bare).spanned(bare)), + positional: Some(vec![hir::Expression::pattern("*.txt", pattern)]), + named: None, + }, + )) + // hir::Expression::path( + // hir::Expression::variable(inner_var, outer_var), + // vec!["cpu".tagged(bare)], + // outer_var.until(bare), + // ) + }, + ); +} + +fn parse_tokens( + shape: impl ExpandSyntax, + tokens: Vec, + expected: impl FnOnce(&[TokenNode]) -> T, +) { + let tokens = b::token_list(tokens); + let (tokens, source) = b::build(tokens); + + ExpandContext::with_empty(&Text::from(source), |context| { + let tokens = tokens.expect_list(); + let mut iterator = TokensIterator::all(tokens, *context.span()); + + let expr = expand_syntax(&shape, &mut iterator, &context); + + let expr = match expr { + Ok(expr) => expr, + Err(err) => { + crate::cli::print_err(err, &BasicHost, context.source().clone()); + panic!("Parse failed"); + } + }; + + assert_eq!(expr, expected(tokens)); + }) +} + +fn inner_string_span(span: Span) -> Span { + Span::new(span.start() + 1, span.end() - 1) +} diff --git a/src/parser/hir/baseline_parse_tokens.rs b/src/parser/hir/baseline_parse_tokens.rs deleted file mode 100644 index ca9b0bb37c..0000000000 --- a/src/parser/hir/baseline_parse_tokens.rs +++ /dev/null @@ -1,454 +0,0 @@ -use crate::context::Context; -use crate::errors::ShellError; -use crate::parser::{ - hir, - hir::{ - baseline_parse_single_token, baseline_parse_token_as_number, baseline_parse_token_as_path, - baseline_parse_token_as_string, - }, - DelimitedNode, Delimiter, PathNode, RawToken, TokenNode, -}; -use crate::{Span, Tag, Tagged, TaggedItem, Text}; -use derive_new::new; -use log::trace; -use serde::{Deserialize, Serialize}; - -pub fn baseline_parse_tokens( - token_nodes: &mut TokensIterator<'_>, - context: &Context, - source: &Text, - syntax_type: SyntaxType, -) -> Result, ShellError> { - let mut exprs: Vec = vec![]; - - loop { - if token_nodes.at_end() { - break; - } - - let expr = baseline_parse_next_expr(token_nodes, context, source, syntax_type)?; - exprs.push(expr); - } - - Ok(exprs) -} - - -#[derive(Debug, Copy, Clone, Serialize, Deserialize)] -pub enum SyntaxType { - Any, - List, - Literal, - String, - Member, - Variable, - Number, - Path, - Binary, - Block, - Boolean, -} - -impl std::fmt::Display for SyntaxType { - fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { - match self { - SyntaxType::Any => write!(f, "Any"), - SyntaxType::List => write!(f, "List"), - SyntaxType::Literal => write!(f, "Literal"), - SyntaxType::String => write!(f, "String"), - SyntaxType::Member => write!(f, "Member"), - SyntaxType::Variable => write!(f, "Variable"), - SyntaxType::Number => write!(f, "Number"), - SyntaxType::Path => write!(f, "Path"), - SyntaxType::Binary => write!(f, "Binary"), - SyntaxType::Block => write!(f, "Block"), - SyntaxType::Boolean => write!(f, "Boolean") - } - } -} - -pub fn 
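The new tests drive each shape through a small harness: build a token list, expand it with the shape under test, and compare the result against an expectation computed from those same tokens, so the two sides cannot drift apart. A minimal standalone sketch of that harness pattern; Token, Expr, Shape and StringShape below are toy stand-ins, not the crate's real parser types.

#[derive(Debug, Clone, PartialEq)]
enum Token {
    Str(String),
}

#[derive(Debug, PartialEq)]
enum Expr {
    String(String),
}

trait Shape {
    fn expand(&self, tokens: &[Token]) -> Result<Expr, String>;
}

#[derive(Debug, Clone, Copy)]
struct StringShape;

impl Shape for StringShape {
    fn expand(&self, tokens: &[Token]) -> Result<Expr, String> {
        match tokens.first() {
            Some(Token::Str(s)) => Ok(Expr::String(s.clone())),
            None => Err("expected a string token, found end of input".to_string()),
        }
    }
}

// The harness: run the shape, then build the expected value from the same tokens.
fn parse_tokens(
    shape: impl Shape,
    tokens: Vec<Token>,
    expected: impl FnOnce(&[Token]) -> Expr,
) {
    let actual = shape.expand(&tokens).expect("parse failed");
    assert_eq!(actual, expected(&tokens));
}

#[test]
fn parses_a_string() {
    parse_tokens(StringShape, vec![Token::Str("hello".into())], |tokens| {
        let Token::Str(s) = &tokens[0];
        Expr::String(s.clone())
    });
}
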
baseline_parse_next_expr( - tokens: &mut TokensIterator, - context: &Context, - source: &Text, - syntax_type: SyntaxType, -) -> Result { - let next = tokens - .next() - .ok_or_else(|| ShellError::string("Expected token, found none"))?; - - trace!(target: "nu::parser::parse_one_expr", "syntax_type={:?}, token={:?}", syntax_type, next); - - match (syntax_type, next) { - (SyntaxType::Path, TokenNode::Token(token)) => { - return Ok(baseline_parse_token_as_path(token, context, source)) - } - - (SyntaxType::Path, token) => { - return Err(ShellError::type_error( - "Path", - token.type_name().simple_spanned(token.span()), - )) - } - - (SyntaxType::String, TokenNode::Token(token)) => { - return Ok(baseline_parse_token_as_string(token, source)); - } - - (SyntaxType::String, token) => { - return Err(ShellError::type_error( - "String", - token.type_name().simple_spanned(token.span()), - )) - } - - (SyntaxType::Number, TokenNode::Token(token)) => { - return Ok(baseline_parse_token_as_number(token, source)); - } - - (SyntaxType::Number, token) => { - return Err(ShellError::type_error( - "Numeric", - token.type_name().simple_spanned(token.span()), - )) - } - - // TODO: More legit member processing - (SyntaxType::Member, TokenNode::Token(token)) => { - return Ok(baseline_parse_token_as_string(token, source)); - } - - (SyntaxType::Member, token) => { - return Err(ShellError::type_error( - "member", - token.type_name().simple_spanned(token.span()), - )) - } - - (SyntaxType::Any, _) => {} - (SyntaxType::List, _) => {} - (SyntaxType::Literal, _) => {} - (SyntaxType::Variable, _) => {} - (SyntaxType::Binary, _) => {} - (SyntaxType::Block, _) => {} - (SyntaxType::Boolean, _) => {} - }; - - let first = baseline_parse_semantic_token(next, context, source)?; - - let possible_op = tokens.peek(); - - let op = match possible_op { - Some(TokenNode::Operator(op)) => op.clone(), - _ => return Ok(first), - }; - - tokens.next(); - - let second = match tokens.next() { - None => { - return Err(ShellError::labeled_error( - "Expected something after an operator", - "operator", - op.span(), - )) - } - Some(token) => baseline_parse_semantic_token(token, context, source)?, - }; - - // We definitely have a binary expression here -- let's see if we should coerce it into a block - - match syntax_type { - SyntaxType::Any => { - let span = (first.span().start, second.span().end); - let binary = hir::Binary::new(first, op, second); - let binary = hir::RawExpression::Binary(Box::new(binary)); - let binary = Tagged::from_simple_spanned_item(binary, span); - - Ok(binary) - } - - SyntaxType::Block => { - let span = (first.span().start, second.span().end); - - let path: Tagged = match first { - Tagged { - item: hir::RawExpression::Literal(hir::Literal::Bare), - tag: Tag { span, .. }, - } => { - let string = - Tagged::from_simple_spanned_item(span.slice(source).to_string(), span); - let path = hir::Path::new( - Tagged::from_simple_spanned_item( - // TODO: Deal with synthetic nodes that have no representation at all in source - hir::RawExpression::Variable(hir::Variable::It(Span::from((0, 0)))), - (0, 0), - ), - vec![string], - ); - let path = hir::RawExpression::Path(Box::new(path)); - Tagged::from_simple_spanned_item(path, first.span()) - } - Tagged { - item: hir::RawExpression::Literal(hir::Literal::String(inner)), - tag: Tag { span, .. 
}, - } => { - let string = - Tagged::from_simple_spanned_item(inner.slice(source).to_string(), span); - let path = hir::Path::new( - Tagged::from_simple_spanned_item( - // TODO: Deal with synthetic nodes that have no representation at all in source - hir::RawExpression::Variable(hir::Variable::It(Span::from((0, 0)))), - (0, 0), - ), - vec![string], - ); - let path = hir::RawExpression::Path(Box::new(path)); - Tagged::from_simple_spanned_item(path, first.span()) - } - Tagged { - item: hir::RawExpression::Variable(..), - .. - } => first, - Tagged { - tag: Tag { span, .. }, - item, - } => { - return Err(ShellError::labeled_error( - "The first part of an un-braced block must be a column name", - item.type_name(), - span, - )) - } - }; - - let binary = hir::Binary::new(path, op, second); - let binary = hir::RawExpression::Binary(Box::new(binary)); - let binary = Tagged::from_simple_spanned_item(binary, span); - - let block = hir::RawExpression::Block(vec![binary]); - let block = Tagged::from_simple_spanned_item(block, span); - - Ok(block) - } - - other => Err(ShellError::unimplemented(format!( - "coerce hint {:?}", - other - ))), - } -} - -pub fn baseline_parse_semantic_token( - token: &TokenNode, - context: &Context, - source: &Text, -) -> Result { - match token { - TokenNode::Token(token) => Ok(baseline_parse_single_token(token, source)), - TokenNode::Call(_call) => unimplemented!(), - TokenNode::Delimited(delimited) => baseline_parse_delimited(delimited, context, source), - TokenNode::Pipeline(_pipeline) => unimplemented!(), - TokenNode::Operator(op) => Err(ShellError::syntax_error( - "Unexpected operator".tagged(op.tag), - )), - TokenNode::Flag(flag) => Err(ShellError::syntax_error("Unexpected flag".tagged(flag.tag))), - TokenNode::Member(span) => Err(ShellError::syntax_error( - "BUG: Top-level member".tagged(span), - )), - TokenNode::Whitespace(span) => Err(ShellError::syntax_error( - "BUG: Whitespace found during parse".tagged(span), - )), - TokenNode::Error(error) => Err(*error.item.clone()), - TokenNode::Path(path) => baseline_parse_path(path, context, source), - } -} - -pub fn baseline_parse_delimited( - token: &Tagged, - context: &Context, - source: &Text, -) -> Result { - match token.delimiter() { - Delimiter::Brace => { - let children = token.children(); - let exprs = baseline_parse_tokens( - &mut TokensIterator::new(children), - context, - source, - SyntaxType::Any, - )?; - - let expr = hir::RawExpression::Block(exprs); - Ok(Tagged::from_simple_spanned_item(expr, token.span())) - } - Delimiter::Paren => unimplemented!(), - Delimiter::Square => { - let children = token.children(); - let exprs = baseline_parse_tokens( - &mut TokensIterator::new(children), - context, - source, - SyntaxType::Any, - )?; - - let expr = hir::RawExpression::List(exprs); - Ok(expr.tagged(Tag::unknown_origin(token.span()))) - } - } -} - -pub fn baseline_parse_path( - token: &Tagged, - context: &Context, - source: &Text, -) -> Result { - let head = baseline_parse_semantic_token(token.head(), context, source)?; - - let mut tail = vec![]; - - for part in token.tail() { - let string = match part { - TokenNode::Token(token) => match token.item() { - RawToken::Bare => token.span().slice(source), - RawToken::String(span) => span.slice(source), - RawToken::Number(_) - | RawToken::Size(..) 
- | RawToken::Variable(_) - | RawToken::External(_) => { - return Err(ShellError::type_error( - "String", - token.type_name().simple_spanned(part), - )) - } - }, - - TokenNode::Member(span) => span.slice(source), - - // TODO: Make this impossible - other => { - return Err(ShellError::syntax_error( - format!("{} in path", other.type_name()).tagged(other.span()), - )) - } - } - .to_string(); - - tail.push(string.simple_spanned(part)); - } - - Ok(hir::path(head, tail).simple_spanned(token).into()) -} - -#[derive(Debug, new)] -pub struct TokensIterator<'a> { - tokens: &'a [TokenNode], - #[new(default)] - index: usize, - #[new(default)] - seen: indexmap::IndexSet, -} - -impl TokensIterator<'_> { - pub fn remove(&mut self, position: usize) { - self.seen.insert(position); - } - - pub fn len(&self) -> usize { - self.tokens.len() - } - - pub fn at_end(&self) -> bool { - for index in self.index..self.tokens.len() { - if !self.seen.contains(&index) { - return false; - } - } - - true - } - - pub fn advance(&mut self) { - self.seen.insert(self.index); - self.index += 1; - } - - pub fn extract(&mut self, f: impl Fn(&TokenNode) -> Option) -> Option<(usize, T)> { - for (i, item) in self.tokens.iter().enumerate() { - if self.seen.contains(&i) { - continue; - } - - match f(item) { - None => { - continue; - } - Some(value) => { - self.seen.insert(i); - return Some((i, value)); - } - } - } - - None - } - - pub fn move_to(&mut self, pos: usize) { - self.index = pos; - } - - pub fn restart(&mut self) { - self.index = 0; - } - - pub fn clone(&self) -> TokensIterator { - TokensIterator { - tokens: self.tokens, - index: self.index, - seen: self.seen.clone(), - } - } - - pub fn peek(&self) -> Option<&TokenNode> { - let mut tokens = self.clone(); - - tokens.next() - } - - pub fn debug_remaining(&self) -> Vec { - let mut tokens = self.clone(); - tokens.restart(); - tokens.cloned().collect() - } -} - -impl<'a> Iterator for TokensIterator<'a> { - type Item = &'a TokenNode; - - fn next(&mut self) -> Option<&'a TokenNode> { - loop { - if self.index >= self.tokens.len() { - return None; - } - - if self.seen.contains(&self.index) { - self.advance(); - continue; - } - - if self.index >= self.tokens.len() { - return None; - } - - match &self.tokens[self.index] { - TokenNode::Whitespace(_) => { - self.advance(); - } - other => { - self.advance(); - return Some(other); - } - } - } - } -} diff --git a/src/parser/hir/binary.rs b/src/parser/hir/binary.rs index 02a4d416e4..67c597cb86 100644 --- a/src/parser/hir/binary.rs +++ b/src/parser/hir/binary.rs @@ -1,6 +1,6 @@ use crate::parser::{hir::Expression, Operator}; use crate::prelude::*; -use crate::Tagged; + use derive_new::new; use getset::Getters; use serde::{Deserialize, Serialize}; @@ -12,10 +12,16 @@ use std::fmt; #[get = "pub(crate)"] pub struct Binary { left: Expression, - op: Tagged, + op: Spanned, right: Expression, } +impl fmt::Display for Binary { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!(f, "({} {} {})", self.op.as_str(), self.left, self.right) + } +} + impl ToDebug for Binary { fn fmt_debug(&self, f: &mut fmt::Formatter, source: &str) -> fmt::Result { write!(f, "{}", self.left.debug(source))?; diff --git a/src/parser/hir/expand_external_tokens.rs b/src/parser/hir/expand_external_tokens.rs new file mode 100644 index 0000000000..5733a30c81 --- /dev/null +++ b/src/parser/hir/expand_external_tokens.rs @@ -0,0 +1,226 @@ +use crate::errors::ShellError; +#[cfg(not(coloring_in_tokens))] +use crate::parser::hir::syntax_shape::FlatShape; +use 
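The TokensIterator deleted along with this file tracked already-consumed positions in an IndexSet and silently skipped whitespace in next(), so shapes could pull tokens out of order (for example, named flags) and then walk the remainder. A condensed standalone version of that skip-set idea, using std's HashSet instead of indexmap and without the span and peek bookkeeping of the real type:

use std::collections::HashSet;

#[derive(Debug, PartialEq)]
enum TokenNode {
    Whitespace,
    Word(&'static str),
}

struct TokensIterator<'a> {
    tokens: &'a [TokenNode],
    index: usize,
    seen: HashSet<usize>,
}

impl<'a> TokensIterator<'a> {
    fn new(tokens: &'a [TokenNode]) -> Self {
        TokensIterator { tokens, index: 0, seen: HashSet::new() }
    }

    /// Mark a token as consumed out of order (the real parser does this when
    /// it extracts named arguments before walking the positional ones).
    fn remove(&mut self, position: usize) {
        self.seen.insert(position);
    }
}

impl<'a> Iterator for TokensIterator<'a> {
    type Item = &'a TokenNode;

    fn next(&mut self) -> Option<&'a TokenNode> {
        loop {
            if self.index >= self.tokens.len() {
                return None;
            }
            let current = self.index;
            self.index += 1;
            if self.seen.contains(&current) {
                continue; // already consumed out of band
            }
            self.seen.insert(current);
            match &self.tokens[current] {
                TokenNode::Whitespace => continue, // whitespace is skipped silently
                other => return Some(other),
            }
        }
    }
}

fn main() {
    let tokens = [
        TokenNode::Word("ls"),
        TokenNode::Whitespace,
        TokenNode::Word("--flag"),
        TokenNode::Whitespace,
        TokenNode::Word("*.txt"),
    ];
    let mut iter = TokensIterator::new(&tokens);
    iter.remove(2); // pretend "--flag" was already claimed as a named argument
    let rest: Vec<_> = iter.collect();
    assert_eq!(rest, vec![&TokenNode::Word("ls"), &TokenNode::Word("*.txt")]);
}
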
crate::parser::{ + hir::syntax_shape::{ + color_syntax, expand_atom, AtomicToken, ColorSyntax, ExpandContext, ExpansionRule, + MaybeSpaceShape, + }, + TokenNode, TokensIterator, +}; +use crate::{Span, Spanned, Text}; + +pub fn expand_external_tokens( + token_nodes: &mut TokensIterator<'_>, + source: &Text, +) -> Result>, ShellError> { + let mut out: Vec> = vec![]; + + loop { + if let Some(span) = expand_next_expression(token_nodes)? { + out.push(span.spanned_string(source)); + } else { + break; + } + } + + Ok(out) +} + +#[derive(Debug, Copy, Clone)] +pub struct ExternalTokensShape; + +#[cfg(not(coloring_in_tokens))] +impl ColorSyntax for ExternalTokensShape { + type Info = (); + type Input = (); + + fn color_syntax<'a, 'b>( + &self, + _input: &(), + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + shapes: &mut Vec>, + ) -> Self::Info { + loop { + // Allow a space + color_syntax(&MaybeSpaceShape, token_nodes, context, shapes); + + // Process an external expression. External expressions are mostly words, with a + // few exceptions (like $variables and path expansion rules) + match color_syntax(&ExternalExpression, token_nodes, context, shapes).1 { + ExternalExpressionResult::Eof => break, + ExternalExpressionResult::Processed => continue, + } + } + } +} + +#[cfg(coloring_in_tokens)] +impl ColorSyntax for ExternalTokensShape { + type Info = (); + type Input = (); + + fn name(&self) -> &'static str { + "ExternalTokensShape" + } + + fn color_syntax<'a, 'b>( + &self, + _input: &(), + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + ) -> Self::Info { + loop { + // Allow a space + color_syntax(&MaybeSpaceShape, token_nodes, context); + + // Process an external expression. External expressions are mostly words, with a + // few exceptions (like $variables and path expansion rules) + match color_syntax(&ExternalExpression, token_nodes, context).1 { + ExternalExpressionResult::Eof => break, + ExternalExpressionResult::Processed => continue, + } + } + } +} + +pub fn expand_next_expression( + token_nodes: &mut TokensIterator<'_>, +) -> Result, ShellError> { + let first = token_nodes.next_non_ws(); + + let first = match first { + None => return Ok(None), + Some(v) => v, + }; + + let first = triage_external_head(first)?; + let mut last = first; + + loop { + let continuation = triage_continuation(token_nodes)?; + + if let Some(continuation) = continuation { + last = continuation; + } else { + break; + } + } + + Ok(Some(first.until(last))) +} + +fn triage_external_head(node: &TokenNode) -> Result { + Ok(match node { + TokenNode::Token(token) => token.span, + TokenNode::Call(_call) => unimplemented!("TODO: OMG"), + TokenNode::Nodes(_nodes) => unimplemented!("TODO: OMG"), + TokenNode::Delimited(_delimited) => unimplemented!("TODO: OMG"), + TokenNode::Pipeline(_pipeline) => unimplemented!("TODO: OMG"), + TokenNode::Flag(flag) => flag.span, + TokenNode::Whitespace(_whitespace) => { + unreachable!("This function should be called after next_non_ws()") + } + TokenNode::Error(_error) => unimplemented!("TODO: OMG"), + }) +} + +fn triage_continuation<'a, 'b>( + nodes: &'a mut TokensIterator<'b>, +) -> Result, ShellError> { + let mut peeked = nodes.peek_any(); + + let node = match peeked.node { + None => return Ok(None), + Some(node) => node, + }; + + match &node { + node if node.is_whitespace() => return Ok(None), + TokenNode::Token(..) | TokenNode::Flag(..) => {} + TokenNode::Call(..) => unimplemented!("call"), + TokenNode::Nodes(..) 
=> unimplemented!("nodes"), + TokenNode::Delimited(..) => unimplemented!("delimited"), + TokenNode::Pipeline(..) => unimplemented!("pipeline"), + TokenNode::Whitespace(..) => unimplemented!("whitespace"), + TokenNode::Error(..) => unimplemented!("error"), + } + + peeked.commit(); + Ok(Some(node.span())) +} + +#[must_use] +enum ExternalExpressionResult { + Eof, + Processed, +} + +#[derive(Debug, Copy, Clone)] +struct ExternalExpression; + +#[cfg(not(coloring_in_tokens))] +impl ColorSyntax for ExternalExpression { + type Info = ExternalExpressionResult; + type Input = (); + + fn color_syntax<'a, 'b>( + &self, + _input: &(), + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + shapes: &mut Vec>, + ) -> ExternalExpressionResult { + let atom = match expand_atom( + token_nodes, + "external word", + context, + ExpansionRule::permissive(), + ) { + Err(_) => unreachable!("TODO: separate infallible expand_atom"), + Ok(Spanned { + item: AtomicToken::Eof { .. }, + .. + }) => return ExternalExpressionResult::Eof, + Ok(atom) => atom, + }; + + atom.color_tokens(shapes); + return ExternalExpressionResult::Processed; + } +} + +#[cfg(coloring_in_tokens)] +impl ColorSyntax for ExternalExpression { + type Info = ExternalExpressionResult; + type Input = (); + + fn name(&self) -> &'static str { + "ExternalExpression" + } + + fn color_syntax<'a, 'b>( + &self, + _input: &(), + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + ) -> ExternalExpressionResult { + let atom = match expand_atom( + token_nodes, + "external word", + context, + ExpansionRule::permissive(), + ) { + Err(_) => unreachable!("TODO: separate infallible expand_atom"), + Ok(Spanned { + item: AtomicToken::Eof { .. }, + .. + }) => return ExternalExpressionResult::Eof, + Ok(atom) => atom, + }; + + token_nodes.mutate_shapes(|shapes| atom.color_tokens(shapes)); + return ExternalExpressionResult::Processed; + } +} diff --git a/src/parser/hir/external_command.rs b/src/parser/hir/external_command.rs index 8511cce1e0..df71328cab 100644 --- a/src/parser/hir/external_command.rs +++ b/src/parser/hir/external_command.rs @@ -9,7 +9,7 @@ use std::fmt; )] #[get = "pub(crate)"] pub struct ExternalCommand { - name: Span, + pub(crate) name: Span, } impl ToDebug for ExternalCommand { diff --git a/src/parser/hir/named.rs b/src/parser/hir/named.rs index 96d5132fb8..f7387e4fd4 100644 --- a/src/parser/hir/named.rs +++ b/src/parser/hir/named.rs @@ -1,7 +1,6 @@ use crate::parser::hir::Expression; use crate::parser::Flag; use crate::prelude::*; -use crate::Span; use derive_new::new; use indexmap::IndexMap; use log::trace; @@ -11,7 +10,7 @@ use std::fmt; #[derive(Debug, Clone, Eq, PartialEq, Serialize, Deserialize)] pub enum NamedValue { AbsentSwitch, - PresentSwitch(Span), + PresentSwitch(Tag), AbsentValue, Value(Expression), } @@ -27,7 +26,7 @@ impl ToDebug for NamedArguments { for (name, value) in &self.named { match value { NamedValue::AbsentSwitch => continue, - NamedValue::PresentSwitch(span) => write!(f, " --{}", span.slice(source))?, + NamedValue::PresentSwitch(tag) => write!(f, " --{}", tag.slice(source))?, NamedValue::AbsentValue => continue, NamedValue::Value(expr) => write!(f, " --{} {}", name, expr.debug(source))?, } @@ -44,9 +43,13 @@ impl NamedArguments { match switch { None => self.named.insert(name.into(), NamedValue::AbsentSwitch), - Some(flag) => self - .named - .insert(name, NamedValue::PresentSwitch(*flag.name())), + Some(flag) => self.named.insert( + name, + NamedValue::PresentSwitch(Tag { + span: *flag.name(), + 
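Nearly every shape in this change is implemented twice: once under #[cfg(not(coloring_in_tokens))], where shapes are pushed into an external Vec, and once under #[cfg(coloring_in_tokens)], where they are recorded on the TokensIterator itself, so both code paths keep compiling during the migration. A minimal sketch of gating two impls of one trait on a custom cfg; how the flag is actually enabled is not shown in this diff, so the build-script line mentioned in the comment is only the standard mechanism, not necessarily how this crate wires it up.

// One trait, two implementations, selected at compile time by a custom cfg.
// The cfg could be emitted from a build script, e.g.
//     println!("cargo:rustc-cfg=coloring_in_tokens");
// or passed on the command line with RUSTFLAGS="--cfg coloring_in_tokens".
trait ColorSyntax {
    fn describe(&self) -> &'static str;
}

#[derive(Debug, Clone, Copy)]
struct WordShape;

#[cfg(not(coloring_in_tokens))]
impl ColorSyntax for WordShape {
    fn describe(&self) -> &'static str {
        "legacy path: push shapes into an external Vec passed down every call"
    }
}

#[cfg(coloring_in_tokens)]
impl ColorSyntax for WordShape {
    fn describe(&self) -> &'static str {
        "new path: record shapes on the token iterator itself"
    }
}

fn main() {
    println!("{}", WordShape.describe());
}
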
anchor: None, + }), + ), }; } diff --git a/src/parser/hir/path.rs b/src/parser/hir/path.rs index f43edf1762..5867132986 100644 --- a/src/parser/hir/path.rs +++ b/src/parser/hir/path.rs @@ -1,18 +1,47 @@ use crate::parser::hir::Expression; use crate::prelude::*; -use crate::Tagged; use derive_new::new; -use getset::Getters; +use getset::{Getters, MutGetters}; use serde::{Deserialize, Serialize}; use std::fmt; #[derive( - Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Hash, Getters, Serialize, Deserialize, new, + Debug, + Clone, + Eq, + PartialEq, + Ord, + PartialOrd, + Hash, + Getters, + MutGetters, + Serialize, + Deserialize, + new, )] #[get = "pub(crate)"] pub struct Path { head: Expression, - tail: Vec>, + #[get_mut = "pub(crate)"] + tail: Vec>, +} + +impl fmt::Display for Path { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!(f, "{}", self.head)?; + + for entry in &self.tail { + write!(f, ".{}", entry.item)?; + } + + Ok(()) + } +} + +impl Path { + pub(crate) fn parts(self) -> (Expression, Vec>) { + (self.head, self.tail) + } } impl ToDebug for Path { @@ -20,7 +49,7 @@ impl ToDebug for Path { write!(f, "{}", self.head.debug(source))?; for part in &self.tail { - write!(f, ".{}", part.item())?; + write!(f, ".{}", part.item)?; } Ok(()) diff --git a/src/parser/hir/syntax_shape.rs b/src/parser/hir/syntax_shape.rs new file mode 100644 index 0000000000..a38a77500b --- /dev/null +++ b/src/parser/hir/syntax_shape.rs @@ -0,0 +1,1699 @@ +mod block; +mod expression; +pub(crate) mod flat_shape; + +use crate::cli::external_command; +use crate::commands::{ + classified::{ClassifiedPipeline, InternalCommand}, + ClassifiedCommand, Command, +}; +use crate::parser::hir::expand_external_tokens::ExternalTokensShape; +use crate::parser::hir::syntax_shape::block::AnyBlockShape; +use crate::parser::hir::tokens_iterator::Peeked; +use crate::parser::parse_command::{parse_command_tail, CommandTailShape}; +use crate::parser::{ + hir, + hir::{debug_tokens, TokensIterator}, + Operator, RawToken, TokenNode, +}; +use crate::prelude::*; +use derive_new::new; +use getset::Getters; +use log::{self, trace}; +use serde::{Deserialize, Serialize}; +use std::path::{Path, PathBuf}; + +pub(crate) use self::expression::atom::{expand_atom, AtomicToken, ExpansionRule}; +pub(crate) use self::expression::delimited::{ + color_delimited_square, expand_delimited_square, DelimitedShape, +}; +pub(crate) use self::expression::file_path::FilePathShape; +pub(crate) use self::expression::list::{BackoffColoringMode, ExpressionListShape}; +pub(crate) use self::expression::number::{IntShape, NumberShape}; +pub(crate) use self::expression::pattern::{BarePatternShape, PatternShape}; +pub(crate) use self::expression::string::StringShape; +pub(crate) use self::expression::unit::UnitShape; +pub(crate) use self::expression::variable_path::{ + ColorableDotShape, ColumnPathShape, DotShape, ExpressionContinuation, + ExpressionContinuationShape, MemberShape, PathTailShape, VariablePathShape, +}; +pub(crate) use self::expression::{continue_expression, AnyExpressionShape}; +pub(crate) use self::flat_shape::FlatShape; + +#[cfg(not(coloring_in_tokens))] +use crate::parser::parse::pipeline::Pipeline; +#[cfg(not(coloring_in_tokens))] +use log::log_enabled; + +#[derive(Debug, Copy, Clone, Serialize, Deserialize)] +pub enum SyntaxShape { + Any, + List, + String, + Member, + ColumnPath, + Number, + Int, + Path, + Pattern, + Block, +} + +#[cfg(not(coloring_in_tokens))] +impl FallibleColorSyntax for SyntaxShape { + type Info = (); + type Input = 
(); + + fn color_syntax<'a, 'b>( + &self, + _input: &(), + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + shapes: &mut Vec>, + ) -> Result<(), ShellError> { + match self { + SyntaxShape::Any => { + color_fallible_syntax(&AnyExpressionShape, token_nodes, context, shapes) + } + SyntaxShape::List => { + color_syntax(&ExpressionListShape, token_nodes, context, shapes); + Ok(()) + } + SyntaxShape::Int => color_fallible_syntax(&IntShape, token_nodes, context, shapes), + SyntaxShape::String => color_fallible_syntax_with( + &StringShape, + &FlatShape::String, + token_nodes, + context, + shapes, + ), + SyntaxShape::Member => { + color_fallible_syntax(&MemberShape, token_nodes, context, shapes) + } + SyntaxShape::ColumnPath => { + color_fallible_syntax(&ColumnPathShape, token_nodes, context, shapes) + } + SyntaxShape::Number => { + color_fallible_syntax(&NumberShape, token_nodes, context, shapes) + } + SyntaxShape::Path => { + color_fallible_syntax(&FilePathShape, token_nodes, context, shapes) + } + SyntaxShape::Pattern => { + color_fallible_syntax(&PatternShape, token_nodes, context, shapes) + } + SyntaxShape::Block => { + color_fallible_syntax(&AnyBlockShape, token_nodes, context, shapes) + } + } + } +} + +#[cfg(coloring_in_tokens)] +impl FallibleColorSyntax for SyntaxShape { + type Info = (); + type Input = (); + + fn name(&self) -> &'static str { + "SyntaxShape" + } + + fn color_syntax<'a, 'b>( + &self, + _input: &(), + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + ) -> Result<(), ShellError> { + match self { + SyntaxShape::Any => color_fallible_syntax(&AnyExpressionShape, token_nodes, context), + SyntaxShape::List => { + color_syntax(&ExpressionListShape, token_nodes, context); + Ok(()) + } + SyntaxShape::Int => color_fallible_syntax(&IntShape, token_nodes, context), + SyntaxShape::String => { + color_fallible_syntax_with(&StringShape, &FlatShape::String, token_nodes, context) + } + SyntaxShape::Member => color_fallible_syntax(&MemberShape, token_nodes, context), + SyntaxShape::ColumnPath => { + color_fallible_syntax(&ColumnPathShape, token_nodes, context) + } + SyntaxShape::Number => color_fallible_syntax(&NumberShape, token_nodes, context), + SyntaxShape::Path => color_fallible_syntax(&FilePathShape, token_nodes, context), + SyntaxShape::Pattern => color_fallible_syntax(&PatternShape, token_nodes, context), + SyntaxShape::Block => color_fallible_syntax(&AnyBlockShape, token_nodes, context), + } + } +} + +impl ExpandExpression for SyntaxShape { + fn expand_expr<'a, 'b>( + &self, + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + ) -> Result { + match self { + SyntaxShape::Any => expand_expr(&AnyExpressionShape, token_nodes, context), + SyntaxShape::List => Err(ShellError::unimplemented("SyntaxShape:List")), + SyntaxShape::Int => expand_expr(&IntShape, token_nodes, context), + SyntaxShape::String => expand_expr(&StringShape, token_nodes, context), + SyntaxShape::Member => { + let syntax = expand_syntax(&MemberShape, token_nodes, context)?; + Ok(syntax.to_expr()) + } + SyntaxShape::ColumnPath => { + let Tagged { item: members, tag } = + expand_syntax(&ColumnPathShape, token_nodes, context)?; + + Ok(hir::Expression::list( + members.into_iter().map(|s| s.to_expr()).collect(), + tag, + )) + } + SyntaxShape::Number => expand_expr(&NumberShape, token_nodes, context), + SyntaxShape::Path => expand_expr(&FilePathShape, token_nodes, context), + SyntaxShape::Pattern => expand_expr(&PatternShape, token_nodes, context), + 
SyntaxShape::Block => expand_expr(&AnyBlockShape, token_nodes, context), + } + } +} + +impl std::fmt::Display for SyntaxShape { + fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { + match self { + SyntaxShape::Any => write!(f, "Any"), + SyntaxShape::List => write!(f, "List"), + SyntaxShape::String => write!(f, "String"), + SyntaxShape::Int => write!(f, "Integer"), + SyntaxShape::Member => write!(f, "Member"), + SyntaxShape::ColumnPath => write!(f, "ColumnPath"), + SyntaxShape::Number => write!(f, "Number"), + SyntaxShape::Path => write!(f, "Path"), + SyntaxShape::Pattern => write!(f, "Pattern"), + SyntaxShape::Block => write!(f, "Block"), + } + } +} + +#[derive(Getters, new)] +pub struct ExpandContext<'context> { + #[get = "pub(crate)"] + registry: &'context CommandRegistry, + #[get = "pub(crate)"] + span: Span, + #[get = "pub(crate)"] + source: &'context Text, + homedir: Option, +} + +impl<'context> ExpandContext<'context> { + pub(crate) fn homedir(&self) -> Option<&Path> { + self.homedir.as_ref().map(|h| h.as_path()) + } + + #[cfg(test)] + pub fn with_empty(source: &Text, callback: impl FnOnce(ExpandContext)) { + let mut registry = CommandRegistry::new(); + registry.insert( + "ls", + crate::commands::whole_stream_command(crate::commands::LS), + ); + + callback(ExpandContext { + registry: ®istry, + span: Span::unknown(), + source, + homedir: None, + }) + } +} + +pub trait TestSyntax: std::fmt::Debug + Copy { + fn test<'a, 'b>( + &self, + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + ) -> Option>; +} + +pub trait ExpandExpression: std::fmt::Debug + Copy { + fn expand_expr<'a, 'b>( + &self, + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + ) -> Result; +} + +#[cfg(coloring_in_tokens)] +pub trait FallibleColorSyntax: std::fmt::Debug + Copy { + type Info; + type Input; + + fn name(&self) -> &'static str; + + fn color_syntax<'a, 'b>( + &self, + input: &Self::Input, + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + ) -> Result; +} + +#[cfg(not(coloring_in_tokens))] +pub trait FallibleColorSyntax: std::fmt::Debug + Copy { + type Info; + type Input; + + fn color_syntax<'a, 'b>( + &self, + input: &Self::Input, + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + shapes: &mut Vec>, + ) -> Result; +} + +#[cfg(not(coloring_in_tokens))] +pub trait ColorSyntax: std::fmt::Debug + Copy { + type Info; + type Input; + + fn color_syntax<'a, 'b>( + &self, + input: &Self::Input, + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + shapes: &mut Vec>, + ) -> Self::Info; +} + +#[cfg(coloring_in_tokens)] +pub trait ColorSyntax: std::fmt::Debug + Copy { + type Info; + type Input; + + fn name(&self) -> &'static str; + + fn color_syntax<'a, 'b>( + &self, + input: &Self::Input, + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + ) -> Self::Info; +} + +pub(crate) trait ExpandSyntax: std::fmt::Debug + Copy { + type Output: std::fmt::Debug; + + fn expand_syntax<'a, 'b>( + &self, + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + ) -> Result; +} + +pub(crate) fn expand_syntax<'a, 'b, T: ExpandSyntax>( + shape: &T, + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, +) -> Result { + trace!(target: "nu::expand_syntax", "before {} :: {:?}", std::any::type_name::(), debug_tokens(token_nodes.state(), context.source)); + + let result = shape.expand_syntax(token_nodes, context); + + match result { + Err(err) => { + trace!(target: "nu::expand_syntax", 
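The core abstraction in syntax_shape.rs is a family of small Copy "shape" values: ExpandSyntax turns tokens into a typed output, ExpandExpression specializes that output to HIR expressions, and free functions such as expand_syntax wrap every call in tracing so the whole descent can be followed in the logs. A toy standalone version of that composition, with simplified signatures and eprintln! in place of the log crate:

#[derive(Debug, Clone, PartialEq)]
enum Token {
    Number(i64),
    Dot,
}

struct Tokens {
    items: Vec<Token>,
    pos: usize,
}

impl Tokens {
    fn next(&mut self) -> Option<Token> {
        let token = self.items.get(self.pos).cloned();
        if token.is_some() {
            self.pos += 1;
        }
        token
    }
}

// Each shape is a small Copy value that knows how to pull one construct
// out of the token stream.
trait ExpandSyntax: std::fmt::Debug + Copy {
    type Output: std::fmt::Debug;
    fn expand(&self, tokens: &mut Tokens) -> Result<Self::Output, String>;
}

// The driver is the only place that logs, so every shape gets tracing for free.
fn expand_syntax<T: ExpandSyntax>(shape: &T, tokens: &mut Tokens) -> Result<T::Output, String> {
    eprintln!("expanding {:?} at position {}", shape, tokens.pos);
    let result = shape.expand(tokens);
    eprintln!("  -> {:?}", result);
    result
}

#[derive(Debug, Clone, Copy)]
struct NumberShape;

impl ExpandSyntax for NumberShape {
    type Output = i64;

    fn expand(&self, tokens: &mut Tokens) -> Result<i64, String> {
        match tokens.next() {
            Some(Token::Number(n)) => Ok(n),
            other => Err(format!("expected a number, found {:?}", other)),
        }
    }
}

fn main() {
    let mut tokens = Tokens {
        items: vec![Token::Number(42), Token::Dot],
        pos: 0,
    };
    assert_eq!(expand_syntax(&NumberShape, &mut tokens), Ok(42));
    assert!(expand_syntax(&NumberShape, &mut tokens).is_err()); // next token is a dot
}
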
"error :: {} :: {:?}", err, debug_tokens(token_nodes.state(), context.source)); + Err(err) + } + + Ok(result) => { + trace!(target: "nu::expand_syntax", "ok :: {:?} :: {:?}", result, debug_tokens(token_nodes.state(), context.source)); + Ok(result) + } + } +} + +#[cfg(not(coloring_in_tokens))] +pub fn color_syntax<'a, 'b, T: ColorSyntax, U>( + shape: &T, + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + shapes: &mut Vec>, +) -> ((), U) { + trace!(target: "nu::color_syntax", "before {} :: {:?}", std::any::type_name::(), debug_tokens(token_nodes.state(), context.source)); + + let len = shapes.len(); + let result = shape.color_syntax(&(), token_nodes, context, shapes); + + trace!(target: "nu::color_syntax", "ok :: {:?}", debug_tokens(token_nodes.state(), context.source)); + + if log_enabled!(target: "nu::color_syntax", log::Level::Trace) { + trace!(target: "nu::color_syntax", "after {}", std::any::type_name::()); + + if len < shapes.len() { + for i in len..(shapes.len()) { + trace!(target: "nu::color_syntax", "new shape :: {:?}", shapes[i]); + } + } else { + trace!(target: "nu::color_syntax", "no new shapes"); + } + } + + ((), result) +} + +#[cfg(coloring_in_tokens)] +pub fn color_syntax<'a, 'b, T: ColorSyntax, U>( + shape: &T, + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, +) -> ((), U) { + ( + (), + token_nodes.color_frame(shape.name(), |token_nodes| { + shape.color_syntax(&(), token_nodes, context) + }), + ) +} + +#[cfg(not(coloring_in_tokens))] +pub fn color_fallible_syntax<'a, 'b, T: FallibleColorSyntax, U>( + shape: &T, + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + shapes: &mut Vec>, +) -> Result { + trace!(target: "nu::color_syntax", "before {} :: {:?}", std::any::type_name::(), debug_tokens(token_nodes.state(), context.source)); + + if token_nodes.at_end() { + trace!(target: "nu::color_syntax", "at eof"); + return Err(ShellError::unexpected_eof("coloring", Tag::unknown())); + } + + let len = shapes.len(); + let result = shape.color_syntax(&(), token_nodes, context, shapes); + + trace!(target: "nu::color_syntax", "ok :: {:?}", debug_tokens(token_nodes.state(), context.source)); + + if log_enabled!(target: "nu::color_syntax", log::Level::Trace) { + trace!(target: "nu::color_syntax", "after {}", std::any::type_name::()); + + if len < shapes.len() { + for i in len..(shapes.len()) { + trace!(target: "nu::color_syntax", "new shape :: {:?}", shapes[i]); + } + } else { + trace!(target: "nu::color_syntax", "no new shapes"); + } + } + + result +} + +#[cfg(coloring_in_tokens)] +pub fn color_fallible_syntax<'a, 'b, T: FallibleColorSyntax, U>( + shape: &T, + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, +) -> Result { + token_nodes.color_fallible_frame(shape.name(), |token_nodes| { + shape.color_syntax(&(), token_nodes, context) + }) +} + +#[cfg(not(coloring_in_tokens))] +pub fn color_syntax_with<'a, 'b, T: ColorSyntax, U, I>( + shape: &T, + input: &I, + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + shapes: &mut Vec>, +) -> ((), U) { + trace!(target: "nu::color_syntax", "before {} :: {:?}", std::any::type_name::(), debug_tokens(token_nodes.state(), context.source)); + + let len = shapes.len(); + let result = shape.color_syntax(input, token_nodes, context, shapes); + + trace!(target: "nu::color_syntax", "ok :: {:?}", debug_tokens(token_nodes.state(), context.source)); + + if log_enabled!(target: "nu::color_syntax", log::Level::Trace) { + trace!(target: "nu::color_syntax", "after {}", 
std::any::type_name::()); + + if len < shapes.len() { + for i in len..(shapes.len()) { + trace!(target: "nu::color_syntax", "new shape :: {:?}", shapes[i]); + } + } else { + trace!(target: "nu::color_syntax", "no new shapes"); + } + } + + ((), result) +} + +#[cfg(coloring_in_tokens)] +pub fn color_syntax_with<'a, 'b, T: ColorSyntax, U, I>( + shape: &T, + input: &I, + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, +) -> ((), U) { + ( + (), + token_nodes.color_frame(shape.name(), |token_nodes| { + shape.color_syntax(input, token_nodes, context) + }), + ) +} + +#[cfg(not(coloring_in_tokens))] +pub fn color_fallible_syntax_with<'a, 'b, T: FallibleColorSyntax, U, I>( + shape: &T, + input: &I, + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + shapes: &mut Vec>, +) -> Result { + token_nodes.color_fallible_frame(std::any::type_name::(), |token_nodes| { + shape.color_syntax(input, token_nodes, context, shapes) + }) +} + +#[cfg(coloring_in_tokens)] +pub fn color_fallible_syntax_with<'a, 'b, T: FallibleColorSyntax, U, I>( + shape: &T, + input: &I, + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, +) -> Result { + token_nodes.color_fallible_frame(shape.name(), |token_nodes| { + shape.color_syntax(input, token_nodes, context) + }) +} + +pub(crate) fn expand_expr<'a, 'b, T: ExpandExpression>( + shape: &T, + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, +) -> Result { + trace!(target: "nu::expand_expression", "before {} :: {:?}", std::any::type_name::(), debug_tokens(token_nodes.state(), context.source)); + + let result = shape.expand_expr(token_nodes, context); + + match result { + Err(err) => { + trace!(target: "nu::expand_expression", "error :: {} :: {:?}", err, debug_tokens(token_nodes.state(), context.source)); + Err(err) + } + + Ok(result) => { + trace!(target: "nu::expand_expression", "ok :: {:?} :: {:?}", result, debug_tokens(token_nodes.state(), context.source)); + Ok(result) + } + } +} + +impl ExpandSyntax for T { + type Output = hir::Expression; + + fn expand_syntax<'a, 'b>( + &self, + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + ) -> Result { + ExpandExpression::expand_expr(self, token_nodes, context) + } +} + +pub trait SkipSyntax: std::fmt::Debug + Copy { + fn skip<'a, 'b>( + &self, + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + ) -> Result<(), ShellError>; +} + +enum BarePathState { + Initial, + Seen(Span, Span), + Error(ShellError), +} + +impl BarePathState { + pub fn seen(self, span: Span) -> BarePathState { + match self { + BarePathState::Initial => BarePathState::Seen(span, span), + BarePathState::Seen(start, _) => BarePathState::Seen(start, span), + BarePathState::Error(err) => BarePathState::Error(err), + } + } + + pub fn end(self, peeked: Peeked, reason: impl Into) -> BarePathState { + match self { + BarePathState::Initial => BarePathState::Error(peeked.type_error(reason)), + BarePathState::Seen(start, end) => BarePathState::Seen(start, end), + BarePathState::Error(err) => BarePathState::Error(err), + } + } + + pub fn into_bare(self) -> Result { + match self { + BarePathState::Initial => unreachable!("into_bare in initial state"), + BarePathState::Seen(start, end) => Ok(start.until(end)), + BarePathState::Error(err) => Err(err), + } + } +} + +pub fn expand_bare<'a, 'b>( + token_nodes: &'b mut TokensIterator<'a>, + _context: &ExpandContext, + predicate: impl Fn(&TokenNode) -> bool, +) -> Result { + let mut state = BarePathState::Initial; + + loop { + 
// Whitespace ends a word + let mut peeked = token_nodes.peek_any(); + + match peeked.node { + None => { + state = state.end(peeked, "word"); + break; + } + Some(node) => { + if predicate(node) { + state = state.seen(node.span()); + peeked.commit(); + } else { + state = state.end(peeked, "word"); + break; + } + } + } + } + + state.into_bare() +} + +#[derive(Debug, Copy, Clone)] +pub struct BarePathShape; + +impl ExpandSyntax for BarePathShape { + type Output = Span; + + fn expand_syntax<'a, 'b>( + &self, + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + ) -> Result { + expand_bare(token_nodes, context, |token| match token { + TokenNode::Token(Spanned { + item: RawToken::Bare, + .. + }) + | TokenNode::Token(Spanned { + item: RawToken::Operator(Operator::Dot), + .. + }) => true, + + _ => false, + }) + } +} + +#[derive(Debug, Copy, Clone)] +pub struct BareShape; + +#[cfg(not(coloring_in_tokens))] +impl FallibleColorSyntax for BareShape { + type Info = (); + type Input = FlatShape; + + fn color_syntax<'a, 'b>( + &self, + input: &FlatShape, + token_nodes: &'b mut TokensIterator<'a>, + _context: &ExpandContext, + shapes: &mut Vec>, + ) -> Result<(), ShellError> { + token_nodes.peek_any_token("word", |token| match token { + // If it's a bare token, color it + TokenNode::Token(Spanned { + item: RawToken::Bare, + span, + }) => { + shapes.push((*input).spanned(*span)); + Ok(()) + } + + // otherwise, fail + other => Err(ShellError::type_error("word", other.tagged_type_name())), + }) + } +} + +#[cfg(coloring_in_tokens)] +impl FallibleColorSyntax for BareShape { + type Info = (); + type Input = FlatShape; + + fn name(&self) -> &'static str { + "BareShape" + } + + fn color_syntax<'a, 'b>( + &self, + input: &FlatShape, + token_nodes: &'b mut TokensIterator<'a>, + _context: &ExpandContext, + ) -> Result<(), ShellError> { + let span = token_nodes.peek_any_token("word", |token| match token { + // If it's a bare token, color it + TokenNode::Token(Spanned { + item: RawToken::Bare, + span, + }) => Ok(span), + + // otherwise, fail + other => Err(ShellError::type_error("word", other.tagged_type_name())), + })?; + + token_nodes.color_shape((*input).spanned(*span)); + + Ok(()) + } +} + +impl ExpandSyntax for BareShape { + type Output = Spanned; + + fn expand_syntax<'a, 'b>( + &self, + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + ) -> Result { + let peeked = token_nodes.peek_any().not_eof("word")?; + + match peeked.node { + TokenNode::Token(Spanned { + item: RawToken::Bare, + span, + }) => { + peeked.commit(); + Ok(span.spanned_string(context.source)) + } + + other => Err(ShellError::type_error("word", other.tagged_type_name())), + } + } +} + +impl TestSyntax for BareShape { + fn test<'a, 'b>( + &self, + token_nodes: &'b mut TokensIterator<'a>, + _context: &ExpandContext, + ) -> Option> { + let peeked = token_nodes.peek_any(); + + match peeked.node { + Some(token) if token.is_bare() => Some(peeked), + _ => None, + } + } +} + +#[derive(Debug)] +pub enum CommandSignature { + Internal(Spanned>), + LiteralExternal { outer: Span, inner: Span }, + External(Span), + Expression(hir::Expression), +} + +impl CommandSignature { + pub fn to_expression(&self) -> hir::Expression { + match self { + CommandSignature::Internal(command) => { + let span = command.span; + hir::RawExpression::Command(span).spanned(span) + } + CommandSignature::LiteralExternal { outer, inner } => { + hir::RawExpression::ExternalCommand(hir::ExternalCommand::new(*inner)) + .spanned(*outer) + } + 
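expand_bare grows a single span by feeding consecutive tokens that satisfy a predicate into the small BarePathState machine (Initial, then Seen(start, end)) and stops at the first token the predicate rejects; BarePathShape then applies it with "bare word or dot" as the predicate. A standalone sketch of the same accumulate-while pattern over toy spans; the explicit Error state and the peek/commit plumbing of the real code are folded into the Result here.

#[derive(Debug, Clone, Copy, PartialEq)]
struct Span {
    start: usize,
    end: usize,
}

enum BareState {
    Initial,
    Seen(Span, Span),
}

impl BareState {
    fn seen(self, span: Span) -> BareState {
        match self {
            BareState::Initial => BareState::Seen(span, span),
            BareState::Seen(start, _) => BareState::Seen(start, span),
        }
    }

    fn into_bare(self) -> Result<Span, String> {
        match self {
            BareState::Initial => Err("expected a bare word".to_string()),
            BareState::Seen(start, end) => Ok(Span { start: start.start, end: end.end }),
        }
    }
}

fn expand_bare(tokens: &[(Span, char)], predicate: impl Fn(char) -> bool) -> Result<Span, String> {
    let mut state = BareState::Initial;

    for (span, kind) in tokens {
        if predicate(*kind) {
            state = state.seen(*span);
        } else {
            break; // the first rejected token ends the word
        }
    }

    state.into_bare()
}

fn main() {
    // "a.b c" tokenized as bare, dot, bare, whitespace, bare
    let tokens = [
        (Span { start: 0, end: 1 }, 'b'),
        (Span { start: 1, end: 2 }, '.'),
        (Span { start: 2, end: 3 }, 'b'),
        (Span { start: 3, end: 4 }, ' '),
        (Span { start: 4, end: 5 }, 'b'),
    ];

    // Bare words and dots glue together into one span; whitespace ends it,
    // much like BarePathShape treats `a.b` followed by ` c`.
    let span = expand_bare(&tokens, |kind| kind == 'b' || kind == '.').unwrap();
    assert_eq!(span, Span { start: 0, end: 3 });
}
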
CommandSignature::External(span) => { + hir::RawExpression::ExternalCommand(hir::ExternalCommand::new(*span)).spanned(*span) + } + CommandSignature::Expression(expr) => expr.clone(), + } + } +} + +#[derive(Debug, Copy, Clone)] +pub struct PipelineShape; + +#[cfg(not(coloring_in_tokens))] +// The failure mode is if the head of the token stream is not a pipeline +impl FallibleColorSyntax for PipelineShape { + type Info = (); + type Input = (); + + fn color_syntax<'a, 'b>( + &self, + _input: &(), + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + shapes: &mut Vec>, + ) -> Result<(), ShellError> { + // Make sure we're looking at a pipeline + let Pipeline { parts, .. } = + token_nodes.peek_any_token("pipeline", |node| node.as_pipeline())?; + + // Enumerate the pipeline parts + for part in parts { + // If the pipeline part has a prefix `|`, emit a pipe to color + if let Some(pipe) = part.pipe { + shapes.push(FlatShape::Pipe.spanned(pipe)); + } + + // Create a new iterator containing the tokens in the pipeline part to color + let mut token_nodes = TokensIterator::new(&part.tokens.item, part.span, false); + + color_syntax(&MaybeSpaceShape, &mut token_nodes, context, shapes); + color_syntax(&CommandShape, &mut token_nodes, context, shapes); + } + + Ok(()) + } +} + +#[cfg(coloring_in_tokens)] +// The failure mode is if the head of the token stream is not a pipeline +impl FallibleColorSyntax for PipelineShape { + type Info = (); + type Input = (); + + fn name(&self) -> &'static str { + "PipelineShape" + } + + fn color_syntax<'a, 'b>( + &self, + _input: &(), + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + ) -> Result<(), ShellError> { + // Make sure we're looking at a pipeline + let pipeline = token_nodes.peek_any_token("pipeline", |node| node.as_pipeline())?; + + let parts = &pipeline.parts[..]; + + // Enumerate the pipeline parts + for part in parts { + // If the pipeline part has a prefix `|`, emit a pipe to color + if let Some(pipe) = part.pipe { + token_nodes.color_shape(FlatShape::Pipe.spanned(pipe)) + } + + let tokens: Spanned<&[TokenNode]> = (&part.item.tokens[..]).spanned(part.span); + + token_nodes.child(tokens, move |token_nodes| { + color_syntax(&MaybeSpaceShape, token_nodes, context); + color_syntax(&CommandShape, token_nodes, context); + }); + } + + Ok(()) + } +} + +#[cfg(coloring_in_tokens)] +impl ExpandSyntax for PipelineShape { + type Output = ClassifiedPipeline; + fn expand_syntax<'content, 'me>( + &self, + iterator: &'me mut TokensIterator<'content>, + context: &ExpandContext, + ) -> Result { + let source = context.source; + + let peeked = iterator.peek_any().not_eof("pipeline")?; + let pipeline = peeked.commit().as_pipeline()?; + + let parts = &pipeline.parts[..]; + + let mut out = vec![]; + + for part in parts { + let tokens: Spanned<&[TokenNode]> = (&part.item.tokens[..]).spanned(part.span); + + let classified = iterator.child(tokens, move |token_nodes| { + classify_command(token_nodes, context, &source) + })?; + + out.push(classified); + } + + Ok(ClassifiedPipeline { commands: out }) + } +} + +#[cfg(not(coloring_in_tokens))] +impl ExpandSyntax for PipelineShape { + type Output = ClassifiedPipeline; + fn expand_syntax<'content, 'me>( + &self, + iterator: &'me mut TokensIterator<'content>, + context: &ExpandContext, + ) -> Result { + let source = context.source; + + let peeked = iterator.peek_any().not_eof("pipeline")?; + let pipeline = peeked.commit().as_pipeline()?; + + let parts = &pipeline.parts[..]; + + let mut out = vec![]; + + 
for part in parts { + let tokens: Spanned<&[TokenNode]> = (&part.item.tokens[..]).spanned(part.span); + + let classified = iterator.child(tokens, move |token_nodes| { + classify_command(token_nodes, context, &source) + })?; + + out.push(classified); + } + + Ok(ClassifiedPipeline { commands: out }) + } +} + +pub enum CommandHeadKind { + External, + Internal(Signature), +} + +#[derive(Debug, Copy, Clone)] +pub struct CommandHeadShape; + +#[cfg(not(coloring_in_tokens))] +impl FallibleColorSyntax for CommandHeadShape { + type Info = CommandHeadKind; + type Input = (); + + fn color_syntax<'a, 'b>( + &self, + _input: &(), + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + shapes: &mut Vec>, + ) -> Result { + // If we don't ultimately find a token, roll back + token_nodes.atomic(|token_nodes| { + // First, take a look at the next token + let atom = expand_atom( + token_nodes, + "command head", + context, + ExpansionRule::permissive(), + )?; + + match atom.item { + // If the head is an explicit external command (^cmd), color it as an external command + AtomicToken::ExternalCommand { .. } => { + shapes.push(FlatShape::ExternalCommand.spanned(atom.span)); + Ok(CommandHeadKind::External) + } + + // If the head is a word, it depends on whether it matches a registered internal command + AtomicToken::Word { text } => { + let name = text.slice(context.source); + + if context.registry.has(name) { + // If the registry has the command, color it as an internal command + shapes.push(FlatShape::InternalCommand.spanned(text)); + let command = context.registry.expect_command(name); + Ok(CommandHeadKind::Internal(command.signature())) + } else { + // Otherwise, color it as an external command + shapes.push(FlatShape::ExternalCommand.spanned(text)); + Ok(CommandHeadKind::External) + } + } + + // Otherwise, we're not actually looking at a command + _ => Err(ShellError::syntax_error( + "No command at the head".tagged(atom.span), + )), + } + }) + } +} + +#[cfg(coloring_in_tokens)] +impl FallibleColorSyntax for CommandHeadShape { + type Info = CommandHeadKind; + type Input = (); + + fn name(&self) -> &'static str { + "CommandHeadShape" + } + + fn color_syntax<'a, 'b>( + &self, + _input: &(), + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + ) -> Result { + // If we don't ultimately find a token, roll back + token_nodes.atomic(|token_nodes| { + // First, take a look at the next token + let atom = expand_atom( + token_nodes, + "command head", + context, + ExpansionRule::permissive(), + )?; + + match atom.item { + // If the head is an explicit external command (^cmd), color it as an external command + AtomicToken::ExternalCommand { .. 
} => { + token_nodes.color_shape(FlatShape::ExternalCommand.spanned(atom.span)); + Ok(CommandHeadKind::External) + } + + // If the head is a word, it depends on whether it matches a registered internal command + AtomicToken::Word { text } => { + let name = text.slice(context.source); + + if context.registry.has(name) { + // If the registry has the command, color it as an internal command + token_nodes.color_shape(FlatShape::InternalCommand.spanned(text)); + let command = context.registry.expect_command(name); + Ok(CommandHeadKind::Internal(command.signature())) + } else { + // Otherwise, color it as an external command + token_nodes.color_shape(FlatShape::ExternalCommand.spanned(text)); + Ok(CommandHeadKind::External) + } + } + + // Otherwise, we're not actually looking at a command + _ => Err(ShellError::syntax_error( + "No command at the head".tagged(atom.span), + )), + } + }) + } +} + +impl ExpandSyntax for CommandHeadShape { + type Output = CommandSignature; + + fn expand_syntax<'a, 'b>( + &self, + token_nodes: &mut TokensIterator<'_>, + context: &ExpandContext, + ) -> Result { + let node = + parse_single_node_skipping_ws(token_nodes, "command head1", |token, token_span, _| { + Ok(match token { + RawToken::ExternalCommand(span) => CommandSignature::LiteralExternal { + outer: token_span, + inner: span, + }, + RawToken::Bare => { + let name = token_span.slice(context.source); + if context.registry.has(name) { + let command = context.registry.expect_command(name); + CommandSignature::Internal(command.spanned(token_span)) + } else { + CommandSignature::External(token_span) + } + } + _ => { + return Err(ShellError::type_error( + "command head2", + token.type_name().tagged(token_span), + )) + } + }) + }); + + match node { + Ok(expr) => return Ok(expr), + Err(_) => match expand_expr(&AnyExpressionShape, token_nodes, context) { + Ok(expr) => return Ok(CommandSignature::Expression(expr)), + Err(_) => Err(token_nodes.peek_non_ws().type_error("command head3")), + }, + } + } +} + +#[derive(Debug, Copy, Clone)] +pub struct ClassifiedCommandShape; + +impl ExpandSyntax for ClassifiedCommandShape { + type Output = ClassifiedCommand; + + fn expand_syntax<'a, 'b>( + &self, + iterator: &'b mut TokensIterator<'a>, + context: &ExpandContext, + ) -> Result { + let head = expand_syntax(&CommandHeadShape, iterator, context)?; + + match &head { + CommandSignature::Expression(expr) => Err(ShellError::syntax_error( + "Unexpected expression in command position".tagged(expr.span), + )), + + // If the command starts with `^`, treat it as an external command no matter what + CommandSignature::External(name) => { + let name_str = name.slice(&context.source); + + external_command(iterator, &context.source, name_str.tagged(name)) + } + + CommandSignature::LiteralExternal { outer, inner } => { + let name_str = inner.slice(&context.source); + + external_command(iterator, &context.source, name_str.tagged(outer)) + } + + CommandSignature::Internal(command) => { + let tail = + parse_command_tail(&command.signature(), &context, iterator, command.span)?; + + let (positional, named) = match tail { + None => (None, None), + Some((positional, named)) => (positional, named), + }; + + let call = hir::Call { + head: Box::new(head.to_expression()), + positional, + named, + }; + + Ok(ClassifiedCommand::Internal(InternalCommand::new( + command.item.name().to_string(), + Tag { + span: command.span, + anchor: None, + }, + call, + ))) + } + } + } +} + +#[derive(Debug, Copy, Clone)] +pub struct InternalCommandHeadShape; + 
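CommandHeadShape decides what the first word of a pipeline part is: an explicit ^name is always an external command, a bare word is internal only when the command registry knows it and external otherwise, and anything else is rejected so the caller can fall back to expression parsing. A condensed standalone sketch of that decision, with the registry modeled as a plain set of names instead of nushell's CommandRegistry:

use std::collections::HashSet;

#[derive(Debug, PartialEq)]
enum Head<'a> {
    Internal(&'a str),
    External(&'a str),
}

fn classify_head<'a>(word: &'a str, registry: &HashSet<&str>) -> Option<Head<'a>> {
    if word.is_empty() {
        return None; // nothing to classify; the real code reports a syntax error
    }
    if let Some(name) = word.strip_prefix('^') {
        return Some(Head::External(name)); // explicit external, e.g. `^ls`
    }
    if registry.contains(word) {
        Some(Head::Internal(word))
    } else {
        Some(Head::External(word)) // unknown bare word: treat it as external
    }
}

fn main() {
    let registry: HashSet<&str> = ["ls", "where"].into_iter().collect();

    assert_eq!(classify_head("ls", &registry), Some(Head::Internal("ls")));
    assert_eq!(classify_head("^ls", &registry), Some(Head::External("ls")));
    assert_eq!(classify_head("rg", &registry), Some(Head::External("rg")));
}
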
+#[cfg(not(coloring_in_tokens))] +impl FallibleColorSyntax for InternalCommandHeadShape { + type Info = (); + type Input = (); + + fn color_syntax<'a, 'b>( + &self, + _input: &(), + token_nodes: &'b mut TokensIterator<'a>, + _context: &ExpandContext, + shapes: &mut Vec>, + ) -> Result<(), ShellError> { + let peeked_head = token_nodes.peek_non_ws().not_eof("command head4"); + + let peeked_head = match peeked_head { + Err(_) => return Ok(()), + Ok(peeked_head) => peeked_head, + }; + + let _expr = match peeked_head.node { + TokenNode::Token(Spanned { + item: RawToken::Bare, + span, + }) => shapes.push(FlatShape::Word.spanned(*span)), + + TokenNode::Token(Spanned { + item: RawToken::String(_inner_tag), + span, + }) => shapes.push(FlatShape::String.spanned(*span)), + + _node => shapes.push(FlatShape::Error.spanned(peeked_head.node.span())), + }; + + peeked_head.commit(); + + Ok(()) + } +} + +#[cfg(coloring_in_tokens)] +impl FallibleColorSyntax for InternalCommandHeadShape { + type Info = (); + type Input = (); + + fn name(&self) -> &'static str { + "InternalCommandHeadShape" + } + + fn color_syntax<'a, 'b>( + &self, + _input: &(), + token_nodes: &'b mut TokensIterator<'a>, + _context: &ExpandContext, + ) -> Result<(), ShellError> { + let peeked_head = token_nodes.peek_non_ws().not_eof("command head4"); + + let peeked_head = match peeked_head { + Err(_) => return Ok(()), + Ok(peeked_head) => peeked_head, + }; + + let node = peeked_head.commit(); + + let _expr = match node { + TokenNode::Token(Spanned { + item: RawToken::Bare, + span, + }) => token_nodes.color_shape(FlatShape::Word.spanned(*span)), + + TokenNode::Token(Spanned { + item: RawToken::String(_inner_tag), + span, + }) => token_nodes.color_shape(FlatShape::String.spanned(*span)), + + _node => token_nodes.color_shape(FlatShape::Error.spanned(node.span())), + }; + + Ok(()) + } +} + +impl ExpandExpression for InternalCommandHeadShape { + fn expand_expr( + &self, + token_nodes: &mut TokensIterator<'_>, + _context: &ExpandContext, + ) -> Result { + let peeked_head = token_nodes.peek_non_ws().not_eof("command head4")?; + + let expr = match peeked_head.node { + TokenNode::Token( + spanned @ Spanned { + item: RawToken::Bare, + .. 
+ }, + ) => spanned.map(|_| hir::RawExpression::Literal(hir::Literal::Bare)), + + TokenNode::Token(Spanned { + item: RawToken::String(inner_span), + span, + }) => hir::RawExpression::Literal(hir::Literal::String(*inner_span)).spanned(*span), + + node => { + return Err(ShellError::type_error( + "command head5", + node.tagged_type_name(), + )) + } + }; + + peeked_head.commit(); + + Ok(expr) + } +} + +pub(crate) struct SingleError<'token> { + expected: &'static str, + node: &'token Spanned, +} + +impl<'token> SingleError<'token> { + pub(crate) fn error(&self) -> ShellError { + ShellError::type_error(self.expected, self.node.type_name().tagged(self.node.span)) + } +} + +fn parse_single_node<'a, 'b, T>( + token_nodes: &'b mut TokensIterator<'a>, + expected: &'static str, + callback: impl FnOnce(RawToken, Span, SingleError) -> Result, +) -> Result { + token_nodes.peek_any_token(expected, |node| match node { + TokenNode::Token(token) => callback( + token.item, + token.span, + SingleError { + expected, + node: token, + }, + ), + + other => Err(ShellError::type_error(expected, other.tagged_type_name())), + }) +} + +fn parse_single_node_skipping_ws<'a, 'b, T>( + token_nodes: &'b mut TokensIterator<'a>, + expected: &'static str, + callback: impl FnOnce(RawToken, Span, SingleError) -> Result, +) -> Result { + let peeked = token_nodes.peek_non_ws().not_eof(expected)?; + + let expr = match peeked.node { + TokenNode::Token(token) => callback( + token.item, + token.span, + SingleError { + expected, + node: token, + }, + )?, + + other => return Err(ShellError::type_error(expected, other.tagged_type_name())), + }; + + peeked.commit(); + + Ok(expr) +} + +#[derive(Debug, Copy, Clone)] +pub struct WhitespaceShape; + +#[cfg(not(coloring_in_tokens))] +impl FallibleColorSyntax for WhitespaceShape { + type Info = (); + type Input = (); + + fn color_syntax<'a, 'b>( + &self, + _input: &(), + token_nodes: &'b mut TokensIterator<'a>, + _context: &ExpandContext, + shapes: &mut Vec>, + ) -> Result<(), ShellError> { + let peeked = token_nodes.peek_any().not_eof("whitespace"); + + let peeked = match peeked { + Err(_) => return Ok(()), + Ok(peeked) => peeked, + }; + + let _tag = match peeked.node { + TokenNode::Whitespace(span) => shapes.push(FlatShape::Whitespace.spanned(*span)), + + _other => return Ok(()), + }; + + peeked.commit(); + + Ok(()) + } +} + +#[cfg(coloring_in_tokens)] +impl FallibleColorSyntax for WhitespaceShape { + type Info = (); + type Input = (); + + fn name(&self) -> &'static str { + "WhitespaceShape" + } + + fn color_syntax<'a, 'b>( + &self, + _input: &(), + token_nodes: &'b mut TokensIterator<'a>, + _context: &ExpandContext, + ) -> Result<(), ShellError> { + let peeked = token_nodes.peek_any().not_eof("whitespace"); + + let peeked = match peeked { + Err(_) => return Ok(()), + Ok(peeked) => peeked, + }; + + let node = peeked.commit(); + + let _ = match node { + TokenNode::Whitespace(span) => { + token_nodes.color_shape(FlatShape::Whitespace.spanned(*span)) + } + + _other => return Ok(()), + }; + + Ok(()) + } +} + +impl ExpandSyntax for WhitespaceShape { + type Output = Span; + + fn expand_syntax<'a, 'b>( + &self, + token_nodes: &'b mut TokensIterator<'a>, + _context: &ExpandContext, + ) -> Result { + let peeked = token_nodes.peek_any().not_eof("whitespace")?; + + let span = match peeked.node { + TokenNode::Whitespace(tag) => *tag, + + other => { + return Err(ShellError::type_error( + "whitespace", + other.tagged_type_name(), + )) + } + }; + + peeked.commit(); + + Ok(span) + } +} + +#[derive(Debug, 
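parse_single_node and parse_single_node_skipping_ws follow the peek/commit protocol used throughout this file: peek at the next interesting node, run a fallible callback against it, and only commit (advance) the iterator when the callback succeeds, so a failed match leaves the stream untouched for whatever shape tries next. A small standalone sketch of that protocol, with a toy Cursor and Peeked in place of the real TokensIterator and its Peeked type:

struct Cursor<'a> {
    tokens: &'a [&'a str],
    pos: usize,
}

struct Peeked<'a, 'b> {
    cursor: &'b mut Cursor<'a>,
    node: Option<&'a str>,
}

impl<'a> Cursor<'a> {
    fn peek(&mut self) -> Peeked<'a, '_> {
        let node = self.tokens.get(self.pos).copied();
        Peeked { cursor: self, node }
    }
}

impl<'a, 'b> Peeked<'a, 'b> {
    /// Consume the peeked token by advancing the underlying cursor.
    fn commit(self) {
        if self.node.is_some() {
            self.cursor.pos += 1;
        }
    }
}

fn parse_single_word<'a>(cursor: &mut Cursor<'a>, expected: &str) -> Result<&'a str, String> {
    let peeked = cursor.peek();
    let node = peeked.node; // Option<&str> is Copy, so we can inspect it freely

    match node {
        Some(word) if word == expected => {
            peeked.commit(); // success: actually consume the token
            Ok(word)
        }
        // failure: `peeked` is dropped without committing and the cursor stays put
        other => Err(format!("expected {:?}, found {:?}", expected, other)),
    }
}

fn main() {
    let mut cursor = Cursor { tokens: &["ls", "*.txt"], pos: 0 };

    assert!(parse_single_word(&mut cursor, "cd").is_err());
    assert_eq!(cursor.pos, 0); // the failed attempt consumed nothing
    assert_eq!(parse_single_word(&mut cursor, "ls").unwrap(), "ls");
    assert_eq!(cursor.pos, 1);
}
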
Copy, Clone)] +pub struct SpacedExpression { + inner: T, +} + +impl ExpandExpression for SpacedExpression { + fn expand_expr<'a, 'b>( + &self, + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + ) -> Result { + // TODO: Make the name part of the trait + let peeked = token_nodes.peek_any().not_eof("whitespace")?; + + match peeked.node { + TokenNode::Whitespace(_) => { + peeked.commit(); + expand_expr(&self.inner, token_nodes, context) + } + + other => Err(ShellError::type_error( + "whitespace", + other.tagged_type_name(), + )), + } + } +} + +pub fn maybe_spaced(inner: T) -> MaybeSpacedExpression { + MaybeSpacedExpression { inner } +} + +#[derive(Debug, Copy, Clone)] +pub struct MaybeSpacedExpression { + inner: T, +} + +#[derive(Debug, Copy, Clone)] +pub struct MaybeSpaceShape; + +#[cfg(not(coloring_in_tokens))] +impl ColorSyntax for MaybeSpaceShape { + type Info = (); + type Input = (); + + fn color_syntax<'a, 'b>( + &self, + _input: &(), + token_nodes: &'b mut TokensIterator<'a>, + _context: &ExpandContext, + shapes: &mut Vec>, + ) -> Self::Info { + let peeked = token_nodes.peek_any().not_eof("whitespace"); + + let peeked = match peeked { + Err(_) => return, + Ok(peeked) => peeked, + }; + + if let TokenNode::Whitespace(span) = peeked.node { + peeked.commit(); + shapes.push(FlatShape::Whitespace.spanned(*span)); + } + } +} + +#[cfg(coloring_in_tokens)] +impl ColorSyntax for MaybeSpaceShape { + type Info = (); + type Input = (); + + fn name(&self) -> &'static str { + "MaybeSpaceShape" + } + + fn color_syntax<'a, 'b>( + &self, + _input: &(), + token_nodes: &'b mut TokensIterator<'a>, + _context: &ExpandContext, + ) -> Self::Info { + let peeked = token_nodes.peek_any().not_eof("whitespace"); + + let peeked = match peeked { + Err(_) => return, + Ok(peeked) => peeked, + }; + + if let TokenNode::Whitespace(span) = peeked.node { + peeked.commit(); + token_nodes.color_shape(FlatShape::Whitespace.spanned(*span)); + } + } +} + +#[derive(Debug, Copy, Clone)] +pub struct SpaceShape; + +#[cfg(not(coloring_in_tokens))] +impl FallibleColorSyntax for SpaceShape { + type Info = (); + type Input = (); + + fn color_syntax<'a, 'b>( + &self, + _input: &(), + token_nodes: &'b mut TokensIterator<'a>, + _context: &ExpandContext, + shapes: &mut Vec>, + ) -> Result<(), ShellError> { + let peeked = token_nodes.peek_any().not_eof("whitespace")?; + + match peeked.node { + TokenNode::Whitespace(span) => { + peeked.commit(); + shapes.push(FlatShape::Whitespace.spanned(*span)); + Ok(()) + } + + other => Err(ShellError::type_error( + "whitespace", + other.tagged_type_name(), + )), + } + } +} + +#[cfg(coloring_in_tokens)] +impl FallibleColorSyntax for SpaceShape { + type Info = (); + type Input = (); + + fn name(&self) -> &'static str { + "SpaceShape" + } + + fn color_syntax<'a, 'b>( + &self, + _input: &(), + token_nodes: &'b mut TokensIterator<'a>, + _context: &ExpandContext, + ) -> Result<(), ShellError> { + let peeked = token_nodes.peek_any().not_eof("whitespace")?; + + match peeked.node { + TokenNode::Whitespace(span) => { + peeked.commit(); + token_nodes.color_shape(FlatShape::Whitespace.spanned(*span)); + Ok(()) + } + + other => Err(ShellError::type_error( + "whitespace", + other.tagged_type_name(), + )), + } + } +} + +impl ExpandExpression for MaybeSpacedExpression { + fn expand_expr<'a, 'b>( + &self, + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + ) -> Result { + // TODO: Make the name part of the trait + let peeked = token_nodes.peek_any().not_eof("whitespace")?; + + 
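+        // Brief note on the two wrappers nearby (illustrative, not from the original patch):
+        // `maybe_spaced(inner)` treats leading whitespace as optional and rolls back when none
+        // is found, while `spaced(inner)` requires it. For example, ExpressionListShape expands
+        // the first element with
+        //     expand_expr(&maybe_spaced(AnyExpressionShape), token_nodes, context)?
+        // and every later element with `spaced(AnyExpressionShape)`, so list elements after the
+        // first must be separated by whitespace.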
match peeked.node { + TokenNode::Whitespace(_) => { + peeked.commit(); + expand_expr(&self.inner, token_nodes, context) + } + + _ => { + peeked.rollback(); + expand_expr(&self.inner, token_nodes, context) + } + } + } +} + +pub fn spaced(inner: T) -> SpacedExpression { + SpacedExpression { inner } +} + +fn expand_variable(span: Span, token_span: Span, source: &Text) -> hir::Expression { + if span.slice(source) == "it" { + hir::Expression::it_variable(span, token_span) + } else { + hir::Expression::variable(span, token_span) + } +} + +fn classify_command( + mut iterator: &mut TokensIterator, + context: &ExpandContext, + source: &Text, +) -> Result { + let head = CommandHeadShape.expand_syntax(&mut iterator, &context)?; + + match &head { + CommandSignature::Expression(_) => Err(ShellError::syntax_error( + "Unexpected expression in command position".tagged(iterator.whole_span()), + )), + + // If the command starts with `^`, treat it as an external command no matter what + CommandSignature::External(name) => { + let name_str = name.slice(source); + + external_command(&mut iterator, source, name_str.tagged(name)) + } + + CommandSignature::LiteralExternal { outer, inner } => { + let name_str = inner.slice(source); + + external_command(&mut iterator, source, name_str.tagged(outer)) + } + + CommandSignature::Internal(command) => { + let tail = + parse_command_tail(&command.signature(), &context, &mut iterator, command.span)?; + + let (positional, named) = match tail { + None => (None, None), + Some((positional, named)) => (positional, named), + }; + + let call = hir::Call { + head: Box::new(head.to_expression()), + positional, + named, + }; + + Ok(ClassifiedCommand::Internal(InternalCommand::new( + command.name().to_string(), + Tag { + span: command.span, + anchor: None, + }, + call, + ))) + } + } +} + +#[derive(Debug, Copy, Clone)] +pub struct CommandShape; + +#[cfg(not(coloring_in_tokens))] +impl ColorSyntax for CommandShape { + type Info = (); + type Input = (); + + fn color_syntax<'a, 'b>( + &self, + _input: &(), + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + shapes: &mut Vec>, + ) { + let kind = color_fallible_syntax(&CommandHeadShape, token_nodes, context, shapes); + + match kind { + Err(_) => { + // We didn't find a command, so we'll have to fall back to parsing this pipeline part + // as a blob of undifferentiated expressions + color_syntax(&ExpressionListShape, token_nodes, context, shapes); + } + + Ok(CommandHeadKind::External) => { + color_syntax(&ExternalTokensShape, token_nodes, context, shapes); + } + Ok(CommandHeadKind::Internal(signature)) => { + color_syntax_with(&CommandTailShape, &signature, token_nodes, context, shapes); + } + }; + } +} + +#[cfg(coloring_in_tokens)] +impl ColorSyntax for CommandShape { + type Info = (); + type Input = (); + + fn name(&self) -> &'static str { + "CommandShape" + } + + fn color_syntax<'a, 'b>( + &self, + _input: &(), + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + ) { + let kind = color_fallible_syntax(&CommandHeadShape, token_nodes, context); + + match kind { + Err(_) => { + // We didn't find a command, so we'll have to fall back to parsing this pipeline part + // as a blob of undifferentiated expressions + color_syntax(&ExpressionListShape, token_nodes, context); + } + + Ok(CommandHeadKind::External) => { + color_syntax(&ExternalTokensShape, token_nodes, context); + } + Ok(CommandHeadKind::Internal(signature)) => { + color_syntax_with(&CommandTailShape, &signature, token_nodes, context); + } + }; + 
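+        // Illustrative note (not from the original patch): every shape in this module is
+        // implemented twice behind the `coloring_in_tokens` cfg flag. Without the flag, the
+        // color methods push into an external `shapes: &mut Vec<Spanned<FlatShape>>` buffer;
+        // with the flag, shapes are recorded on the iterator itself via
+        // `token_nodes.color_shape(..)` and each impl also exposes a `name()`. The control flow
+        // of the two variants is meant to mirror one another.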
} +} diff --git a/src/parser/hir/syntax_shape/block.rs b/src/parser/hir/syntax_shape/block.rs new file mode 100644 index 0000000000..0061c0fe8c --- /dev/null +++ b/src/parser/hir/syntax_shape/block.rs @@ -0,0 +1,509 @@ +use crate::errors::ShellError; +#[cfg(not(coloring_in_tokens))] +use crate::parser::hir::syntax_shape::FlatShape; +use crate::parser::{ + hir, + hir::syntax_shape::{ + color_fallible_syntax, color_syntax_with, continue_expression, expand_expr, expand_syntax, + DelimitedShape, ExpandContext, ExpandExpression, ExpressionContinuationShape, + ExpressionListShape, FallibleColorSyntax, MemberShape, PathTailShape, VariablePathShape, + }, + hir::tokens_iterator::TokensIterator, + parse::token_tree::Delimiter, + RawToken, TokenNode, +}; +use crate::{Span, Spanned, SpannedItem}; + +#[derive(Debug, Copy, Clone)] +pub struct AnyBlockShape; + +#[cfg(not(coloring_in_tokens))] +impl FallibleColorSyntax for AnyBlockShape { + type Info = (); + type Input = (); + + fn color_syntax<'a, 'b>( + &self, + _input: &(), + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + shapes: &mut Vec>, + ) -> Result<(), ShellError> { + let block = token_nodes.peek_non_ws().not_eof("block"); + + let block = match block { + Err(_) => return Ok(()), + Ok(block) => block, + }; + + // is it just a block? + let block = block.node.as_block(); + + match block { + // If so, color it as a block + Some((children, spans)) => { + let mut token_nodes = TokensIterator::new(children.item, context.span, false); + color_syntax_with( + &DelimitedShape, + &(Delimiter::Brace, spans.0, spans.1), + &mut token_nodes, + context, + shapes, + ); + + return Ok(()); + } + _ => {} + } + + // Otherwise, look for a shorthand block. If none found, fail + color_fallible_syntax(&ShorthandBlock, token_nodes, context, shapes) + } +} + +#[cfg(coloring_in_tokens)] +impl FallibleColorSyntax for AnyBlockShape { + type Info = (); + type Input = (); + + fn name(&self) -> &'static str { + "AnyBlockShape" + } + + fn color_syntax<'a, 'b>( + &self, + _input: &(), + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + ) -> Result<(), ShellError> { + let block = token_nodes.peek_non_ws().not_eof("block"); + + let block = match block { + Err(_) => return Ok(()), + Ok(block) => block, + }; + + // is it just a block? + let block = block.node.as_block(); + + match block { + // If so, color it as a block + Some((children, spans)) => { + token_nodes.child(children, |token_nodes| { + color_syntax_with( + &DelimitedShape, + &(Delimiter::Brace, spans.0, spans.1), + token_nodes, + context, + ); + }); + + return Ok(()); + } + _ => {} + } + + // Otherwise, look for a shorthand block. If none found, fail + color_fallible_syntax(&ShorthandBlock, token_nodes, context) + } +} + +impl ExpandExpression for AnyBlockShape { + fn expand_expr<'a, 'b>( + &self, + token_nodes: &mut TokensIterator<'_>, + context: &ExpandContext, + ) -> Result { + let block = token_nodes.peek_non_ws().not_eof("block")?; + + // is it just a block? 
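+        // Illustration (hypothetical input, not from the original patch): for `{ $it.name == "foo" }`
+        // the brace-delimited node is taken here as an explicit block, while a bare
+        // `name == "foo"` in the same position falls through to ShorthandBlock below, whose
+        // head `name` is synthesized into the path `$it.name` by ShorthandHeadShape.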
+ let block = block.node.as_block(); + + match block { + Some((block, _tags)) => { + let mut iterator = TokensIterator::new(&block.item, context.span, false); + + let exprs = expand_syntax(&ExpressionListShape, &mut iterator, context)?; + + return Ok(hir::RawExpression::Block(exprs).spanned(block.span)); + } + _ => {} + } + + expand_syntax(&ShorthandBlock, token_nodes, context) + } +} + +#[derive(Debug, Copy, Clone)] +pub struct ShorthandBlock; + +#[cfg(not(coloring_in_tokens))] +impl FallibleColorSyntax for ShorthandBlock { + type Info = (); + type Input = (); + + fn color_syntax<'a, 'b>( + &self, + _input: &(), + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + shapes: &mut Vec>, + ) -> Result<(), ShellError> { + // Try to find a shorthand head. If none found, fail + color_fallible_syntax(&ShorthandPath, token_nodes, context, shapes)?; + + loop { + // Check to see whether there's any continuation after the head expression + let result = + color_fallible_syntax(&ExpressionContinuationShape, token_nodes, context, shapes); + + match result { + // if no continuation was found, we're done + Err(_) => break, + // if a continuation was found, look for another one + Ok(_) => continue, + } + } + + Ok(()) + } +} + +#[cfg(coloring_in_tokens)] +impl FallibleColorSyntax for ShorthandBlock { + type Info = (); + type Input = (); + + fn name(&self) -> &'static str { + "ShorthandBlock" + } + + fn color_syntax<'a, 'b>( + &self, + _input: &(), + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + ) -> Result<(), ShellError> { + // Try to find a shorthand head. If none found, fail + color_fallible_syntax(&ShorthandPath, token_nodes, context)?; + + loop { + // Check to see whether there's any continuation after the head expression + let result = color_fallible_syntax(&ExpressionContinuationShape, token_nodes, context); + + match result { + // if no continuation was found, we're done + Err(_) => break, + // if a continuation was found, look for another one + Ok(_) => continue, + } + } + + Ok(()) + } +} + +impl ExpandExpression for ShorthandBlock { + fn expand_expr<'a, 'b>( + &self, + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + ) -> Result { + let path = expand_expr(&ShorthandPath, token_nodes, context)?; + let start = path.span; + let expr = continue_expression(path, token_nodes, context)?; + let end = expr.span; + let block = hir::RawExpression::Block(vec![expr]).spanned(start.until(end)); + + Ok(block) + } +} + +/// A shorthand for `$it.foo."bar"`, used inside of a shorthand block +#[derive(Debug, Copy, Clone)] +pub struct ShorthandPath; + +#[cfg(not(coloring_in_tokens))] +impl FallibleColorSyntax for ShorthandPath { + type Info = (); + type Input = (); + + fn color_syntax<'a, 'b>( + &self, + _input: &(), + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + shapes: &mut Vec>, + ) -> Result<(), ShellError> { + token_nodes.atomic(|token_nodes| { + let variable = color_fallible_syntax(&VariablePathShape, token_nodes, context, shapes); + + match variable { + Ok(_) => { + // if it's a variable path, that's the head part + return Ok(()); + } + + Err(_) => { + // otherwise, we'll try to find a member path + } + } + + // look for a member (`` -> `$it.`) + color_fallible_syntax(&MemberShape, token_nodes, context, shapes)?; + + // Now that we've synthesized the head, of the path, proceed to expand the tail of the path + // like any other path. 
+ let tail = color_fallible_syntax(&PathTailShape, token_nodes, context, shapes); + + match tail { + Ok(_) => {} + Err(_) => { + // It's ok if there's no path tail; a single member is sufficient + } + } + + Ok(()) + }) + } +} + +#[cfg(coloring_in_tokens)] +impl FallibleColorSyntax for ShorthandPath { + type Info = (); + type Input = (); + + fn name(&self) -> &'static str { + "ShorthandPath" + } + + fn color_syntax<'a, 'b>( + &self, + _input: &(), + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + ) -> Result<(), ShellError> { + token_nodes.atomic(|token_nodes| { + let variable = color_fallible_syntax(&VariablePathShape, token_nodes, context); + + match variable { + Ok(_) => { + // if it's a variable path, that's the head part + return Ok(()); + } + + Err(_) => { + // otherwise, we'll try to find a member path + } + } + + // look for a member (`` -> `$it.`) + color_fallible_syntax(&MemberShape, token_nodes, context)?; + + // Now that we've synthesized the head, of the path, proceed to expand the tail of the path + // like any other path. + let tail = color_fallible_syntax(&PathTailShape, token_nodes, context); + + match tail { + Ok(_) => {} + Err(_) => { + // It's ok if there's no path tail; a single member is sufficient + } + } + + Ok(()) + }) + } +} + +impl ExpandExpression for ShorthandPath { + fn expand_expr<'a, 'b>( + &self, + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + ) -> Result { + // if it's a variable path, that's the head part + let path = expand_expr(&VariablePathShape, token_nodes, context); + + match path { + Ok(path) => return Ok(path), + Err(_) => {} + } + + // Synthesize the head of the shorthand path (`` -> `$it.`) + let mut head = expand_expr(&ShorthandHeadShape, token_nodes, context)?; + + // Now that we've synthesized the head, of the path, proceed to expand the tail of the path + // like any other path. 
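+        // Rough walk-through (hypothetical input): for `foo."bar"`, the head `foo` has just been
+        // synthesized into `$it.foo`; the loop below then folds each tail member in with
+        // `hir::Expression::dot_member`, producing the same HIR as writing `$it.foo."bar"`
+        // explicitly.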
+ let tail = expand_syntax(&PathTailShape, token_nodes, context); + + match tail { + Err(_) => return Ok(head), + Ok((tail, _)) => { + // For each member that `PathTailShape` expanded, join it onto the existing expression + // to form a new path + for member in tail { + head = hir::Expression::dot_member(head, member); + } + + Ok(head) + } + } + } +} + +/// A shorthand for `$it.foo."bar"`, used inside of a shorthand block +#[derive(Debug, Copy, Clone)] +pub struct ShorthandHeadShape; + +#[cfg(not(coloring_in_tokens))] +impl FallibleColorSyntax for ShorthandHeadShape { + type Info = (); + type Input = (); + + fn color_syntax<'a, 'b>( + &self, + _input: &(), + token_nodes: &'b mut TokensIterator<'a>, + _context: &ExpandContext, + shapes: &mut Vec>, + ) -> Result<(), ShellError> { + // A shorthand path must not be at EOF + let peeked = token_nodes.peek_non_ws().not_eof("shorthand path")?; + + match peeked.node { + // If the head of a shorthand path is a bare token, it expands to `$it.bare` + TokenNode::Token(Spanned { + item: RawToken::Bare, + span, + }) => { + peeked.commit(); + shapes.push(FlatShape::BareMember.spanned(*span)); + Ok(()) + } + + // If the head of a shorthand path is a string, it expands to `$it."some string"` + TokenNode::Token(Spanned { + item: RawToken::String(_), + span: outer, + }) => { + peeked.commit(); + shapes.push(FlatShape::StringMember.spanned(*outer)); + Ok(()) + } + + other => Err(ShellError::type_error( + "shorthand head", + other.tagged_type_name(), + )), + } + } +} + +#[cfg(coloring_in_tokens)] +#[cfg(not(coloring_in_tokens))] +impl FallibleColorSyntax for ShorthandHeadShape { + type Info = (); + type Input = (); + + fn color_syntax<'a, 'b>( + &self, + _input: &(), + token_nodes: &'b mut TokensIterator<'a>, + _context: &ExpandContext, + shapes: &mut Vec>, + ) -> Result<(), ShellError> { + // A shorthand path must not be at EOF + let peeked = token_nodes.peek_non_ws().not_eof("shorthand path")?; + + match peeked.node { + // If the head of a shorthand path is a bare token, it expands to `$it.bare` + TokenNode::Token(Spanned { + item: RawToken::Bare, + span, + }) => { + peeked.commit(); + shapes.push(FlatShape::BareMember.spanned(*span)); + Ok(()) + } + + // If the head of a shorthand path is a string, it expands to `$it."some string"` + TokenNode::Token(Spanned { + item: RawToken::String(_), + span: outer, + }) => { + peeked.commit(); + shapes.push(FlatShape::StringMember.spanned(*outer)); + Ok(()) + } + + other => Err(ShellError::type_error( + "shorthand head", + other.tagged_type_name(), + )), + } + } +} + +impl ExpandExpression for ShorthandHeadShape { + fn expand_expr<'a, 'b>( + &self, + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + ) -> Result { + // A shorthand path must not be at EOF + let peeked = token_nodes.peek_non_ws().not_eof("shorthand path")?; + + match peeked.node { + // If the head of a shorthand path is a bare token, it expands to `$it.bare` + TokenNode::Token(Spanned { + item: RawToken::Bare, + span, + }) => { + // Commit the peeked token + peeked.commit(); + + // Synthesize an `$it` expression + let it = synthetic_it(); + + // Make a path out of `$it` and the bare token as a member + Ok(hir::Expression::path( + it, + vec![span.spanned_string(context.source)], + *span, + )) + } + + // If the head of a shorthand path is a string, it expands to `$it."some string"` + TokenNode::Token(Spanned { + item: RawToken::String(inner), + span: outer, + }) => { + // Commit the peeked token + peeked.commit(); + + // Synthesize an 
`$it` expression + let it = synthetic_it(); + + // Make a path out of `$it` and the bare token as a member + Ok(hir::Expression::path( + it, + vec![inner.string(context.source).spanned(*outer)], + *outer, + )) + } + + // Any other token is not a valid bare head + other => { + return Err(ShellError::type_error( + "shorthand path", + other.tagged_type_name(), + )) + } + } + } +} + +fn synthetic_it() -> hir::Expression { + hir::Expression::it_variable(Span::unknown(), Span::unknown()) +} diff --git a/src/parser/hir/syntax_shape/expression.rs b/src/parser/hir/syntax_shape/expression.rs new file mode 100644 index 0000000000..0681c9c403 --- /dev/null +++ b/src/parser/hir/syntax_shape/expression.rs @@ -0,0 +1,483 @@ +pub(crate) mod atom; +pub(crate) mod delimited; +pub(crate) mod file_path; +pub(crate) mod list; +pub(crate) mod number; +pub(crate) mod pattern; +pub(crate) mod string; +pub(crate) mod unit; +pub(crate) mod variable_path; + +use crate::parser::hir::syntax_shape::{ + color_delimited_square, color_fallible_syntax, color_fallible_syntax_with, expand_atom, + expand_delimited_square, expand_expr, expand_syntax, AtomicToken, BareShape, ColorableDotShape, + DotShape, ExpandContext, ExpandExpression, ExpandSyntax, ExpansionRule, ExpressionContinuation, + ExpressionContinuationShape, FallibleColorSyntax, FlatShape, +}; +use crate::parser::{ + hir, + hir::{Expression, TokensIterator}, +}; +use crate::prelude::*; +use std::path::PathBuf; + +#[derive(Debug, Copy, Clone)] +pub struct AnyExpressionShape; + +impl ExpandExpression for AnyExpressionShape { + fn expand_expr<'a, 'b>( + &self, + token_nodes: &mut TokensIterator<'_>, + context: &ExpandContext, + ) -> Result { + // Look for an expression at the cursor + let head = expand_expr(&AnyExpressionStartShape, token_nodes, context)?; + + continue_expression(head, token_nodes, context) + } +} + +#[cfg(not(coloring_in_tokens))] +impl FallibleColorSyntax for AnyExpressionShape { + type Info = (); + type Input = (); + + fn color_syntax<'a, 'b>( + &self, + _input: &(), + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + shapes: &mut Vec>, + ) -> Result<(), ShellError> { + // Look for an expression at the cursor + color_fallible_syntax(&AnyExpressionStartShape, token_nodes, context, shapes)?; + + match continue_coloring_expression(token_nodes, context, shapes) { + Err(_) => { + // it's fine for there to be no continuation + } + + Ok(()) => {} + } + + Ok(()) + } +} + +#[cfg(coloring_in_tokens)] +impl FallibleColorSyntax for AnyExpressionShape { + type Info = (); + type Input = (); + + fn name(&self) -> &'static str { + "AnyExpressionShape" + } + + fn color_syntax<'a, 'b>( + &self, + _input: &(), + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + ) -> Result<(), ShellError> { + // Look for an expression at the cursor + color_fallible_syntax(&AnyExpressionStartShape, token_nodes, context)?; + + match continue_coloring_expression(token_nodes, context) { + Err(_) => { + // it's fine for there to be no continuation + } + + Ok(()) => {} + } + + Ok(()) + } +} + +pub(crate) fn continue_expression( + mut head: hir::Expression, + token_nodes: &mut TokensIterator<'_>, + context: &ExpandContext, +) -> Result { + loop { + // Check to see whether there's any continuation after the head expression + let continuation = expand_syntax(&ExpressionContinuationShape, token_nodes, context); + + match continuation { + // If there's no continuation, return the head + Err(_) => return Ok(head), + // Otherwise, form a new expression by 
combining the head with the continuation + Ok(continuation) => match continuation { + // If the continuation is a `.member`, form a path with the new member + ExpressionContinuation::DotSuffix(_dot, member) => { + head = Expression::dot_member(head, member); + } + + // Otherwise, if the continuation is an infix suffix, form an infix expression + ExpressionContinuation::InfixSuffix(op, expr) => { + head = Expression::infix(head, op, expr); + } + }, + } + } +} + +#[cfg(not(coloring_in_tokens))] +pub(crate) fn continue_coloring_expression( + token_nodes: &mut TokensIterator<'_>, + context: &ExpandContext, + shapes: &mut Vec>, +) -> Result<(), ShellError> { + // if there's not even one expression continuation, fail + color_fallible_syntax(&ExpressionContinuationShape, token_nodes, context, shapes)?; + + loop { + // Check to see whether there's any continuation after the head expression + let result = + color_fallible_syntax(&ExpressionContinuationShape, token_nodes, context, shapes); + + match result { + Err(_) => { + // We already saw one continuation, so just return + return Ok(()); + } + + Ok(_) => {} + } + } +} + +#[cfg(coloring_in_tokens)] +pub(crate) fn continue_coloring_expression( + token_nodes: &mut TokensIterator<'_>, + context: &ExpandContext, +) -> Result<(), ShellError> { + // if there's not even one expression continuation, fail + color_fallible_syntax(&ExpressionContinuationShape, token_nodes, context)?; + + loop { + // Check to see whether there's any continuation after the head expression + let result = color_fallible_syntax(&ExpressionContinuationShape, token_nodes, context); + + match result { + Err(_) => { + // We already saw one continuation, so just return + return Ok(()); + } + + Ok(_) => {} + } + } +} + +#[derive(Debug, Copy, Clone)] +pub struct AnyExpressionStartShape; + +impl ExpandExpression for AnyExpressionStartShape { + fn expand_expr<'a, 'b>( + &self, + token_nodes: &mut TokensIterator<'_>, + context: &ExpandContext, + ) -> Result { + let atom = expand_atom(token_nodes, "expression", context, ExpansionRule::new())?; + + match atom.item { + AtomicToken::Size { number, unit } => { + return Ok(hir::Expression::size( + number.to_number(context.source), + unit.item, + Tag { + span: atom.span, + anchor: None, + }, + )) + } + + AtomicToken::SquareDelimited { nodes, .. } => { + expand_delimited_square(&nodes, atom.span.into(), context) + } + + AtomicToken::Word { .. } | AtomicToken::Dot { .. } => { + let end = expand_syntax(&BareTailShape, token_nodes, context)?; + Ok(hir::Expression::bare(atom.span.until_option(end))) + } + + other => return other.spanned(atom.span).into_hir(context, "expression"), + } + } +} + +#[cfg(not(coloring_in_tokens))] +impl FallibleColorSyntax for AnyExpressionStartShape { + type Info = (); + type Input = (); + + fn color_syntax<'a, 'b>( + &self, + _input: &(), + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + shapes: &mut Vec>, + ) -> Result<(), ShellError> { + let atom = token_nodes.spanned(|token_nodes| { + expand_atom( + token_nodes, + "expression", + context, + ExpansionRule::permissive(), + ) + }); + + let atom = match atom { + Spanned { + item: Err(_err), + span, + } => { + shapes.push(FlatShape::Error.spanned(span)); + return Ok(()); + } + + Spanned { + item: Ok(value), .. 
+ } => value, + }; + + match atom.item { + AtomicToken::Size { number, unit } => shapes.push( + FlatShape::Size { + number: number.span.into(), + unit: unit.span.into(), + } + .spanned(atom.span), + ), + + AtomicToken::SquareDelimited { nodes, spans } => { + color_delimited_square(spans, &nodes, atom.span.into(), context, shapes) + } + + AtomicToken::Word { .. } | AtomicToken::Dot { .. } => { + shapes.push(FlatShape::Word.spanned(atom.span)); + } + + _ => atom.color_tokens(shapes), + } + + Ok(()) + } +} + +#[cfg(coloring_in_tokens)] +impl FallibleColorSyntax for AnyExpressionStartShape { + type Info = (); + type Input = (); + + fn name(&self) -> &'static str { + "AnyExpressionStartShape" + } + + fn color_syntax<'a, 'b>( + &self, + _input: &(), + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + ) -> Result<(), ShellError> { + let atom = token_nodes.spanned(|token_nodes| { + expand_atom( + token_nodes, + "expression", + context, + ExpansionRule::permissive(), + ) + }); + + let atom = match atom { + Spanned { + item: Err(_err), + span, + } => { + token_nodes.color_shape(FlatShape::Error.spanned(span)); + return Ok(()); + } + + Spanned { + item: Ok(value), .. + } => value, + }; + + match atom.item { + AtomicToken::Size { number, unit } => token_nodes.color_shape( + FlatShape::Size { + number: number.span.into(), + unit: unit.span.into(), + } + .spanned(atom.span), + ), + + AtomicToken::SquareDelimited { nodes, spans } => { + token_nodes.child((&nodes[..]).spanned(atom.span), |tokens| { + color_delimited_square(spans, tokens, atom.span.into(), context); + }); + } + + AtomicToken::Word { .. } | AtomicToken::Dot { .. } => { + token_nodes.color_shape(FlatShape::Word.spanned(atom.span)); + } + + _ => token_nodes.mutate_shapes(|shapes| atom.color_tokens(shapes)), + } + + Ok(()) + } +} + +#[derive(Debug, Copy, Clone)] +pub struct BareTailShape; + +#[cfg(not(coloring_in_tokens))] +impl FallibleColorSyntax for BareTailShape { + type Info = (); + type Input = (); + + fn color_syntax<'a, 'b>( + &self, + _input: &(), + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + shapes: &mut Vec>, + ) -> Result<(), ShellError> { + let len = shapes.len(); + + loop { + let word = color_fallible_syntax_with( + &BareShape, + &FlatShape::Word, + token_nodes, + context, + shapes, + ); + + match word { + // if a word was found, continue + Ok(_) => continue, + // if a word wasn't found, try to find a dot + Err(_) => {} + } + + // try to find a dot + let dot = color_fallible_syntax_with( + &ColorableDotShape, + &FlatShape::Word, + token_nodes, + context, + shapes, + ); + + match dot { + // if a dot was found, try to find another word + Ok(_) => continue, + // otherwise, we're done + Err(_) => break, + } + } + + if shapes.len() > len { + Ok(()) + } else { + Err(ShellError::syntax_error( + "No tokens matched BareTailShape".tagged_unknown(), + )) + } + } +} + +#[cfg(coloring_in_tokens)] +impl FallibleColorSyntax for BareTailShape { + type Info = (); + type Input = (); + + fn name(&self) -> &'static str { + "BareTailShape" + } + + fn color_syntax<'a, 'b>( + &self, + _input: &(), + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + ) -> Result<(), ShellError> { + let len = token_nodes.state().shapes().len(); + + loop { + let word = + color_fallible_syntax_with(&BareShape, &FlatShape::Word, token_nodes, context); + + match word { + // if a word was found, continue + Ok(_) => continue, + // if a word wasn't found, try to find a dot + Err(_) => {} + } + + // try to find a 
dot + let dot = color_fallible_syntax_with( + &ColorableDotShape, + &FlatShape::Word, + token_nodes, + context, + ); + + match dot { + // if a dot was found, try to find another word + Ok(_) => continue, + // otherwise, we're done + Err(_) => break, + } + } + + if token_nodes.state().shapes().len() > len { + Ok(()) + } else { + Err(ShellError::syntax_error( + "No tokens matched BareTailShape".tagged_unknown(), + )) + } + } +} + +impl ExpandSyntax for BareTailShape { + type Output = Option; + + fn expand_syntax<'a, 'b>( + &self, + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + ) -> Result, ShellError> { + let mut end: Option = None; + + loop { + match expand_syntax(&BareShape, token_nodes, context) { + Ok(bare) => { + end = Some(bare.span); + continue; + } + + Err(_) => match expand_syntax(&DotShape, token_nodes, context) { + Ok(dot) => { + end = Some(dot); + continue; + } + + Err(_) => break, + }, + } + } + + Ok(end) + } +} + +pub fn expand_file_path(string: &str, context: &ExpandContext) -> PathBuf { + let expanded = shellexpand::tilde_with_context(string, || context.homedir()); + + PathBuf::from(expanded.as_ref()) +} diff --git a/src/parser/hir/syntax_shape/expression/atom.rs b/src/parser/hir/syntax_shape/expression/atom.rs new file mode 100644 index 0000000000..888d9430e6 --- /dev/null +++ b/src/parser/hir/syntax_shape/expression/atom.rs @@ -0,0 +1,583 @@ +use crate::parser::hir::syntax_shape::{ + expand_syntax, expression::expand_file_path, parse_single_node, BarePathShape, + BarePatternShape, ExpandContext, UnitShape, +}; +use crate::parser::{ + hir, + hir::{Expression, RawNumber, TokensIterator}, + parse::flag::{Flag, FlagKind}, + DelimitedNode, Delimiter, FlatShape, RawToken, TokenNode, Unit, +}; +use crate::prelude::*; +use crate::{Span, Spanned}; + +#[derive(Debug)] +pub enum AtomicToken<'tokens> { + Eof { + span: Span, + }, + Error { + error: Spanned, + }, + Number { + number: RawNumber, + }, + Size { + number: Spanned, + unit: Spanned, + }, + String { + body: Span, + }, + ItVariable { + name: Span, + }, + Variable { + name: Span, + }, + ExternalCommand { + command: Span, + }, + ExternalWord { + text: Span, + }, + GlobPattern { + pattern: Span, + }, + FilePath { + path: Span, + }, + Word { + text: Span, + }, + SquareDelimited { + spans: (Span, Span), + nodes: &'tokens Vec, + }, + ParenDelimited { + span: (Span, Span), + nodes: &'tokens Vec, + }, + BraceDelimited { + spans: (Span, Span), + nodes: &'tokens Vec, + }, + Pipeline { + pipe: Option, + elements: Spanned<&'tokens Vec>, + }, + ShorthandFlag { + name: Span, + }, + LonghandFlag { + name: Span, + }, + Dot { + text: Span, + }, + Operator { + text: Span, + }, + Whitespace { + text: Span, + }, +} + +pub type SpannedAtomicToken<'tokens> = Spanned>; + +impl<'tokens> SpannedAtomicToken<'tokens> { + pub fn into_hir( + &self, + context: &ExpandContext, + expected: &'static str, + ) -> Result { + Ok(match &self.item { + AtomicToken::Eof { .. } => { + return Err(ShellError::type_error( + expected, + "eof atomic token".tagged(self.span), + )) + } + AtomicToken::Error { .. } => { + return Err(ShellError::type_error( + expected, + "eof atomic token".tagged(self.span), + )) + } + AtomicToken::Operator { .. } => { + return Err(ShellError::type_error( + expected, + "operator".tagged(self.span), + )) + } + AtomicToken::ShorthandFlag { .. } => { + return Err(ShellError::type_error( + expected, + "shorthand flag".tagged(self.span), + )) + } + AtomicToken::LonghandFlag { .. 
} => { + return Err(ShellError::type_error(expected, "flag".tagged(self.span))) + } + AtomicToken::Whitespace { .. } => { + return Err(ShellError::unimplemented("whitespace in AtomicToken")) + } + AtomicToken::Dot { .. } => { + return Err(ShellError::type_error(expected, "dot".tagged(self.span))) + } + AtomicToken::Number { number } => { + Expression::number(number.to_number(context.source), self.span) + } + AtomicToken::FilePath { path } => Expression::file_path( + expand_file_path(path.slice(context.source), context), + self.span, + ), + AtomicToken::Size { number, unit } => { + Expression::size(number.to_number(context.source), **unit, self.span) + } + AtomicToken::String { body } => Expression::string(*body, self.span), + AtomicToken::ItVariable { name } => Expression::it_variable(*name, self.span), + AtomicToken::Variable { name } => Expression::variable(*name, self.span), + AtomicToken::ExternalCommand { command } => { + Expression::external_command(*command, self.span) + } + AtomicToken::ExternalWord { text } => Expression::string(*text, self.span), + AtomicToken::GlobPattern { pattern } => Expression::pattern( + expand_file_path(pattern.slice(context.source), context).to_string_lossy(), + self.span, + ), + AtomicToken::Word { text } => Expression::string(*text, *text), + AtomicToken::SquareDelimited { .. } => unimplemented!("into_hir"), + AtomicToken::ParenDelimited { .. } => unimplemented!("into_hir"), + AtomicToken::BraceDelimited { .. } => unimplemented!("into_hir"), + AtomicToken::Pipeline { .. } => unimplemented!("into_hir"), + }) + } + + pub fn spanned_type_name(&self) -> Spanned<&'static str> { + match &self.item { + AtomicToken::Eof { .. } => "eof", + AtomicToken::Error { .. } => "error", + AtomicToken::Operator { .. } => "operator", + AtomicToken::ShorthandFlag { .. } => "shorthand flag", + AtomicToken::LonghandFlag { .. } => "flag", + AtomicToken::Whitespace { .. } => "whitespace", + AtomicToken::Dot { .. } => "dot", + AtomicToken::Number { .. } => "number", + AtomicToken::FilePath { .. } => "file path", + AtomicToken::Size { .. } => "size", + AtomicToken::String { .. } => "string", + AtomicToken::ItVariable { .. } => "$it", + AtomicToken::Variable { .. } => "variable", + AtomicToken::ExternalCommand { .. } => "external command", + AtomicToken::ExternalWord { .. } => "external word", + AtomicToken::GlobPattern { .. } => "file pattern", + AtomicToken::Word { .. } => "word", + AtomicToken::SquareDelimited { .. } => "array literal", + AtomicToken::ParenDelimited { .. } => "parenthesized expression", + AtomicToken::BraceDelimited { .. } => "block", + AtomicToken::Pipeline { .. } => "pipeline", + } + .spanned(self.span) + } + + pub fn tagged_type_name(&self) -> Tagged<&'static str> { + match &self.item { + AtomicToken::Eof { .. } => "eof", + AtomicToken::Error { .. } => "error", + AtomicToken::Operator { .. } => "operator", + AtomicToken::ShorthandFlag { .. } => "shorthand flag", + AtomicToken::LonghandFlag { .. } => "flag", + AtomicToken::Whitespace { .. } => "whitespace", + AtomicToken::Dot { .. } => "dot", + AtomicToken::Number { .. } => "number", + AtomicToken::FilePath { .. } => "file path", + AtomicToken::Size { .. } => "size", + AtomicToken::String { .. } => "string", + AtomicToken::ItVariable { .. } => "$it", + AtomicToken::Variable { .. } => "variable", + AtomicToken::ExternalCommand { .. } => "external command", + AtomicToken::ExternalWord { .. } => "external word", + AtomicToken::GlobPattern { .. } => "file pattern", + AtomicToken::Word { .. 
} => "word", + AtomicToken::SquareDelimited { .. } => "array literal", + AtomicToken::ParenDelimited { .. } => "parenthesized expression", + AtomicToken::BraceDelimited { .. } => "block", + AtomicToken::Pipeline { .. } => "pipeline", + } + .tagged(self.span) + } + + pub(crate) fn color_tokens(&self, shapes: &mut Vec>) { + match &self.item { + AtomicToken::Eof { .. } => {} + AtomicToken::Error { .. } => return shapes.push(FlatShape::Error.spanned(self.span)), + AtomicToken::Operator { .. } => { + return shapes.push(FlatShape::Operator.spanned(self.span)); + } + AtomicToken::ShorthandFlag { .. } => { + return shapes.push(FlatShape::ShorthandFlag.spanned(self.span)); + } + AtomicToken::LonghandFlag { .. } => { + return shapes.push(FlatShape::Flag.spanned(self.span)); + } + AtomicToken::Whitespace { .. } => { + return shapes.push(FlatShape::Whitespace.spanned(self.span)); + } + AtomicToken::FilePath { .. } => return shapes.push(FlatShape::Path.spanned(self.span)), + AtomicToken::Dot { .. } => return shapes.push(FlatShape::Dot.spanned(self.span)), + AtomicToken::Number { + number: RawNumber::Decimal(_), + } => { + return shapes.push(FlatShape::Decimal.spanned(self.span)); + } + AtomicToken::Number { + number: RawNumber::Int(_), + } => { + return shapes.push(FlatShape::Int.spanned(self.span)); + } + AtomicToken::Size { number, unit } => { + return shapes.push( + FlatShape::Size { + number: number.span, + unit: unit.span, + } + .spanned(self.span), + ); + } + AtomicToken::String { .. } => return shapes.push(FlatShape::String.spanned(self.span)), + AtomicToken::ItVariable { .. } => { + return shapes.push(FlatShape::ItVariable.spanned(self.span)) + } + AtomicToken::Variable { .. } => { + return shapes.push(FlatShape::Variable.spanned(self.span)) + } + AtomicToken::ExternalCommand { .. } => { + return shapes.push(FlatShape::ExternalCommand.spanned(self.span)); + } + AtomicToken::ExternalWord { .. } => { + return shapes.push(FlatShape::ExternalWord.spanned(self.span)) + } + AtomicToken::GlobPattern { .. } => { + return shapes.push(FlatShape::GlobPattern.spanned(self.span)) + } + AtomicToken::Word { .. } => return shapes.push(FlatShape::Word.spanned(self.span)), + _ => return shapes.push(FlatShape::Error.spanned(self.span)), + } + } +} + +#[derive(Debug)] +pub enum WhitespaceHandling { + #[allow(unused)] + AllowWhitespace, + RejectWhitespace, +} + +#[derive(Debug)] +pub struct ExpansionRule { + pub(crate) allow_external_command: bool, + pub(crate) allow_external_word: bool, + pub(crate) allow_operator: bool, + pub(crate) allow_eof: bool, + pub(crate) treat_size_as_word: bool, + pub(crate) commit_errors: bool, + pub(crate) whitespace: WhitespaceHandling, +} + +impl ExpansionRule { + pub fn new() -> ExpansionRule { + ExpansionRule { + allow_external_command: false, + allow_external_word: false, + allow_operator: false, + allow_eof: false, + treat_size_as_word: false, + commit_errors: false, + whitespace: WhitespaceHandling::RejectWhitespace, + } + } + + /// The intent of permissive mode is to return an atomic token for every possible + /// input token. This is important for error-correcting parsing, such as the + /// syntax highlighter. 
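+    // Sketch of typical builder use (illustrative; this exact caller is not in the patch):
+    //     let rule = ExpansionRule::new().allow_external_word().commit_errors();
+    // would accept external words and opt into committing errors, whereas the coloring shapes
+    // in this change simply call `ExpansionRule::permissive()`, defined below.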
+ pub fn permissive() -> ExpansionRule { + ExpansionRule { + allow_external_command: true, + allow_external_word: true, + allow_operator: true, + allow_eof: true, + treat_size_as_word: false, + commit_errors: true, + whitespace: WhitespaceHandling::AllowWhitespace, + } + } + + #[allow(unused)] + pub fn allow_external_command(mut self) -> ExpansionRule { + self.allow_external_command = true; + self + } + + #[allow(unused)] + pub fn allow_operator(mut self) -> ExpansionRule { + self.allow_operator = true; + self + } + + #[allow(unused)] + pub fn no_operator(mut self) -> ExpansionRule { + self.allow_operator = false; + self + } + + #[allow(unused)] + pub fn no_external_command(mut self) -> ExpansionRule { + self.allow_external_command = false; + self + } + + #[allow(unused)] + pub fn allow_external_word(mut self) -> ExpansionRule { + self.allow_external_word = true; + self + } + + #[allow(unused)] + pub fn no_external_word(mut self) -> ExpansionRule { + self.allow_external_word = false; + self + } + + #[allow(unused)] + pub fn treat_size_as_word(mut self) -> ExpansionRule { + self.treat_size_as_word = true; + self + } + + #[allow(unused)] + pub fn commit_errors(mut self) -> ExpansionRule { + self.commit_errors = true; + self + } + + #[allow(unused)] + pub fn allow_whitespace(mut self) -> ExpansionRule { + self.whitespace = WhitespaceHandling::AllowWhitespace; + self + } + + #[allow(unused)] + pub fn reject_whitespace(mut self) -> ExpansionRule { + self.whitespace = WhitespaceHandling::RejectWhitespace; + self + } +} + +/// If the caller of expand_atom throws away the returned atomic token returned, it +/// must use a checkpoint to roll it back. +pub fn expand_atom<'me, 'content>( + token_nodes: &'me mut TokensIterator<'content>, + expected: &'static str, + context: &ExpandContext, + rule: ExpansionRule, +) -> Result, ShellError> { + if token_nodes.at_end() { + match rule.allow_eof { + true => { + return Ok(AtomicToken::Eof { + span: Span::unknown(), + } + .spanned(Span::unknown())) + } + false => return Err(ShellError::unexpected_eof("anything", Tag::unknown())), + } + } + + // First, we'll need to handle the situation where more than one token corresponds + // to a single atomic token + + // If treat_size_as_word, don't try to parse the head of the token stream + // as a size. + match rule.treat_size_as_word { + true => {} + false => match expand_syntax(&UnitShape, token_nodes, context) { + // If the head of the stream isn't a valid unit, we'll try to parse + // it again next as a word + Err(_) => {} + + // But if it was a valid unit, we're done here + Ok(Spanned { + item: (number, unit), + span, + }) => return Ok(AtomicToken::Size { number, unit }.spanned(span)), + }, + } + + // Try to parse the head of the stream as a bare path. A bare path includes + // words as well as `.`s, connected together without whitespace. + match expand_syntax(&BarePathShape, token_nodes, context) { + // If we didn't find a bare path + Err(_) => {} + Ok(span) => { + let next = token_nodes.peek_any(); + + match next.node { + Some(token) if token.is_pattern() => { + // if the very next token is a pattern, we're looking at a glob, not a + // word, and we should try to parse it as a glob next + } + + _ => return Ok(AtomicToken::Word { text: span }.spanned(span)), + } + } + } + + // Try to parse the head of the stream as a pattern. A pattern includes + // words, words with `*` as well as `.`s, connected together without whitespace. 
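+    // Illustration (hypothetical input; the exact lexing rules live elsewhere): a word such as
+    // `foo*.txt`, bare words and dots joined by `*`, is intended to be consumed by this branch
+    // as a single AtomicToken::GlobPattern rather than as a plain Word atom.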
+ match expand_syntax(&BarePatternShape, token_nodes, context) { + // If we didn't find a bare path + Err(_) => {} + Ok(span) => return Ok(AtomicToken::GlobPattern { pattern: span }.spanned(span)), + } + + // The next token corresponds to at most one atomic token + + // We need to `peek` because `parse_single_node` doesn't cover all of the + // cases that `expand_atom` covers. We should probably collapse the two + // if possible. + let peeked = token_nodes.peek_any().not_eof(expected)?; + + match peeked.node { + TokenNode::Token(_) => { + // handle this next + } + + TokenNode::Error(error) => { + peeked.commit(); + return Ok(AtomicToken::Error { + error: error.clone(), + } + .spanned(error.span)); + } + + // [ ... ] + TokenNode::Delimited(Spanned { + item: + DelimitedNode { + delimiter: Delimiter::Square, + spans, + children, + }, + span, + }) => { + peeked.commit(); + let span = *span; + return Ok(AtomicToken::SquareDelimited { + nodes: children, + spans: *spans, + } + .spanned(span)); + } + + TokenNode::Flag(Spanned { + item: + Flag { + kind: FlagKind::Shorthand, + name, + }, + span, + }) => { + peeked.commit(); + return Ok(AtomicToken::ShorthandFlag { name: *name }.spanned(*span)); + } + + TokenNode::Flag(Spanned { + item: + Flag { + kind: FlagKind::Longhand, + name, + }, + span, + }) => { + peeked.commit(); + return Ok(AtomicToken::ShorthandFlag { name: *name }.spanned(*span)); + } + + // If we see whitespace, process the whitespace according to the whitespace + // handling rules + TokenNode::Whitespace(span) => match rule.whitespace { + // if whitespace is allowed, return a whitespace token + WhitespaceHandling::AllowWhitespace => { + peeked.commit(); + return Ok(AtomicToken::Whitespace { text: *span }.spanned(*span)); + } + + // if whitespace is disallowed, return an error + WhitespaceHandling::RejectWhitespace => { + return Err(ShellError::syntax_error("Unexpected whitespace".tagged( + Tag { + span: *span, + anchor: None, + }, + ))) + } + }, + + other => { + let span = peeked.node.span(); + + peeked.commit(); + return Ok(AtomicToken::Error { + error: ShellError::type_error("token", other.tagged_type_name()).spanned(span), + } + .spanned(span)); + } + } + + parse_single_node(token_nodes, expected, |token, token_span, err| { + Ok(match token { + // First, the error cases. 
Each error case corresponds to a expansion rule + // flag that can be used to allow the case + + // rule.allow_operator + RawToken::Operator(_) if !rule.allow_operator => return Err(err.error()), + // rule.allow_external_command + RawToken::ExternalCommand(_) if !rule.allow_external_command => { + return Err(ShellError::type_error( + expected, + token.type_name().tagged(Tag { + span: token_span, + anchor: None, + }), + )) + } + // rule.allow_external_word + RawToken::ExternalWord if !rule.allow_external_word => { + return Err(ShellError::invalid_external_word(Tag { + span: token_span, + anchor: None, + })) + } + + RawToken::Number(number) => AtomicToken::Number { number }.spanned(token_span), + RawToken::Operator(_) => AtomicToken::Operator { text: token_span }.spanned(token_span), + RawToken::String(body) => AtomicToken::String { body }.spanned(token_span), + RawToken::Variable(name) if name.slice(context.source) == "it" => { + AtomicToken::ItVariable { name }.spanned(token_span) + } + RawToken::Variable(name) => AtomicToken::Variable { name }.spanned(token_span), + RawToken::ExternalCommand(command) => { + AtomicToken::ExternalCommand { command }.spanned(token_span) + } + RawToken::ExternalWord => { + AtomicToken::ExternalWord { text: token_span }.spanned(token_span) + } + RawToken::GlobPattern => AtomicToken::GlobPattern { + pattern: token_span, + } + .spanned(token_span), + RawToken::Bare => AtomicToken::Word { text: token_span }.spanned(token_span), + }) + }) +} diff --git a/src/parser/hir/syntax_shape/expression/delimited.rs b/src/parser/hir/syntax_shape/expression/delimited.rs new file mode 100644 index 0000000000..8cd1e9805a --- /dev/null +++ b/src/parser/hir/syntax_shape/expression/delimited.rs @@ -0,0 +1,84 @@ +use crate::parser::hir::syntax_shape::{ + color_syntax, expand_syntax, ColorSyntax, ExpandContext, ExpressionListShape, TokenNode, +}; +use crate::parser::{hir, hir::TokensIterator, Delimiter, FlatShape}; +use crate::prelude::*; + +pub fn expand_delimited_square( + children: &Vec, + span: Span, + context: &ExpandContext, +) -> Result { + let mut tokens = TokensIterator::new(&children, span, false); + + let list = expand_syntax(&ExpressionListShape, &mut tokens, context); + + Ok(hir::Expression::list(list?, Tag { span, anchor: None })) +} + +#[cfg(not(coloring_in_tokens))] +pub fn color_delimited_square( + (open, close): (Span, Span), + children: &Vec, + span: Span, + context: &ExpandContext, + shapes: &mut Vec>, +) { + shapes.push(FlatShape::OpenDelimiter(Delimiter::Square).spanned(open)); + let mut tokens = TokensIterator::new(&children, span, false); + let _list = color_syntax(&ExpressionListShape, &mut tokens, context, shapes); + shapes.push(FlatShape::CloseDelimiter(Delimiter::Square).spanned(close)); +} + +#[cfg(coloring_in_tokens)] +pub fn color_delimited_square( + (open, close): (Span, Span), + token_nodes: &mut TokensIterator, + _span: Span, + context: &ExpandContext, +) { + token_nodes.color_shape(FlatShape::OpenDelimiter(Delimiter::Square).spanned(open)); + let _list = color_syntax(&ExpressionListShape, token_nodes, context); + token_nodes.color_shape(FlatShape::CloseDelimiter(Delimiter::Square).spanned(close)); +} + +#[derive(Debug, Copy, Clone)] +pub struct DelimitedShape; + +#[cfg(not(coloring_in_tokens))] +impl ColorSyntax for DelimitedShape { + type Info = (); + type Input = (Delimiter, Span, Span); + fn color_syntax<'a, 'b>( + &self, + (delimiter, open, close): &(Delimiter, Span, Span), + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + 
shapes: &mut Vec>, + ) -> Self::Info { + shapes.push(FlatShape::OpenDelimiter(*delimiter).spanned(*open)); + color_syntax(&ExpressionListShape, token_nodes, context, shapes); + shapes.push(FlatShape::CloseDelimiter(*delimiter).spanned(*close)); + } +} + +#[cfg(coloring_in_tokens)] +impl ColorSyntax for DelimitedShape { + type Info = (); + type Input = (Delimiter, Span, Span); + + fn name(&self) -> &'static str { + "DelimitedShape" + } + + fn color_syntax<'a, 'b>( + &self, + (delimiter, open, close): &(Delimiter, Span, Span), + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + ) -> Self::Info { + token_nodes.color_shape(FlatShape::OpenDelimiter(*delimiter).spanned(*open)); + color_syntax(&ExpressionListShape, token_nodes, context); + token_nodes.color_shape(FlatShape::CloseDelimiter(*delimiter).spanned(*close)); + } +} diff --git a/src/parser/hir/syntax_shape/expression/file_path.rs b/src/parser/hir/syntax_shape/expression/file_path.rs new file mode 100644 index 0000000000..f0e5ee0079 --- /dev/null +++ b/src/parser/hir/syntax_shape/expression/file_path.rs @@ -0,0 +1,114 @@ +use crate::parser::hir::syntax_shape::expression::atom::{expand_atom, AtomicToken, ExpansionRule}; +use crate::parser::hir::syntax_shape::{ + expression::expand_file_path, ExpandContext, ExpandExpression, FallibleColorSyntax, FlatShape, +}; +use crate::parser::{hir, hir::TokensIterator}; +use crate::prelude::*; + +#[derive(Debug, Copy, Clone)] +pub struct FilePathShape; + +#[cfg(not(coloring_in_tokens))] +impl FallibleColorSyntax for FilePathShape { + type Info = (); + type Input = (); + + fn color_syntax<'a, 'b>( + &self, + _input: &(), + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + shapes: &mut Vec>, + ) -> Result<(), ShellError> { + let atom = expand_atom( + token_nodes, + "file path", + context, + ExpansionRule::permissive(), + ); + + let atom = match atom { + Err(_) => return Ok(()), + Ok(atom) => atom, + }; + + match atom.item { + AtomicToken::Word { .. } + | AtomicToken::String { .. } + | AtomicToken::Number { .. } + | AtomicToken::Size { .. } => { + shapes.push(FlatShape::Path.spanned(atom.span)); + } + + _ => atom.color_tokens(shapes), + } + + Ok(()) + } +} + +#[cfg(coloring_in_tokens)] +impl FallibleColorSyntax for FilePathShape { + type Info = (); + type Input = (); + + fn name(&self) -> &'static str { + "FilePathShape" + } + + fn color_syntax<'a, 'b>( + &self, + _input: &(), + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + ) -> Result<(), ShellError> { + let atom = expand_atom( + token_nodes, + "file path", + context, + ExpansionRule::permissive(), + ); + + let atom = match atom { + Err(_) => return Ok(()), + Ok(atom) => atom, + }; + + match atom.item { + AtomicToken::Word { .. } + | AtomicToken::String { .. } + | AtomicToken::Number { .. } + | AtomicToken::Size { .. } => { + token_nodes.color_shape(FlatShape::Path.spanned(atom.span)); + } + + _ => token_nodes.mutate_shapes(|shapes| atom.color_tokens(shapes)), + } + + Ok(()) + } +} + +impl ExpandExpression for FilePathShape { + fn expand_expr<'a, 'b>( + &self, + token_nodes: &mut TokensIterator<'_>, + context: &ExpandContext, + ) -> Result { + let atom = expand_atom(token_nodes, "file path", context, ExpansionRule::new())?; + + match atom.item { + AtomicToken::Word { text: body } | AtomicToken::String { body } => { + let path = expand_file_path(body.slice(context.source), context); + return Ok(hir::Expression::file_path(path, atom.span)); + } + + AtomicToken::Number { .. 
} | AtomicToken::Size { .. } => { + let path = atom.span.slice(context.source); + return Ok(hir::Expression::file_path(path, atom.span)); + } + + _ => return atom.into_hir(context, "file path"), + } + } +} diff --git a/src/parser/hir/syntax_shape/expression/list.rs b/src/parser/hir/syntax_shape/expression/list.rs new file mode 100644 index 0000000000..51a6b852ca --- /dev/null +++ b/src/parser/hir/syntax_shape/expression/list.rs @@ -0,0 +1,314 @@ +use crate::errors::ShellError; +#[cfg(not(coloring_in_tokens))] +use crate::parser::hir::syntax_shape::FlatShape; +use crate::parser::{ + hir, + hir::syntax_shape::{ + color_fallible_syntax, color_syntax, expand_atom, expand_expr, maybe_spaced, spaced, + AnyExpressionShape, ColorSyntax, ExpandContext, ExpandSyntax, ExpansionRule, + MaybeSpaceShape, SpaceShape, + }, + hir::TokensIterator, +}; +#[cfg(not(coloring_in_tokens))] +use crate::Spanned; + +#[derive(Debug, Copy, Clone)] +pub struct ExpressionListShape; + +impl ExpandSyntax for ExpressionListShape { + type Output = Vec; + + fn expand_syntax<'a, 'b>( + &self, + token_nodes: &mut TokensIterator<'_>, + context: &ExpandContext, + ) -> Result, ShellError> { + let mut exprs = vec![]; + + if token_nodes.at_end_possible_ws() { + return Ok(exprs); + } + + let expr = expand_expr(&maybe_spaced(AnyExpressionShape), token_nodes, context)?; + + exprs.push(expr); + + loop { + if token_nodes.at_end_possible_ws() { + return Ok(exprs); + } + + let expr = expand_expr(&spaced(AnyExpressionShape), token_nodes, context)?; + + exprs.push(expr); + } + } +} + +#[cfg(not(coloring_in_tokens))] +impl ColorSyntax for ExpressionListShape { + type Info = (); + type Input = (); + + /// The intent of this method is to fully color an expression list shape infallibly. + /// This means that if we can't expand a token into an expression, we fall back to + /// a simpler coloring strategy. + /// + /// This would apply to something like `where x >`, which includes an incomplete + /// binary operator. Since we will fail to process it as a binary operator, we'll + /// fall back to a simpler coloring and move on. + fn color_syntax<'a, 'b>( + &self, + _input: &(), + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + shapes: &mut Vec>, + ) { + // We encountered a parsing error and will continue with simpler coloring ("backoff + // coloring mode") + let mut backoff = false; + + // Consume any leading whitespace + color_syntax(&MaybeSpaceShape, token_nodes, context, shapes); + + loop { + // If we reached the very end of the token stream, we're done + if token_nodes.at_end() { + return; + } + + if backoff { + let len = shapes.len(); + + // If we previously encountered a parsing error, use backoff coloring mode + color_syntax(&SimplestExpression, token_nodes, context, shapes); + + if len == shapes.len() && !token_nodes.at_end() { + // This should never happen, but if it does, a panic is better than an infinite loop + panic!("Unexpected tokens left that couldn't be colored even with SimplestExpression") + } + } else { + // Try to color the head of the stream as an expression + match color_fallible_syntax(&AnyExpressionShape, token_nodes, context, shapes) { + // If no expression was found, switch to backoff coloring mode + Err(_) => { + backoff = true; + continue; + } + Ok(_) => {} + } + + // If an expression was found, consume a space + match color_fallible_syntax(&SpaceShape, token_nodes, context, shapes) { + Err(_) => { + // If no space was found, we're either at the end or there's an error. 
+ // Either way, switch to backoff coloring mode. If we're at the end + // it won't have any consequences. + backoff = true; + } + Ok(_) => { + // Otherwise, move on to the next expression + } + } + } + } + } +} + +#[cfg(coloring_in_tokens)] +impl ColorSyntax for ExpressionListShape { + type Info = (); + type Input = (); + + fn name(&self) -> &'static str { + "ExpressionListShape" + } + + /// The intent of this method is to fully color an expression list shape infallibly. + /// This means that if we can't expand a token into an expression, we fall back to + /// a simpler coloring strategy. + /// + /// This would apply to something like `where x >`, which includes an incomplete + /// binary operator. Since we will fail to process it as a binary operator, we'll + /// fall back to a simpler coloring and move on. + fn color_syntax<'a, 'b>( + &self, + _input: &(), + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + ) { + // We encountered a parsing error and will continue with simpler coloring ("backoff + // coloring mode") + let mut backoff = false; + + // Consume any leading whitespace + color_syntax(&MaybeSpaceShape, token_nodes, context); + + loop { + // If we reached the very end of the token stream, we're done + if token_nodes.at_end() { + return; + } + + if backoff { + let len = token_nodes.state().shapes().len(); + + // If we previously encountered a parsing error, use backoff coloring mode + color_syntax(&SimplestExpression, token_nodes, context); + + if len == token_nodes.state().shapes().len() && !token_nodes.at_end() { + // This should never happen, but if it does, a panic is better than an infinite loop + panic!("Unexpected tokens left that couldn't be colored even with SimplestExpression") + } + } else { + // Try to color the head of the stream as an expression + match color_fallible_syntax(&AnyExpressionShape, token_nodes, context) { + // If no expression was found, switch to backoff coloring mode + Err(_) => { + backoff = true; + continue; + } + Ok(_) => {} + } + + // If an expression was found, consume a space + match color_fallible_syntax(&SpaceShape, token_nodes, context) { + Err(_) => { + // If no space was found, we're either at the end or there's an error. + // Either way, switch to backoff coloring mode. If we're at the end + // it won't have any consequences. + backoff = true; + } + Ok(_) => { + // Otherwise, move on to the next expression + } + } + } + } + } +} + +/// BackoffColoringMode consumes all of the remaining tokens in an infallible way +#[derive(Debug, Copy, Clone)] +pub struct BackoffColoringMode; + +#[cfg(not(coloring_in_tokens))] +impl ColorSyntax for BackoffColoringMode { + type Info = (); + type Input = (); + + fn color_syntax<'a, 'b>( + &self, + _input: &Self::Input, + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + shapes: &mut Vec>, + ) -> Self::Info { + loop { + if token_nodes.at_end() { + break; + } + + let len = shapes.len(); + color_syntax(&SimplestExpression, token_nodes, context, shapes); + + if len == shapes.len() && !token_nodes.at_end() { + // This shouldn't happen, but if it does, a panic is better than an infinite loop + panic!("SimplestExpression failed to consume any tokens, but it's not at the end. 
This is unexpected\n== token nodes==\n{:#?}\n\n== shapes ==\n{:#?}", token_nodes, shapes); + } + } + } +} + +#[cfg(coloring_in_tokens)] +impl ColorSyntax for BackoffColoringMode { + type Info = (); + type Input = (); + + fn name(&self) -> &'static str { + "BackoffColoringMode" + } + + fn color_syntax<'a, 'b>( + &self, + _input: &Self::Input, + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + ) -> Self::Info { + loop { + if token_nodes.at_end() { + break; + } + + let len = token_nodes.state().shapes().len(); + color_syntax(&SimplestExpression, token_nodes, context); + + if len == token_nodes.state().shapes().len() && !token_nodes.at_end() { + // This shouldn't happen, but if it does, a panic is better than an infinite loop + panic!("SimplestExpression failed to consume any tokens, but it's not at the end. This is unexpected\n== token nodes==\n{:#?}\n\n== shapes ==\n{:#?}", token_nodes, token_nodes.state().shapes()); + } + } + } +} + +/// The point of `SimplestExpression` is to serve as an infallible base case for coloring. +/// As a last ditch effort, if we can't find any way to parse the head of the stream as an +/// expression, fall back to simple coloring. +#[derive(Debug, Copy, Clone)] +pub struct SimplestExpression; + +#[cfg(not(coloring_in_tokens))] +impl ColorSyntax for SimplestExpression { + type Info = (); + type Input = (); + + fn color_syntax<'a, 'b>( + &self, + _input: &(), + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + shapes: &mut Vec>, + ) { + let atom = expand_atom( + token_nodes, + "any token", + context, + ExpansionRule::permissive(), + ); + + match atom { + Err(_) => {} + Ok(atom) => atom.color_tokens(shapes), + } + } +} + +#[cfg(coloring_in_tokens)] +impl ColorSyntax for SimplestExpression { + type Info = (); + type Input = (); + + fn name(&self) -> &'static str { + "SimplestExpression" + } + + fn color_syntax<'a, 'b>( + &self, + _input: &(), + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + ) { + let atom = expand_atom( + token_nodes, + "any token", + context, + ExpansionRule::permissive(), + ); + + match atom { + Err(_) => {} + Ok(atom) => token_nodes.mutate_shapes(|shapes| atom.color_tokens(shapes)), + } + } +} diff --git a/src/parser/hir/syntax_shape/expression/number.rs b/src/parser/hir/syntax_shape/expression/number.rs new file mode 100644 index 0000000000..d4069478e9 --- /dev/null +++ b/src/parser/hir/syntax_shape/expression/number.rs @@ -0,0 +1,204 @@ +use crate::parser::hir::syntax_shape::{ + expand_atom, parse_single_node, ExpandContext, ExpandExpression, ExpansionRule, + FallibleColorSyntax, FlatShape, +}; +use crate::parser::{ + hir, + hir::{RawNumber, TokensIterator}, + RawToken, +}; +use crate::prelude::*; + +#[derive(Debug, Copy, Clone)] +pub struct NumberShape; + +impl ExpandExpression for NumberShape { + fn expand_expr<'a, 'b>( + &self, + token_nodes: &mut TokensIterator<'_>, + context: &ExpandContext, + ) -> Result { + parse_single_node(token_nodes, "Number", |token, token_span, err| { + Ok(match token { + RawToken::GlobPattern | RawToken::Operator(..) 
=> return Err(err.error()), + RawToken::Variable(tag) if tag.slice(context.source) == "it" => { + hir::Expression::it_variable(tag, token_span) + } + RawToken::ExternalCommand(tag) => { + hir::Expression::external_command(tag, token_span) + } + RawToken::ExternalWord => { + return Err(ShellError::invalid_external_word(Tag { + span: token_span, + anchor: None, + })) + } + RawToken::Variable(tag) => hir::Expression::variable(tag, token_span), + RawToken::Number(number) => { + hir::Expression::number(number.to_number(context.source), token_span) + } + RawToken::Bare => hir::Expression::bare(token_span), + RawToken::String(tag) => hir::Expression::string(tag, token_span), + }) + }) + } +} + +#[cfg(not(coloring_in_tokens))] +impl FallibleColorSyntax for NumberShape { + type Info = (); + type Input = (); + + fn color_syntax<'a, 'b>( + &self, + _input: &(), + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + shapes: &mut Vec>, + ) -> Result<(), ShellError> { + let atom = token_nodes.spanned(|token_nodes| { + expand_atom(token_nodes, "number", context, ExpansionRule::permissive()) + }); + + let atom = match atom { + Spanned { item: Err(_), span } => { + shapes.push(FlatShape::Error.spanned(span)); + return Ok(()); + } + Spanned { item: Ok(atom), .. } => atom, + }; + + atom.color_tokens(shapes); + + Ok(()) + } +} + +#[cfg(coloring_in_tokens)] +impl FallibleColorSyntax for NumberShape { + type Info = (); + type Input = (); + + fn name(&self) -> &'static str { + "NumberShape" + } + + fn color_syntax<'a, 'b>( + &self, + _input: &(), + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + ) -> Result<(), ShellError> { + let atom = token_nodes.spanned(|token_nodes| { + expand_atom(token_nodes, "number", context, ExpansionRule::permissive()) + }); + + let atom = match atom { + Spanned { item: Err(_), span } => { + token_nodes.color_shape(FlatShape::Error.spanned(span)); + return Ok(()); + } + Spanned { item: Ok(atom), .. } => atom, + }; + + token_nodes.mutate_shapes(|shapes| atom.color_tokens(shapes)); + + Ok(()) + } +} + +#[derive(Debug, Copy, Clone)] +pub struct IntShape; + +impl ExpandExpression for IntShape { + fn expand_expr<'a, 'b>( + &self, + token_nodes: &mut TokensIterator<'_>, + context: &ExpandContext, + ) -> Result { + parse_single_node(token_nodes, "Integer", |token, token_span, err| { + Ok(match token { + RawToken::GlobPattern | RawToken::Operator(..) 
=> return Err(err.error()), + RawToken::ExternalWord => { + return Err(ShellError::invalid_external_word(token_span)) + } + RawToken::Variable(span) if span.slice(context.source) == "it" => { + hir::Expression::it_variable(span, token_span) + } + RawToken::ExternalCommand(span) => { + hir::Expression::external_command(span, token_span) + } + RawToken::Variable(span) => hir::Expression::variable(span, token_span), + RawToken::Number(number @ RawNumber::Int(_)) => { + hir::Expression::number(number.to_number(context.source), token_span) + } + RawToken::Number(_) => return Err(err.error()), + RawToken::Bare => hir::Expression::bare(token_span), + RawToken::String(span) => hir::Expression::string(span, token_span), + }) + }) + } +} + +#[cfg(not(coloring_in_tokens))] +impl FallibleColorSyntax for IntShape { + type Info = (); + type Input = (); + + fn color_syntax<'a, 'b>( + &self, + _input: &(), + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + shapes: &mut Vec>, + ) -> Result<(), ShellError> { + let atom = token_nodes.spanned(|token_nodes| { + expand_atom(token_nodes, "integer", context, ExpansionRule::permissive()) + }); + + let atom = match atom { + Spanned { item: Err(_), span } => { + shapes.push(FlatShape::Error.spanned(span)); + return Ok(()); + } + Spanned { item: Ok(atom), .. } => atom, + }; + + atom.color_tokens(shapes); + + Ok(()) + } +} + +#[cfg(coloring_in_tokens)] +impl FallibleColorSyntax for IntShape { + type Info = (); + type Input = (); + + fn name(&self) -> &'static str { + "IntShape" + } + + fn color_syntax<'a, 'b>( + &self, + _input: &(), + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + ) -> Result<(), ShellError> { + let atom = token_nodes.spanned(|token_nodes| { + expand_atom(token_nodes, "integer", context, ExpansionRule::permissive()) + }); + + let atom = match atom { + Spanned { item: Err(_), span } => { + token_nodes.color_shape(FlatShape::Error.spanned(span)); + return Ok(()); + } + Spanned { item: Ok(atom), .. } => atom, + }; + + token_nodes.mutate_shapes(|shapes| atom.color_tokens(shapes)); + + Ok(()) + } +} diff --git a/src/parser/hir/syntax_shape/expression/pattern.rs b/src/parser/hir/syntax_shape/expression/pattern.rs new file mode 100644 index 0000000000..ed3bd610cd --- /dev/null +++ b/src/parser/hir/syntax_shape/expression/pattern.rs @@ -0,0 +1,116 @@ +use crate::parser::hir::syntax_shape::{ + expand_atom, expand_bare, expression::expand_file_path, AtomicToken, ExpandContext, + ExpandExpression, ExpandSyntax, ExpansionRule, FallibleColorSyntax, FlatShape, +}; +use crate::parser::{hir, hir::TokensIterator, Operator, RawToken, TokenNode}; +use crate::prelude::*; + +#[derive(Debug, Copy, Clone)] +pub struct PatternShape; + +#[cfg(not(coloring_in_tokens))] +impl FallibleColorSyntax for PatternShape { + type Info = (); + type Input = (); + + fn color_syntax<'a, 'b>( + &self, + _input: &(), + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + shapes: &mut Vec>, + ) -> Result<(), ShellError> { + token_nodes.atomic(|token_nodes| { + let atom = expand_atom(token_nodes, "pattern", context, ExpansionRule::permissive())?; + + match &atom.item { + AtomicToken::GlobPattern { .. } | AtomicToken::Word { .. 
} => { + shapes.push(FlatShape::GlobPattern.spanned(atom.span)); + Ok(()) + } + + _ => Err(ShellError::type_error("pattern", atom.tagged_type_name())), + } + }) + } +} + +#[cfg(coloring_in_tokens)] +impl FallibleColorSyntax for PatternShape { + type Info = (); + type Input = (); + + fn name(&self) -> &'static str { + "PatternShape" + } + + fn color_syntax<'a, 'b>( + &self, + _input: &(), + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + ) -> Result<(), ShellError> { + token_nodes.atomic(|token_nodes| { + let atom = expand_atom(token_nodes, "pattern", context, ExpansionRule::permissive())?; + + match &atom.item { + AtomicToken::GlobPattern { .. } | AtomicToken::Word { .. } => { + token_nodes.color_shape(FlatShape::GlobPattern.spanned(atom.span)); + Ok(()) + } + + _ => Err(ShellError::type_error("pattern", atom.tagged_type_name())), + } + }) + } +} + +impl ExpandExpression for PatternShape { + fn expand_expr<'a, 'b>( + &self, + token_nodes: &mut TokensIterator<'_>, + context: &ExpandContext, + ) -> Result { + let atom = expand_atom(token_nodes, "pattern", context, ExpansionRule::new())?; + + match atom.item { + AtomicToken::Word { text: body } + | AtomicToken::String { body } + | AtomicToken::GlobPattern { pattern: body } => { + let path = expand_file_path(body.slice(context.source), context); + return Ok(hir::Expression::pattern(path.to_string_lossy(), atom.span)); + } + _ => return atom.into_hir(context, "pattern"), + } + } +} + +#[derive(Debug, Copy, Clone)] +pub struct BarePatternShape; + +impl ExpandSyntax for BarePatternShape { + type Output = Span; + + fn expand_syntax<'a, 'b>( + &self, + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + ) -> Result { + expand_bare(token_nodes, context, |token| match token { + TokenNode::Token(Spanned { + item: RawToken::Bare, + .. + }) + | TokenNode::Token(Spanned { + item: RawToken::Operator(Operator::Dot), + .. + }) + | TokenNode::Token(Spanned { + item: RawToken::GlobPattern, + .. + }) => true, + + _ => false, + }) + } +} diff --git a/src/parser/hir/syntax_shape/expression/string.rs b/src/parser/hir/syntax_shape/expression/string.rs new file mode 100644 index 0000000000..46015376e8 --- /dev/null +++ b/src/parser/hir/syntax_shape/expression/string.rs @@ -0,0 +1,125 @@ +use crate::parser::hir::syntax_shape::{ + expand_atom, expand_variable, parse_single_node, AtomicToken, ExpandContext, ExpandExpression, + ExpansionRule, FallibleColorSyntax, FlatShape, TestSyntax, +}; +use crate::parser::hir::tokens_iterator::Peeked; +use crate::parser::{hir, hir::TokensIterator, RawToken}; +use crate::prelude::*; + +#[derive(Debug, Copy, Clone)] +pub struct StringShape; + +#[cfg(not(coloring_in_tokens))] +impl FallibleColorSyntax for StringShape { + type Info = (); + type Input = FlatShape; + + fn color_syntax<'a, 'b>( + &self, + input: &FlatShape, + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + shapes: &mut Vec>, + ) -> Result<(), ShellError> { + let atom = expand_atom(token_nodes, "string", context, ExpansionRule::permissive()); + + let atom = match atom { + Err(_) => return Ok(()), + Ok(atom) => atom, + }; + + match atom { + Spanned { + item: AtomicToken::String { .. 
}, + span, + } => shapes.push((*input).spanned(span)), + other => other.color_tokens(shapes), + } + + Ok(()) + } +} + +#[cfg(coloring_in_tokens)] +impl FallibleColorSyntax for StringShape { + type Info = (); + type Input = FlatShape; + + fn name(&self) -> &'static str { + "StringShape" + } + + fn color_syntax<'a, 'b>( + &self, + input: &FlatShape, + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + ) -> Result<(), ShellError> { + let atom = expand_atom(token_nodes, "string", context, ExpansionRule::permissive()); + + let atom = match atom { + Err(_) => return Ok(()), + Ok(atom) => atom, + }; + + match atom { + Spanned { + item: AtomicToken::String { .. }, + span, + } => token_nodes.color_shape((*input).spanned(span)), + atom => token_nodes.mutate_shapes(|shapes| atom.color_tokens(shapes)), + } + + Ok(()) + } +} + +impl ExpandExpression for StringShape { + fn expand_expr<'a, 'b>( + &self, + token_nodes: &mut TokensIterator<'_>, + context: &ExpandContext, + ) -> Result { + parse_single_node(token_nodes, "String", |token, token_span, _| { + Ok(match token { + RawToken::GlobPattern => { + return Err(ShellError::type_error( + "String", + "glob pattern".tagged(token_span), + )) + } + RawToken::Operator(..) => { + return Err(ShellError::type_error( + "String", + "operator".tagged(token_span), + )) + } + RawToken::Variable(span) => expand_variable(span, token_span, &context.source), + RawToken::ExternalCommand(span) => { + hir::Expression::external_command(span, token_span) + } + RawToken::ExternalWord => { + return Err(ShellError::invalid_external_word(token_span)) + } + RawToken::Number(_) => hir::Expression::bare(token_span), + RawToken::Bare => hir::Expression::bare(token_span), + RawToken::String(span) => hir::Expression::string(span, token_span), + }) + }) + } +} + +impl TestSyntax for StringShape { + fn test<'a, 'b>( + &self, + token_nodes: &'b mut TokensIterator<'a>, + _context: &ExpandContext, + ) -> Option> { + let peeked = token_nodes.peek_any(); + + match peeked.node { + Some(token) if token.is_string() => Some(peeked), + _ => None, + } + } +} diff --git a/src/parser/hir/syntax_shape/expression/unit.rs b/src/parser/hir/syntax_shape/expression/unit.rs new file mode 100644 index 0000000000..2c01038ebc --- /dev/null +++ b/src/parser/hir/syntax_shape/expression/unit.rs @@ -0,0 +1,92 @@ +use crate::data::meta::Span; +use crate::parser::hir::syntax_shape::{ExpandContext, ExpandSyntax}; +use crate::parser::parse::tokens::RawNumber; +use crate::parser::parse::unit::Unit; +use crate::parser::{hir::TokensIterator, RawToken, TokenNode}; +use crate::prelude::*; +use nom::branch::alt; +use nom::bytes::complete::tag; +use nom::character::complete::digit1; +use nom::combinator::{all_consuming, opt, value}; +use nom::IResult; + +#[derive(Debug, Copy, Clone)] +pub struct UnitShape; + +impl ExpandSyntax for UnitShape { + type Output = Spanned<(Spanned, Spanned)>; + + fn expand_syntax<'a, 'b>( + &self, + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + ) -> Result, Spanned)>, ShellError> { + let peeked = token_nodes.peek_any().not_eof("unit")?; + + let span = match peeked.node { + TokenNode::Token(Spanned { + item: RawToken::Bare, + span, + }) => span, + _ => return Err(peeked.type_error("unit")), + }; + + let unit = unit_size(span.slice(context.source), *span); + + let (_, (number, unit)) = match unit { + Err(_) => { + return Err(ShellError::type_error( + "unit", + "word".tagged(Tag::unknown()), + )) + } + Ok((number, unit)) => (number, unit), + }; + + 
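
// Sketch (hypothetical, simplified; not from this patch): `unit_size` below splits a
// bare token such as `10.5MB` into a number span and a unit span using nom. The same
// splitting idea with std only, over owned values instead of spans, using an invented
// `split_size` helper:
fn split_size(bare: &str) -> Option<(f64, &str)> {
    // The number is the leading run of digits, with an optional `.` followed by digits.
    let cut = bare
        .find(|c: char| !c.is_ascii_digit() && c != '.')
        .unwrap_or(bare.len());
    let (number, unit) = bare.split_at(cut);
    let number: f64 = number.parse().ok()?;

    // The remainder must be a known size suffix. For brevity this sketch is fully
    // case-insensitive; the patch itself accepts a fixed set of spellings.
    match unit.to_ascii_uppercase().as_str() {
        "B" | "KB" | "MB" | "GB" | "TB" | "PB" => Some((number, unit)),
        _ => None,
    }
}
// split_size("10MB")   == Some((10.0, "MB"))
// split_size("3.14gb") == Some((3.14, "gb"))
// split_size("10Z")    == None
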
peeked.commit(); + Ok((number, unit).spanned(*span)) + } +} + +fn unit_size(input: &str, bare_span: Span) -> IResult<&str, (Spanned, Spanned)> { + let (input, digits) = digit1(input)?; + + let (input, dot) = opt(tag("."))(input)?; + + let (input, number) = match dot { + Some(dot) => { + let (input, rest) = digit1(input)?; + ( + input, + RawNumber::decimal(Span::new( + bare_span.start(), + bare_span.start() + digits.len() + dot.len() + rest.len(), + )), + ) + } + + None => ( + input, + RawNumber::int(Span::new( + bare_span.start(), + bare_span.start() + digits.len(), + )), + ), + }; + + let (input, unit) = all_consuming(alt(( + value(Unit::B, alt((tag("B"), tag("b")))), + value(Unit::KB, alt((tag("KB"), tag("kb"), tag("Kb")))), + value(Unit::MB, alt((tag("MB"), tag("mb"), tag("Mb")))), + value(Unit::GB, alt((tag("GB"), tag("gb"), tag("Gb")))), + value(Unit::TB, alt((tag("TB"), tag("tb"), tag("Tb")))), + value(Unit::PB, alt((tag("PB"), tag("pb"), tag("Pb")))), + )))(input)?; + + let start_span = number.span.end(); + + Ok(( + input, + (number, unit.spanned(Span::new(start_span, bare_span.end()))), + )) +} diff --git a/src/parser/hir/syntax_shape/expression/variable_path.rs b/src/parser/hir/syntax_shape/expression/variable_path.rs new file mode 100644 index 0000000000..5ed615a9e8 --- /dev/null +++ b/src/parser/hir/syntax_shape/expression/variable_path.rs @@ -0,0 +1,1071 @@ +use crate::parser::hir::syntax_shape::{ + color_fallible_syntax, color_fallible_syntax_with, expand_atom, expand_expr, expand_syntax, + parse_single_node, AnyExpressionShape, AtomicToken, BareShape, ExpandContext, ExpandExpression, + ExpandSyntax, ExpansionRule, FallibleColorSyntax, FlatShape, Peeked, SkipSyntax, StringShape, + TestSyntax, WhitespaceShape, +}; +use crate::parser::{hir, hir::Expression, hir::TokensIterator, Operator, RawToken}; +use crate::prelude::*; + +#[derive(Debug, Copy, Clone)] +pub struct VariablePathShape; + +impl ExpandExpression for VariablePathShape { + fn expand_expr<'a, 'b>( + &self, + token_nodes: &mut TokensIterator<'_>, + context: &ExpandContext, + ) -> Result { + // 1. let the head be the first token, expecting a variable + // 2. let the tail be an empty list of members + // 2. while the next token (excluding ws) is a dot: + // 1. consume the dot + // 2. 
consume the next token as a member and push it onto tail + + let head = expand_expr(&VariableShape, token_nodes, context)?; + let start = head.span; + let mut end = start; + let mut tail: Vec> = vec![]; + + loop { + match DotShape.skip(token_nodes, context) { + Err(_) => break, + Ok(_) => {} + } + + let syntax = expand_syntax(&MemberShape, token_nodes, context)?; + let member = syntax.to_spanned_string(context.source); + + end = member.span; + tail.push(member); + } + + Ok(hir::Expression::path(head, tail, start.until(end))) + } +} + +#[cfg(not(coloring_in_tokens))] +impl FallibleColorSyntax for VariablePathShape { + type Info = (); + type Input = (); + + fn color_syntax<'a, 'b>( + &self, + _input: &(), + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + shapes: &mut Vec>, + ) -> Result<(), ShellError> { + token_nodes.atomic(|token_nodes| { + // If the head of the token stream is not a variable, fail + color_fallible_syntax(&VariableShape, token_nodes, context, shapes)?; + + loop { + // look for a dot at the head of a stream + let dot = color_fallible_syntax_with( + &ColorableDotShape, + &FlatShape::Dot, + token_nodes, + context, + shapes, + ); + + // if there's no dot, we're done + match dot { + Err(_) => break, + Ok(_) => {} + } + + // otherwise, look for a member, and if you don't find one, fail + color_fallible_syntax(&MemberShape, token_nodes, context, shapes)?; + } + + Ok(()) + }) + } +} + +#[cfg(coloring_in_tokens)] +impl FallibleColorSyntax for VariablePathShape { + type Info = (); + type Input = (); + + fn name(&self) -> &'static str { + "VariablePathShape" + } + + fn color_syntax<'a, 'b>( + &self, + _input: &(), + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + ) -> Result<(), ShellError> { + token_nodes.atomic(|token_nodes| { + // If the head of the token stream is not a variable, fail + color_fallible_syntax(&VariableShape, token_nodes, context)?; + + loop { + // look for a dot at the head of a stream + let dot = color_fallible_syntax_with( + &ColorableDotShape, + &FlatShape::Dot, + token_nodes, + context, + ); + + // if there's no dot, we're done + match dot { + Err(_) => break, + Ok(_) => {} + } + + // otherwise, look for a member, and if you don't find one, fail + color_fallible_syntax(&MemberShape, token_nodes, context)?; + } + + Ok(()) + }) + } +} + +#[derive(Debug, Copy, Clone)] +pub struct PathTailShape; + +#[cfg(not(coloring_in_tokens))] +/// The failure mode of `PathTailShape` is a dot followed by a non-member +impl FallibleColorSyntax for PathTailShape { + type Info = (); + type Input = (); + + fn color_syntax<'a, 'b>( + &self, + _input: &(), + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + shapes: &mut Vec>, + ) -> Result<(), ShellError> { + token_nodes.atomic(|token_nodes| loop { + let result = color_fallible_syntax_with( + &ColorableDotShape, + &FlatShape::Dot, + token_nodes, + context, + shapes, + ); + + match result { + Err(_) => return Ok(()), + Ok(_) => {} + } + + // If we've seen a dot but not a member, fail + color_fallible_syntax(&MemberShape, token_nodes, context, shapes)?; + }) + } +} + +#[cfg(coloring_in_tokens)] +/// The failure mode of `PathTailShape` is a dot followed by a non-member +impl FallibleColorSyntax for PathTailShape { + type Info = (); + type Input = (); + + fn name(&self) -> &'static str { + "PathTailShape" + } + + fn color_syntax<'a, 'b>( + &self, + _input: &(), + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + ) -> Result<(), ShellError> { + 
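
// Sketch (hypothetical, simplified; not from this patch): `token_nodes.atomic(..)`
// just below runs the closure against a checkpoint of the iterator; if the closure
// returns Err, the cursor (and, under coloring_in_tokens, any shapes colored inside
// it) is restored. The checkpoint-and-rollback pattern over an invented plain index
// cursor:
struct Cursor {
    index: usize,
}

impl Cursor {
    fn atomic<T, E>(
        &mut self,
        block: impl FnOnce(&mut Cursor) -> Result<T, E>,
    ) -> Result<T, E> {
        let saved = self.index; // take the checkpoint
        match block(self) {
            Ok(value) => Ok(value), // success: keep everything the block consumed
            Err(err) => {
                self.index = saved; // failure: restore the checkpoint
                Err(err)
            }
        }
    }
}
// let mut cursor = Cursor { index: 0 };
// assert!(cursor.atomic(|c| { c.index += 2; Err::<(), ()>(()) }).is_err());
// assert_eq!(cursor.index, 0); // the failed block left no trace
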
token_nodes.atomic(|token_nodes| loop { + let result = color_fallible_syntax_with( + &ColorableDotShape, + &FlatShape::Dot, + token_nodes, + context, + ); + + match result { + Err(_) => return Ok(()), + Ok(_) => {} + } + + // If we've seen a dot but not a member, fail + color_fallible_syntax(&MemberShape, token_nodes, context)?; + }) + } +} + +impl ExpandSyntax for PathTailShape { + type Output = (Vec>, Span); + fn expand_syntax<'a, 'b>( + &self, + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + ) -> Result { + let mut end: Option = None; + let mut tail = vec![]; + + loop { + match DotShape.skip(token_nodes, context) { + Err(_) => break, + Ok(_) => {} + } + + let syntax = expand_syntax(&MemberShape, token_nodes, context)?; + let member = syntax.to_spanned_string(context.source); + end = Some(member.span); + tail.push(member); + } + + match end { + None => { + return Err(ShellError::type_error("path tail", { + let typed_span = token_nodes.typed_span_at_cursor(); + + Tagged { + tag: typed_span.span.into(), + item: typed_span.item, + } + })) + } + + Some(end) => Ok((tail, end)), + } + } +} + +#[derive(Debug)] +pub enum ExpressionContinuation { + DotSuffix(Span, Spanned), + InfixSuffix(Spanned, Expression), +} + +/// An expression continuation +#[derive(Debug, Copy, Clone)] +pub struct ExpressionContinuationShape; + +impl ExpandSyntax for ExpressionContinuationShape { + type Output = ExpressionContinuation; + + fn expand_syntax<'a, 'b>( + &self, + token_nodes: &mut TokensIterator<'_>, + context: &ExpandContext, + ) -> Result { + // Try to expand a `.` + let dot = expand_syntax(&DotShape, token_nodes, context); + + match dot { + // If a `.` was matched, it's a `Path`, and we expect a `Member` next + Ok(dot) => { + let syntax = expand_syntax(&MemberShape, token_nodes, context)?; + let member = syntax.to_spanned_string(context.source); + + Ok(ExpressionContinuation::DotSuffix(dot, member)) + } + + // Otherwise, we expect an infix operator and an expression next + Err(_) => { + let (_, op, _) = expand_syntax(&InfixShape, token_nodes, context)?; + let next = expand_expr(&AnyExpressionShape, token_nodes, context)?; + + Ok(ExpressionContinuation::InfixSuffix(op, next)) + } + } + } +} + +pub enum ContinuationInfo { + Dot, + Infix, +} + +#[cfg(not(coloring_in_tokens))] +impl FallibleColorSyntax for ExpressionContinuationShape { + type Info = ContinuationInfo; + type Input = (); + + fn color_syntax<'a, 'b>( + &self, + _input: &(), + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + shapes: &mut Vec>, + ) -> Result { + token_nodes.atomic(|token_nodes| { + // Try to expand a `.` + let dot = color_fallible_syntax_with( + &ColorableDotShape, + &FlatShape::Dot, + token_nodes, + context, + shapes, + ); + + match dot { + Ok(_) => { + // we found a dot, so let's keep looking for a member; if no member was found, fail + color_fallible_syntax(&MemberShape, token_nodes, context, shapes)?; + + Ok(ContinuationInfo::Dot) + } + Err(_) => { + let mut new_shapes = vec![]; + let result = token_nodes.atomic(|token_nodes| { + // we didn't find a dot, so let's see if we're looking at an infix. If not found, fail + color_fallible_syntax(&InfixShape, token_nodes, context, &mut new_shapes)?; + + // now that we've seen an infix shape, look for any expression. 
If not found, fail + color_fallible_syntax( + &AnyExpressionShape, + token_nodes, + context, + &mut new_shapes, + )?; + + Ok(ContinuationInfo::Infix) + })?; + shapes.extend(new_shapes); + Ok(result) + } + } + }) + } +} + +#[cfg(coloring_in_tokens)] +impl FallibleColorSyntax for ExpressionContinuationShape { + type Info = ContinuationInfo; + type Input = (); + + fn name(&self) -> &'static str { + "ExpressionContinuationShape" + } + + fn color_syntax<'a, 'b>( + &self, + _input: &(), + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + ) -> Result { + token_nodes.atomic(|token_nodes| { + // Try to expand a `.` + let dot = color_fallible_syntax_with( + &ColorableDotShape, + &FlatShape::Dot, + token_nodes, + context, + ); + + match dot { + Ok(_) => { + // we found a dot, so let's keep looking for a member; if no member was found, fail + color_fallible_syntax(&MemberShape, token_nodes, context)?; + + Ok(ContinuationInfo::Dot) + } + Err(_) => { + let result = token_nodes.atomic(|token_nodes| { + // we didn't find a dot, so let's see if we're looking at an infix. If not found, fail + color_fallible_syntax(&InfixShape, token_nodes, context)?; + + // now that we've seen an infix shape, look for any expression. If not found, fail + color_fallible_syntax(&AnyExpressionShape, token_nodes, context)?; + + Ok(ContinuationInfo::Infix) + })?; + + Ok(result) + } + } + }) + } +} + +#[derive(Debug, Copy, Clone)] +pub struct VariableShape; + +impl ExpandExpression for VariableShape { + fn expand_expr<'a, 'b>( + &self, + token_nodes: &mut TokensIterator<'_>, + context: &ExpandContext, + ) -> Result { + parse_single_node(token_nodes, "variable", |token, token_tag, _| { + Ok(match token { + RawToken::Variable(tag) => { + if tag.slice(context.source) == "it" { + hir::Expression::it_variable(tag, token_tag) + } else { + hir::Expression::variable(tag, token_tag) + } + } + _ => { + return Err(ShellError::type_error( + "variable", + token.type_name().tagged(token_tag), + )) + } + }) + }) + } +} + +#[cfg(not(coloring_in_tokens))] +impl FallibleColorSyntax for VariableShape { + type Info = (); + type Input = (); + + fn color_syntax<'a, 'b>( + &self, + _input: &(), + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + shapes: &mut Vec>, + ) -> Result<(), ShellError> { + let atom = expand_atom( + token_nodes, + "variable", + context, + ExpansionRule::permissive(), + ); + + let atom = match atom { + Err(err) => return Err(err), + Ok(atom) => atom, + }; + + match &atom.item { + AtomicToken::Variable { .. } => { + shapes.push(FlatShape::Variable.spanned(atom.span)); + Ok(()) + } + AtomicToken::ItVariable { .. } => { + shapes.push(FlatShape::ItVariable.spanned(atom.span)); + Ok(()) + } + _ => Err(ShellError::type_error("variable", atom.tagged_type_name())), + } + } +} + +#[cfg(coloring_in_tokens)] +impl FallibleColorSyntax for VariableShape { + type Info = (); + type Input = (); + + fn name(&self) -> &'static str { + "VariableShape" + } + + fn color_syntax<'a, 'b>( + &self, + _input: &(), + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + ) -> Result<(), ShellError> { + let atom = expand_atom( + token_nodes, + "variable", + context, + ExpansionRule::permissive(), + ); + + let atom = match atom { + Err(err) => return Err(err), + Ok(atom) => atom, + }; + + match &atom.item { + AtomicToken::Variable { .. } => { + token_nodes.color_shape(FlatShape::Variable.spanned(atom.span)); + Ok(()) + } + AtomicToken::ItVariable { .. 
} => { + token_nodes.color_shape(FlatShape::ItVariable.spanned(atom.span)); + Ok(()) + } + _ => Err(ShellError::type_error("variable", atom.tagged_type_name())), + } + } +} + +#[derive(Debug, Clone, Copy)] +pub enum Member { + String(/* outer */ Span, /* inner */ Span), + Bare(Span), +} + +impl Member { + pub(crate) fn to_expr(&self) -> hir::Expression { + match self { + Member::String(outer, inner) => hir::Expression::string(*inner, *outer), + Member::Bare(span) => hir::Expression::string(*span, *span), + } + } + + pub(crate) fn span(&self) -> Span { + match self { + Member::String(outer, _inner) => *outer, + Member::Bare(span) => *span, + } + } + + pub(crate) fn to_spanned_string(&self, source: &str) -> Spanned { + match self { + Member::String(outer, inner) => inner.string(source).spanned(*outer), + Member::Bare(span) => span.spanned_string(source), + } + } + + pub(crate) fn tagged_type_name(&self) -> Tagged<&'static str> { + match self { + Member::String(outer, _inner) => "string".tagged(outer), + Member::Bare(span) => "word".tagged(Tag { + span: *span, + anchor: None, + }), + } + } +} + +enum ColumnPathState { + Initial, + LeadingDot(Span), + Dot(Span, Vec, Span), + Member(Span, Vec), + Error(ShellError), +} + +impl ColumnPathState { + pub fn dot(self, dot: Span) -> ColumnPathState { + match self { + ColumnPathState::Initial => ColumnPathState::LeadingDot(dot), + ColumnPathState::LeadingDot(_) => { + ColumnPathState::Error(ShellError::type_error("column", "dot".tagged(dot))) + } + ColumnPathState::Dot(..) => { + ColumnPathState::Error(ShellError::type_error("column", "dot".tagged(dot))) + } + ColumnPathState::Member(tag, members) => ColumnPathState::Dot(tag, members, dot), + ColumnPathState::Error(err) => ColumnPathState::Error(err), + } + } + + pub fn member(self, member: Member) -> ColumnPathState { + match self { + ColumnPathState::Initial => ColumnPathState::Member(member.span(), vec![member]), + ColumnPathState::LeadingDot(tag) => { + ColumnPathState::Member(tag.until(member.span()), vec![member]) + } + + ColumnPathState::Dot(tag, mut tags, _) => { + ColumnPathState::Member(tag.until(member.span()), { + tags.push(member); + tags + }) + } + ColumnPathState::Member(..) 
=> { + ColumnPathState::Error(ShellError::type_error("column", member.tagged_type_name())) + } + ColumnPathState::Error(err) => ColumnPathState::Error(err), + } + } + + pub fn into_path(self, next: Peeked) -> Result>, ShellError> { + match self { + ColumnPathState::Initial => Err(next.type_error("column path")), + ColumnPathState::LeadingDot(dot) => { + Err(ShellError::type_error("column", "dot".tagged(dot))) + } + ColumnPathState::Dot(_tag, _members, dot) => { + Err(ShellError::type_error("column", "dot".tagged(dot))) + } + ColumnPathState::Member(tag, tags) => Ok(tags.tagged(tag)), + ColumnPathState::Error(err) => Err(err), + } + } +} + +pub fn expand_column_path<'a, 'b>( + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, +) -> Result>, ShellError> { + let mut state = ColumnPathState::Initial; + + loop { + let member = MemberShape.expand_syntax(token_nodes, context); + + match member { + Err(_) => break, + Ok(member) => state = state.member(member), + } + + let dot = DotShape.expand_syntax(token_nodes, context); + + match dot { + Err(_) => break, + Ok(dot) => state = state.dot(dot), + } + } + + state.into_path(token_nodes.peek_non_ws()) +} + +#[derive(Debug, Copy, Clone)] +pub struct ColumnPathShape; + +#[cfg(not(coloring_in_tokens))] +impl FallibleColorSyntax for ColumnPathShape { + type Info = (); + type Input = (); + + fn color_syntax<'a, 'b>( + &self, + _input: &(), + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + shapes: &mut Vec>, + ) -> Result<(), ShellError> { + // If there's not even one member shape, fail + color_fallible_syntax(&MemberShape, token_nodes, context, shapes)?; + + loop { + let checkpoint = token_nodes.checkpoint(); + + match color_fallible_syntax_with( + &ColorableDotShape, + &FlatShape::Dot, + checkpoint.iterator, + context, + shapes, + ) { + Err(_) => { + // we already saw at least one member shape, so return successfully + return Ok(()); + } + + Ok(_) => { + match color_fallible_syntax(&MemberShape, checkpoint.iterator, context, shapes) + { + Err(_) => { + // we saw a dot but not a member (but we saw at least one member), + // so don't commit the dot but return successfully + return Ok(()); + } + + Ok(_) => { + // we saw a dot and a member, so commit it and continue on + checkpoint.commit(); + } + } + } + } + } + } +} + +#[cfg(coloring_in_tokens)] +impl FallibleColorSyntax for ColumnPathShape { + type Info = (); + type Input = (); + + fn name(&self) -> &'static str { + "ColumnPathShape" + } + + fn color_syntax<'a, 'b>( + &self, + _input: &(), + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + ) -> Result<(), ShellError> { + // If there's not even one member shape, fail + color_fallible_syntax(&MemberShape, token_nodes, context)?; + + loop { + let checkpoint = token_nodes.checkpoint(); + + match color_fallible_syntax_with( + &ColorableDotShape, + &FlatShape::Dot, + checkpoint.iterator, + context, + ) { + Err(_) => { + // we already saw at least one member shape, so return successfully + return Ok(()); + } + + Ok(_) => { + match color_fallible_syntax(&MemberShape, checkpoint.iterator, context) { + Err(_) => { + // we saw a dot but not a member (but we saw at least one member), + // so don't commit the dot but return successfully + return Ok(()); + } + + Ok(_) => { + // we saw a dot and a member, so commit it and continue on + checkpoint.commit(); + } + } + } + } + } + } +} + +impl ExpandSyntax for ColumnPathShape { + type Output = Tagged>; + + fn expand_syntax<'a, 'b>( + &self, + token_nodes: &'b mut 
TokensIterator<'a>, + context: &ExpandContext, + ) -> Result { + expand_column_path(token_nodes, context) + } +} + +#[derive(Debug, Copy, Clone)] +pub struct MemberShape; + +#[cfg(not(coloring_in_tokens))] +impl FallibleColorSyntax for MemberShape { + type Info = (); + type Input = (); + + fn color_syntax<'a, 'b>( + &self, + _input: &(), + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + shapes: &mut Vec>, + ) -> Result<(), ShellError> { + let bare = color_fallible_syntax_with( + &BareShape, + &FlatShape::BareMember, + token_nodes, + context, + shapes, + ); + + match bare { + Ok(_) => return Ok(()), + Err(_) => { + // If we don't have a bare word, we'll look for a string + } + } + + // Look for a string token. If we don't find one, fail + color_fallible_syntax_with( + &StringShape, + &FlatShape::StringMember, + token_nodes, + context, + shapes, + ) + } +} + +#[cfg(coloring_in_tokens)] +impl FallibleColorSyntax for MemberShape { + type Info = (); + type Input = (); + + fn name(&self) -> &'static str { + "MemberShape" + } + + fn color_syntax<'a, 'b>( + &self, + _input: &(), + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + ) -> Result<(), ShellError> { + let bare = + color_fallible_syntax_with(&BareShape, &FlatShape::BareMember, token_nodes, context); + + match bare { + Ok(_) => return Ok(()), + Err(_) => { + // If we don't have a bare word, we'll look for a string + } + } + + // Look for a string token. If we don't find one, fail + color_fallible_syntax_with(&StringShape, &FlatShape::StringMember, token_nodes, context) + } +} + +impl ExpandSyntax for MemberShape { + type Output = Member; + + fn expand_syntax<'a, 'b>( + &self, + token_nodes: &mut TokensIterator<'_>, + context: &ExpandContext, + ) -> Result { + let bare = BareShape.test(token_nodes, context); + if let Some(peeked) = bare { + let node = peeked.not_eof("column")?.commit(); + return Ok(Member::Bare(node.span())); + } + + let string = StringShape.test(token_nodes, context); + + if let Some(peeked) = string { + let node = peeked.not_eof("column")?.commit(); + let (outer, inner) = node.as_string().unwrap(); + + return Ok(Member::String(outer, inner)); + } + + Err(token_nodes.peek_any().type_error("column")) + } +} + +#[derive(Debug, Copy, Clone)] +pub struct DotShape; + +#[derive(Debug, Copy, Clone)] +pub struct ColorableDotShape; + +#[cfg(not(coloring_in_tokens))] +impl FallibleColorSyntax for ColorableDotShape { + type Info = (); + type Input = FlatShape; + + fn color_syntax<'a, 'b>( + &self, + input: &FlatShape, + token_nodes: &'b mut TokensIterator<'a>, + _context: &ExpandContext, + shapes: &mut Vec>, + ) -> Result<(), ShellError> { + let peeked = token_nodes.peek_any().not_eof("dot")?; + + match peeked.node { + node if node.is_dot() => { + peeked.commit(); + shapes.push((*input).spanned(node.span())); + Ok(()) + } + + other => Err(ShellError::type_error("dot", other.tagged_type_name())), + } + } +} + +#[cfg(coloring_in_tokens)] +impl FallibleColorSyntax for ColorableDotShape { + type Info = (); + type Input = FlatShape; + + fn name(&self) -> &'static str { + "ColorableDotShape" + } + + fn color_syntax<'a, 'b>( + &self, + input: &FlatShape, + token_nodes: &'b mut TokensIterator<'a>, + _context: &ExpandContext, + ) -> Result<(), ShellError> { + let peeked = token_nodes.peek_any().not_eof("dot")?; + + match peeked.node { + node if node.is_dot() => { + peeked.commit(); + token_nodes.color_shape((*input).spanned(node.span())); + Ok(()) + } + + other => Err(ShellError::type_error("dot", 
other.tagged_type_name())), + } + } +} + +impl SkipSyntax for DotShape { + fn skip<'a, 'b>( + &self, + token_nodes: &mut TokensIterator<'_>, + context: &ExpandContext, + ) -> Result<(), ShellError> { + expand_syntax(self, token_nodes, context)?; + + Ok(()) + } +} + +impl ExpandSyntax for DotShape { + type Output = Span; + + fn expand_syntax<'a, 'b>( + &self, + token_nodes: &'b mut TokensIterator<'a>, + _context: &ExpandContext, + ) -> Result { + parse_single_node(token_nodes, "dot", |token, token_span, _| { + Ok(match token { + RawToken::Operator(Operator::Dot) => token_span, + _ => { + return Err(ShellError::type_error( + "dot", + token.type_name().tagged(token_span), + )) + } + }) + }) + } +} + +#[derive(Debug, Copy, Clone)] +pub struct InfixShape; + +#[cfg(not(coloring_in_tokens))] +impl FallibleColorSyntax for InfixShape { + type Info = (); + type Input = (); + + fn color_syntax<'a, 'b>( + &self, + _input: &(), + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + outer_shapes: &mut Vec>, + ) -> Result<(), ShellError> { + let checkpoint = token_nodes.checkpoint(); + let mut shapes = vec![]; + + // An infix operator must be prefixed by whitespace. If no whitespace was found, fail + color_fallible_syntax(&WhitespaceShape, checkpoint.iterator, context, &mut shapes)?; + + // Parse the next TokenNode after the whitespace + parse_single_node( + checkpoint.iterator, + "infix operator", + |token, token_span, _| { + match token { + // If it's an operator (and not `.`), it's a match + RawToken::Operator(operator) if operator != Operator::Dot => { + shapes.push(FlatShape::Operator.spanned(token_span)); + Ok(()) + } + + // Otherwise, it's not a match + _ => Err(ShellError::type_error( + "infix operator", + token.type_name().tagged(token_span), + )), + } + }, + )?; + + // An infix operator must be followed by whitespace. If no whitespace was found, fail + color_fallible_syntax(&WhitespaceShape, checkpoint.iterator, context, &mut shapes)?; + + outer_shapes.extend(shapes); + checkpoint.commit(); + Ok(()) + } +} + +#[cfg(coloring_in_tokens)] +impl FallibleColorSyntax for InfixShape { + type Info = (); + type Input = (); + + fn name(&self) -> &'static str { + "InfixShape" + } + + fn color_syntax<'a, 'b>( + &self, + _input: &(), + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + ) -> Result<(), ShellError> { + let checkpoint = token_nodes.checkpoint(); + + // An infix operator must be prefixed by whitespace. If no whitespace was found, fail + color_fallible_syntax(&WhitespaceShape, checkpoint.iterator, context)?; + + // Parse the next TokenNode after the whitespace + let operator_span = parse_single_node( + checkpoint.iterator, + "infix operator", + |token, token_span, _| { + match token { + // If it's an operator (and not `.`), it's a match + RawToken::Operator(operator) if operator != Operator::Dot => Ok(token_span), + + // Otherwise, it's not a match + _ => Err(ShellError::type_error( + "infix operator", + token.type_name().tagged(token_span), + )), + } + }, + )?; + + checkpoint + .iterator + .color_shape(FlatShape::Operator.spanned(operator_span)); + + // An infix operator must be followed by whitespace. 
If no whitespace was found, fail + color_fallible_syntax(&WhitespaceShape, checkpoint.iterator, context)?; + + checkpoint.commit(); + Ok(()) + } +} + +impl ExpandSyntax for InfixShape { + type Output = (Span, Spanned, Span); + + fn expand_syntax<'a, 'b>( + &self, + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + ) -> Result { + let checkpoint = token_nodes.checkpoint(); + + // An infix operator must be prefixed by whitespace + let start = expand_syntax(&WhitespaceShape, checkpoint.iterator, context)?; + + // Parse the next TokenNode after the whitespace + let operator = parse_single_node( + checkpoint.iterator, + "infix operator", + |token, token_span, _| { + Ok(match token { + // If it's an operator (and not `.`), it's a match + RawToken::Operator(operator) if operator != Operator::Dot => { + operator.spanned(token_span) + } + + // Otherwise, it's not a match + _ => { + return Err(ShellError::type_error( + "infix operator", + token.type_name().tagged(token_span), + )) + } + }) + }, + )?; + + // An infix operator must be followed by whitespace + let end = expand_syntax(&WhitespaceShape, checkpoint.iterator, context)?; + + checkpoint.commit(); + + Ok((start, operator, end)) + } +} diff --git a/src/parser/hir/syntax_shape/flat_shape.rs b/src/parser/hir/syntax_shape/flat_shape.rs new file mode 100644 index 0000000000..b961d1f567 --- /dev/null +++ b/src/parser/hir/syntax_shape/flat_shape.rs @@ -0,0 +1,97 @@ +use crate::parser::{Delimiter, Flag, FlagKind, Operator, RawNumber, RawToken, TokenNode}; +use crate::{Span, Spanned, SpannedItem, Text}; + +#[derive(Debug, Copy, Clone)] +pub enum FlatShape { + OpenDelimiter(Delimiter), + CloseDelimiter(Delimiter), + ItVariable, + Variable, + Operator, + Dot, + InternalCommand, + ExternalCommand, + ExternalWord, + BareMember, + StringMember, + String, + Path, + Word, + Pipe, + GlobPattern, + Flag, + ShorthandFlag, + Int, + Decimal, + Whitespace, + Error, + Size { number: Span, unit: Span }, +} + +impl FlatShape { + pub fn from(token: &TokenNode, source: &Text, shapes: &mut Vec>) -> () { + match token { + TokenNode::Token(token) => match token.item { + RawToken::Number(RawNumber::Int(_)) => { + shapes.push(FlatShape::Int.spanned(token.span)) + } + RawToken::Number(RawNumber::Decimal(_)) => { + shapes.push(FlatShape::Decimal.spanned(token.span)) + } + RawToken::Operator(Operator::Dot) => { + shapes.push(FlatShape::Dot.spanned(token.span)) + } + RawToken::Operator(_) => shapes.push(FlatShape::Operator.spanned(token.span)), + RawToken::String(_) => shapes.push(FlatShape::String.spanned(token.span)), + RawToken::Variable(v) if v.slice(source) == "it" => { + shapes.push(FlatShape::ItVariable.spanned(token.span)) + } + RawToken::Variable(_) => shapes.push(FlatShape::Variable.spanned(token.span)), + RawToken::ExternalCommand(_) => { + shapes.push(FlatShape::ExternalCommand.spanned(token.span)) + } + RawToken::ExternalWord => shapes.push(FlatShape::ExternalWord.spanned(token.span)), + RawToken::GlobPattern => shapes.push(FlatShape::GlobPattern.spanned(token.span)), + RawToken::Bare => shapes.push(FlatShape::Word.spanned(token.span)), + }, + TokenNode::Call(_) => unimplemented!(), + TokenNode::Nodes(nodes) => { + for node in &nodes.item { + FlatShape::from(node, source, shapes); + } + } + TokenNode::Delimited(v) => { + shapes.push(FlatShape::OpenDelimiter(v.item.delimiter).spanned(v.item.spans.0)); + for token in &v.item.children { + FlatShape::from(token, source, shapes); + } + 
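
// Sketch (hypothetical, simplified; not from this patch): at this point the
// open-delimiter shape and the flattened children have been pushed, and the matching
// close-delimiter shape is pushed just below. The same flattening pattern over an
// invented toy tree:
enum Node {
    Leaf(&'static str),
    Delimited(Vec<Node>),
}

fn flatten(node: &Node, out: &mut Vec<String>) {
    match node {
        Node::Leaf(name) => out.push(format!("leaf:{}", name)),
        Node::Delimited(children) => {
            out.push("open".to_string());
            for child in children {
                flatten(child, out); // recurse into nested nodes
            }
            out.push("close".to_string());
        }
    }
}
// flatten(&Node::Delimited(vec![Node::Leaf("x")]), &mut out)
// pushes ["open", "leaf:x", "close"], preserving source order.
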
shapes.push(FlatShape::CloseDelimiter(v.item.delimiter).spanned(v.item.spans.1)); + } + TokenNode::Pipeline(pipeline) => { + for part in &pipeline.parts { + if let Some(_) = part.pipe { + shapes.push(FlatShape::Pipe.spanned(part.span)); + } + } + } + TokenNode::Flag(Spanned { + item: + Flag { + kind: FlagKind::Longhand, + .. + }, + span, + }) => shapes.push(FlatShape::Flag.spanned(*span)), + TokenNode::Flag(Spanned { + item: + Flag { + kind: FlagKind::Shorthand, + .. + }, + span, + }) => shapes.push(FlatShape::ShorthandFlag.spanned(*span)), + TokenNode::Whitespace(_) => shapes.push(FlatShape::Whitespace.spanned(token.span())), + TokenNode::Error(v) => shapes.push(FlatShape::Error.spanned(v.span)), + } + } +} diff --git a/src/parser/hir/tokens_iterator.rs b/src/parser/hir/tokens_iterator.rs new file mode 100644 index 0000000000..8e2f4a8f88 --- /dev/null +++ b/src/parser/hir/tokens_iterator.rs @@ -0,0 +1,722 @@ +pub(crate) mod debug; + +use self::debug::Tracer; +use crate::errors::ShellError; +#[cfg(coloring_in_tokens)] +use crate::parser::hir::syntax_shape::FlatShape; +use crate::parser::TokenNode; +use crate::prelude::*; +use crate::{Span, Spanned, SpannedItem}; +#[allow(unused)] +use getset::{Getters, MutGetters}; + +#[derive(Getters, Debug)] +pub struct TokensIteratorState<'content> { + tokens: &'content [TokenNode], + span: Span, + skip_ws: bool, + index: usize, + seen: indexmap::IndexSet, + #[cfg(coloring_in_tokens)] + #[cfg_attr(coloring_in_tokens, get = "pub")] + shapes: Vec>, +} + +#[derive(Getters, MutGetters, Debug)] +pub struct TokensIterator<'content> { + #[get = "pub"] + #[get_mut = "pub"] + state: TokensIteratorState<'content>, + #[get = "pub"] + #[get_mut = "pub"] + tracer: Tracer, +} + +#[derive(Debug)] +pub struct Checkpoint<'content, 'me> { + pub(crate) iterator: &'me mut TokensIterator<'content>, + index: usize, + seen: indexmap::IndexSet, + #[cfg(coloring_in_tokens)] + shape_start: usize, + committed: bool, +} + +impl<'content, 'me> Checkpoint<'content, 'me> { + pub(crate) fn commit(mut self) { + self.committed = true; + } +} + +impl<'content, 'me> std::ops::Drop for Checkpoint<'content, 'me> { + fn drop(&mut self) { + if !self.committed { + let state = &mut self.iterator.state; + + state.index = self.index; + state.seen = self.seen.clone(); + #[cfg(coloring_in_tokens)] + state.shapes.truncate(self.shape_start); + } + } +} + +#[derive(Debug)] +pub struct Peeked<'content, 'me> { + pub(crate) node: Option<&'content TokenNode>, + iterator: &'me mut TokensIterator<'content>, + from: usize, + to: usize, +} + +impl<'content, 'me> Peeked<'content, 'me> { + pub fn commit(&mut self) -> Option<&'content TokenNode> { + let Peeked { + node, + iterator, + from, + to, + } = self; + + let node = (*node)?; + iterator.commit(*from, *to); + Some(node) + } + + pub fn not_eof( + self, + expected: impl Into, + ) -> Result, ShellError> { + match self.node { + None => Err(ShellError::unexpected_eof( + expected, + self.iterator.eof_span(), + )), + Some(node) => Ok(PeekedNode { + node, + iterator: self.iterator, + from: self.from, + to: self.to, + }), + } + } + + pub fn type_error(&self, expected: impl Into) -> ShellError { + peek_error(&self.node, self.iterator.eof_span(), expected) + } +} + +#[derive(Debug)] +pub struct PeekedNode<'content, 'me> { + pub(crate) node: &'content TokenNode, + iterator: &'me mut TokensIterator<'content>, + from: usize, + to: usize, +} + +impl<'content, 'me> PeekedNode<'content, 'me> { + pub fn commit(self) -> &'content TokenNode { + let PeekedNode { + node, + 
iterator, + from, + to, + } = self; + + iterator.commit(from, to); + node + } + + pub fn rollback(self) {} + + pub fn type_error(&self, expected: impl Into) -> ShellError { + peek_error(&Some(self.node), self.iterator.eof_span(), expected) + } +} + +pub fn peek_error( + node: &Option<&TokenNode>, + eof_span: Span, + expected: impl Into, +) -> ShellError { + match node { + None => ShellError::unexpected_eof(expected, eof_span), + Some(node) => ShellError::type_error(expected, node.tagged_type_name()), + } +} + +impl<'content> TokensIterator<'content> { + pub fn new( + items: &'content [TokenNode], + span: Span, + skip_ws: bool, + ) -> TokensIterator<'content> { + TokensIterator { + state: TokensIteratorState { + tokens: items, + span, + skip_ws, + index: 0, + seen: indexmap::IndexSet::new(), + #[cfg(coloring_in_tokens)] + shapes: vec![], + }, + tracer: Tracer::new(), + } + } + + pub fn all(tokens: &'content [TokenNode], span: Span) -> TokensIterator<'content> { + TokensIterator::new(tokens, span, false) + } + + pub fn len(&self) -> usize { + self.state.tokens.len() + } + + pub fn spanned( + &mut self, + block: impl FnOnce(&mut TokensIterator<'content>) -> T, + ) -> Spanned { + let start = self.span_at_cursor(); + + let result = block(self); + + let end = self.span_at_cursor(); + + result.spanned(start.until(end)) + } + + #[cfg(coloring_in_tokens)] + pub fn color_shape(&mut self, shape: Spanned) { + self.with_tracer(|_, tracer| tracer.add_shape(shape)); + self.state.shapes.push(shape); + } + + #[cfg(coloring_in_tokens)] + pub fn mutate_shapes(&mut self, block: impl FnOnce(&mut Vec>)) { + let new_shapes: Vec> = { + let shapes = &mut self.state.shapes; + let len = shapes.len(); + block(shapes); + (len..(shapes.len())).map(|i| shapes[i]).collect() + }; + + self.with_tracer(|_, tracer| { + for shape in new_shapes { + tracer.add_shape(shape) + } + }); + } + + #[cfg(coloring_in_tokens)] + pub fn silently_mutate_shapes(&mut self, block: impl FnOnce(&mut Vec>)) { + let shapes = &mut self.state.shapes; + block(shapes); + } + + #[cfg(coloring_in_tokens)] + pub fn sort_shapes(&mut self) { + // This is pretty dubious, but it works. We should look into a better algorithm that doesn't end up requiring + // this solution. 
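
// Sketch (hypothetical, simplified; not from this patch): shapes may have been
// recorded out of source order, so they are re-ordered by the start offset of their
// span before being used for highlighting. The same idea over invented
// (start, shape-name) pairs:
fn sort_by_start(mut shapes: Vec<(usize, &'static str)>) -> Vec<(usize, &'static str)> {
    // A stable sort keyed on the start offset keeps equal-start shapes in push order.
    shapes.sort_by_key(|(start, _)| *start);
    shapes
}
// sort_by_start(vec![(7, "string"), (0, "variable"), (3, "dot")])
//     == vec![(0, "variable"), (3, "dot"), (7, "string")]
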
+ + self.state + .shapes + .sort_by(|a, b| a.span.start().cmp(&b.span.start())); + } + + #[cfg(coloring_in_tokens)] + pub fn child<'me, T>( + &'me mut self, + tokens: Spanned<&'me [TokenNode]>, + block: impl FnOnce(&mut TokensIterator<'me>) -> T, + ) -> T { + let mut shapes = vec![]; + std::mem::swap(&mut shapes, &mut self.state.shapes); + + let mut tracer = Tracer::new(); + std::mem::swap(&mut tracer, &mut self.tracer); + + let mut iterator = TokensIterator { + state: TokensIteratorState { + tokens: tokens.item, + span: tokens.span, + skip_ws: false, + index: 0, + seen: indexmap::IndexSet::new(), + shapes, + }, + tracer, + }; + + let result = block(&mut iterator); + + std::mem::swap(&mut iterator.state.shapes, &mut self.state.shapes); + std::mem::swap(&mut iterator.tracer, &mut self.tracer); + + result + } + + #[cfg(not(coloring_in_tokens))] + pub fn child<'me, T>( + &'me mut self, + tokens: Spanned<&'me [TokenNode]>, + block: impl FnOnce(&mut TokensIterator<'me>) -> T, + ) -> T { + let mut tracer = Tracer::new(); + std::mem::swap(&mut tracer, &mut self.tracer); + + let mut iterator = TokensIterator { + state: TokensIteratorState { + tokens: tokens.item, + span: tokens.span, + skip_ws: false, + index: 0, + seen: indexmap::IndexSet::new(), + }, + tracer, + }; + + let result = block(&mut iterator); + + std::mem::swap(&mut iterator.tracer, &mut self.tracer); + + result + } + + pub fn with_tracer(&mut self, block: impl FnOnce(&mut TokensIteratorState, &mut Tracer)) { + let state = &mut self.state; + let tracer = &mut self.tracer; + + block(state, tracer) + } + + #[cfg(coloring_in_tokens)] + pub fn color_frame( + &mut self, + desc: &'static str, + block: impl FnOnce(&mut TokensIterator) -> T, + ) -> T { + self.with_tracer(|_, tracer| tracer.start(desc)); + + let result = block(self); + + self.with_tracer(|_, tracer| { + tracer.success(); + }); + + result + } + + pub fn color_fallible_frame( + &mut self, + desc: &'static str, + block: impl FnOnce(&mut TokensIterator) -> Result, + ) -> Result { + self.with_tracer(|_, tracer| tracer.start(desc)); + + if self.at_end() { + self.with_tracer(|_, tracer| tracer.eof_frame()); + return Err(ShellError::unexpected_eof("coloring", Tag::unknown())); + } + + let result = block(self); + + self.with_tracer(|_, tracer| match &result { + Ok(_) => { + tracer.success(); + } + + Err(err) => tracer.failed(err), + }); + + result + } + + /// Use a checkpoint when you need to peek more than one token ahead, but can't be sure + /// that you'll succeed. + pub fn checkpoint<'me>(&'me mut self) -> Checkpoint<'content, 'me> { + let state = &mut self.state; + + let index = state.index; + #[cfg(coloring_in_tokens)] + let shape_start = state.shapes.len(); + let seen = state.seen.clone(); + + Checkpoint { + iterator: self, + index, + seen, + committed: false, + #[cfg(coloring_in_tokens)] + shape_start, + } + } + + /// Use a checkpoint when you need to peek more than one token ahead, but can't be sure + /// that you'll succeed. 
+ pub fn atomic<'me, T>( + &'me mut self, + block: impl FnOnce(&mut TokensIterator<'content>) -> Result, + ) -> Result { + let state = &mut self.state; + + let index = state.index; + #[cfg(coloring_in_tokens)] + let shape_start = state.shapes.len(); + let seen = state.seen.clone(); + + let checkpoint = Checkpoint { + iterator: self, + index, + seen, + committed: false, + #[cfg(coloring_in_tokens)] + shape_start, + }; + + let value = block(checkpoint.iterator)?; + + checkpoint.commit(); + return Ok(value); + } + + #[cfg(coloring_in_tokens)] + /// Use a checkpoint when you need to peek more than one token ahead, but can't be sure + /// that you'll succeed. + pub fn atomic_returning_shapes<'me, T>( + &'me mut self, + block: impl FnOnce(&mut TokensIterator<'content>) -> Result, + ) -> (Result, Vec>) { + let index = self.state.index; + let mut shapes = vec![]; + + let seen = self.state.seen.clone(); + std::mem::swap(&mut self.state.shapes, &mut shapes); + + let checkpoint = Checkpoint { + iterator: self, + index, + seen, + committed: false, + shape_start: 0, + }; + + let value = block(checkpoint.iterator); + + let value = match value { + Err(err) => { + drop(checkpoint); + std::mem::swap(&mut self.state.shapes, &mut shapes); + return (Err(err), vec![]); + } + + Ok(value) => value, + }; + + checkpoint.commit(); + std::mem::swap(&mut self.state.shapes, &mut shapes); + return (Ok(value), shapes); + } + + fn eof_span(&self) -> Span { + Span::new(self.state.span.end(), self.state.span.end()) + } + + pub fn typed_span_at_cursor(&mut self) -> Spanned<&'static str> { + let next = self.peek_any(); + + match next.node { + None => "end".spanned(self.eof_span()), + Some(node) => node.spanned_type_name(), + } + } + + pub fn whole_span(&self) -> Span { + self.state.span + } + + pub fn span_at_cursor(&mut self) -> Span { + let next = self.peek_any(); + + match next.node { + None => self.eof_span(), + Some(node) => node.span(), + } + } + + pub fn remove(&mut self, position: usize) { + self.state.seen.insert(position); + } + + pub fn at_end(&self) -> bool { + peek(self, self.state.skip_ws).is_none() + } + + pub fn at_end_possible_ws(&self) -> bool { + peek(self, true).is_none() + } + + pub fn advance(&mut self) { + self.state.seen.insert(self.state.index); + self.state.index += 1; + } + + pub fn extract(&mut self, f: impl Fn(&TokenNode) -> Option) -> Option<(usize, T)> { + let state = &mut self.state; + + for (i, item) in state.tokens.iter().enumerate() { + if state.seen.contains(&i) { + continue; + } + + match f(item) { + None => { + continue; + } + Some(value) => { + state.seen.insert(i); + return Some((i, value)); + } + } + } + + None + } + + pub fn move_to(&mut self, pos: usize) { + self.state.index = pos; + } + + pub fn restart(&mut self) { + self.state.index = 0; + } + + pub fn clone(&self) -> TokensIterator<'content> { + let state = &self.state; + TokensIterator { + state: TokensIteratorState { + tokens: state.tokens, + span: state.span, + index: state.index, + seen: state.seen.clone(), + skip_ws: state.skip_ws, + #[cfg(coloring_in_tokens)] + shapes: state.shapes.clone(), + }, + tracer: self.tracer.clone(), + } + } + + // Get the next token, not including whitespace + pub fn next_non_ws(&mut self) -> Option<&TokenNode> { + let mut peeked = start_next(self, true); + peeked.commit() + } + + // Peek the next token, not including whitespace + pub fn peek_non_ws<'me>(&'me mut self) -> Peeked<'content, 'me> { + start_next(self, true) + } + + // Peek the next token, including whitespace + pub fn 
peek_any<'me>(&'me mut self) -> Peeked<'content, 'me> { + start_next(self, false) + } + + // Peek the next token, including whitespace, but not EOF + pub fn peek_any_token<'me, T>( + &'me mut self, + expected: &'static str, + block: impl FnOnce(&'content TokenNode) -> Result, + ) -> Result { + let peeked = start_next(self, false); + let peeked = peeked.not_eof(expected); + + match peeked { + Err(err) => return Err(err), + Ok(peeked) => match block(peeked.node) { + Err(err) => return Err(err), + Ok(val) => { + peeked.commit(); + return Ok(val); + } + }, + } + } + + fn commit(&mut self, from: usize, to: usize) { + for index in from..to { + self.state.seen.insert(index); + } + + self.state.index = to; + } + + pub fn pos(&self, skip_ws: bool) -> Option { + peek_pos(self, skip_ws) + } + + pub fn debug_remaining(&self) -> Vec { + let mut tokens = self.clone(); + tokens.restart(); + tokens.cloned().collect() + } +} + +impl<'content> Iterator for TokensIterator<'content> { + type Item = &'content TokenNode; + + fn next(&mut self) -> Option { + next(self, self.state.skip_ws) + } +} + +fn peek<'content, 'me>( + iterator: &'me TokensIterator<'content>, + skip_ws: bool, +) -> Option<&'me TokenNode> { + let state = iterator.state(); + + let mut to = state.index; + + loop { + if to >= state.tokens.len() { + return None; + } + + if state.seen.contains(&to) { + to += 1; + continue; + } + + if to >= state.tokens.len() { + return None; + } + + let node = &state.tokens[to]; + + match node { + TokenNode::Whitespace(_) if skip_ws => { + to += 1; + } + _ => { + return Some(node); + } + } + } +} + +fn peek_pos<'content, 'me>( + iterator: &'me TokensIterator<'content>, + skip_ws: bool, +) -> Option { + let state = iterator.state(); + + let mut to = state.index; + + loop { + if to >= state.tokens.len() { + return None; + } + + if state.seen.contains(&to) { + to += 1; + continue; + } + + if to >= state.tokens.len() { + return None; + } + + let node = &state.tokens[to]; + + match node { + TokenNode::Whitespace(_) if skip_ws => { + to += 1; + } + _ => return Some(to), + } + } +} + +fn start_next<'content, 'me>( + iterator: &'me mut TokensIterator<'content>, + skip_ws: bool, +) -> Peeked<'content, 'me> { + let state = iterator.state(); + + let from = state.index; + let mut to = state.index; + + loop { + if to >= state.tokens.len() { + return Peeked { + node: None, + iterator, + from, + to, + }; + } + + if state.seen.contains(&to) { + to += 1; + continue; + } + + if to >= state.tokens.len() { + return Peeked { + node: None, + iterator, + from, + to, + }; + } + + let node = &state.tokens[to]; + + match node { + TokenNode::Whitespace(_) if skip_ws => { + to += 1; + } + _ => { + to += 1; + return Peeked { + node: Some(node), + iterator, + from, + to, + }; + } + } + } +} + +fn next<'me, 'content>( + iterator: &'me mut TokensIterator<'content>, + skip_ws: bool, +) -> Option<&'content TokenNode> { + loop { + if iterator.state().index >= iterator.state().tokens.len() { + return None; + } + + if iterator.state().seen.contains(&iterator.state().index) { + iterator.advance(); + continue; + } + + if iterator.state().index >= iterator.state().tokens.len() { + return None; + } + + match &iterator.state().tokens[iterator.state().index] { + TokenNode::Whitespace(_) if skip_ws => { + iterator.advance(); + } + other => { + iterator.advance(); + return Some(other); + } + } + } +} diff --git a/src/parser/hir/tokens_iterator/debug.rs b/src/parser/hir/tokens_iterator/debug.rs new file mode 100644 index 0000000000..332a74067c --- /dev/null 
+++ b/src/parser/hir/tokens_iterator/debug.rs @@ -0,0 +1,379 @@ +use crate::errors::ShellError; +use crate::parser::hir::syntax_shape::FlatShape; +use crate::parser::hir::tokens_iterator::TokensIteratorState; +use crate::prelude::*; +use crate::traits::ToDebug; +use ansi_term::Color; +use log::trace; +use ptree::*; +use std::borrow::Cow; +use std::io; + +#[derive(Debug)] +pub(crate) enum DebugIteratorToken { + Seen(String), + Unseen(String), + Cursor, +} + +pub(crate) fn debug_tokens(state: &TokensIteratorState, source: &str) -> Vec { + let mut out = vec![]; + + for (i, token) in state.tokens.iter().enumerate() { + if state.index == i { + out.push(DebugIteratorToken::Cursor); + } + + if state.seen.contains(&i) { + out.push(DebugIteratorToken::Seen(format!("{}", token.debug(source)))); + } else { + out.push(DebugIteratorToken::Unseen(format!( + "{}", + token.debug(source) + ))); + } + } + + out +} + +#[derive(Debug, Clone)] +pub enum FrameChild { + #[allow(unused)] + Shape(Spanned), + Frame(ColorFrame), +} + +impl FrameChild { + fn colored_leaf_description(&self, text: &Text, f: &mut impl io::Write) -> io::Result<()> { + match self { + FrameChild::Shape(shape) => write!( + f, + "{} {:?}", + Color::White + .bold() + .on(Color::Green) + .paint(format!("{:?}", shape.item)), + shape.span.slice(text) + ), + + FrameChild::Frame(frame) => frame.colored_leaf_description(f), + } + } + + fn into_tree_child(self, text: &Text) -> TreeChild { + match self { + FrameChild::Shape(shape) => TreeChild::Shape(shape, text.clone()), + FrameChild::Frame(frame) => TreeChild::Frame(frame, text.clone()), + } + } +} + +#[derive(Debug, Clone)] +pub struct ColorFrame { + description: &'static str, + children: Vec, + error: Option, +} + +impl ColorFrame { + fn colored_leaf_description(&self, f: &mut impl io::Write) -> io::Result<()> { + if self.has_only_error_descendents() { + if self.children.len() == 0 { + write!( + f, + "{}", + Color::White.bold().on(Color::Red).paint(self.description) + ) + } else { + write!(f, "{}", Color::Red.normal().paint(self.description)) + } + } else if self.has_descendent_shapes() { + write!(f, "{}", Color::Green.normal().paint(self.description)) + } else { + write!(f, "{}", Color::Yellow.bold().paint(self.description)) + } + } + + fn colored_description(&self, text: &Text, f: &mut impl io::Write) -> io::Result<()> { + if self.children.len() == 1 { + let child = &self.children[0]; + + self.colored_leaf_description(f)?; + write!(f, " -> ")?; + child.colored_leaf_description(text, f) + } else { + self.colored_leaf_description(f) + } + } + + fn children_for_formatting(&self, text: &Text) -> Vec { + if self.children.len() == 1 { + let child = &self.children[0]; + + match child { + FrameChild::Shape(_) => vec![], + FrameChild::Frame(frame) => frame.tree_children(text), + } + } else { + self.tree_children(text) + } + } + + fn tree_children(&self, text: &Text) -> Vec { + self.children + .clone() + .into_iter() + .map(|c| c.into_tree_child(text)) + .collect() + } + + #[allow(unused)] + fn add_shape(&mut self, shape: Spanned) { + self.children.push(FrameChild::Shape(shape)) + } + + fn has_child_shapes(&self) -> bool { + self.any_child_shape(|_| true) + } + + fn any_child_shape(&self, predicate: impl Fn(Spanned) -> bool) -> bool { + for item in &self.children { + match item { + FrameChild::Shape(shape) => { + if predicate(*shape) { + return true; + } + } + + _ => {} + } + } + + false + } + + fn any_child_frame(&self, predicate: impl Fn(&ColorFrame) -> bool) -> bool { + for item in &self.children { + 
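+            // Only the direct child frames are tested against the predicate here;
+            // callers such as has_descendent_shapes recurse by passing a recursive predicate.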
            match item {
+                FrameChild::Frame(frame) => {
+                    if predicate(frame) {
+                        return true;
+                    }
+                }
+
+                _ => {}
+            }
+        }
+
+        false
+    }
+
+    fn has_descendent_shapes(&self) -> bool {
+        if self.has_child_shapes() {
+            true
+        } else {
+            self.any_child_frame(|frame| frame.has_descendent_shapes())
+        }
+    }
+
+    fn has_only_error_descendents(&self) -> bool {
+        if self.children.len() == 0 {
+            // if this frame has no children at all, it has only error descendents if this frame
+            // is an error
+            self.error.is_some()
+        } else {
+            // otherwise, it has only error descendents if all of its children terminate in an
+            // error (transitively)
+
+            let mut seen_error = false;
+
+            for child in &self.children {
+                match child {
+                    // if this frame has at least one child shape, this frame has non-error descendents
+                    FrameChild::Shape(_) => return false,
+                    FrameChild::Frame(frame) => {
+                        // if the child frame also terminates only in errors, keep scanning;
+                        // otherwise this frame has a non-error descendent and we're done
+                        if frame.has_only_error_descendents() {
+                            seen_error = true;
+                        } else {
+                            return false;
+                        }
+                    }
+                }
+            }
+
+            seen_error
+        }
+    }
+}
+
+#[derive(Debug, Clone)]
+pub enum TreeChild {
+    Shape(Spanned<FlatShape>, Text),
+    Frame(ColorFrame, Text),
+}
+
+impl TreeChild {
+    fn colored_leaf_description(&self, f: &mut impl io::Write) -> io::Result<()> {
+        match self {
+            TreeChild::Shape(shape, text) => write!(
+                f,
+                "{} {:?}",
+                Color::White
+                    .bold()
+                    .on(Color::Green)
+                    .paint(format!("{:?}", shape.item)),
+                shape.span.slice(text)
+            ),
+
+            TreeChild::Frame(frame, _) => frame.colored_leaf_description(f),
+        }
+    }
+}
+
+impl TreeItem for TreeChild {
+    type Child = TreeChild;
+
+    fn write_self<W: io::Write>(&self, f: &mut W, _style: &Style) -> io::Result<()> {
+        match self {
+            shape @ TreeChild::Shape(..) => shape.colored_leaf_description(f),
+
+            TreeChild::Frame(frame, text) => frame.colored_description(text, f),
+        }
+    }
+
+    fn children(&self) -> Cow<[Self::Child]> {
+        match self {
+            TreeChild::Shape(..)
=> Cow::Borrowed(&[]), + TreeChild::Frame(frame, text) => Cow::Owned(frame.children_for_formatting(text)), + } + } +} + +#[derive(Debug, Clone)] +pub struct Tracer { + frame_stack: Vec, +} + +impl Tracer { + pub fn print(self, source: Text) -> PrintTracer { + PrintTracer { + tracer: self, + source, + } + } + + pub fn new() -> Tracer { + let root = ColorFrame { + description: "Trace", + children: vec![], + error: None, + }; + + Tracer { + frame_stack: vec![root], + } + } + + fn current_frame(&mut self) -> &mut ColorFrame { + let frames = &mut self.frame_stack; + let last = frames.len() - 1; + &mut frames[last] + } + + fn pop_frame(&mut self) -> ColorFrame { + let result = self.frame_stack.pop().expect("Can't pop root tracer frame"); + + if self.frame_stack.len() == 0 { + panic!("Can't pop root tracer frame"); + } + + self.debug(); + + result + } + + pub fn start(&mut self, description: &'static str) { + let frame = ColorFrame { + description, + children: vec![], + error: None, + }; + + self.frame_stack.push(frame); + self.debug(); + } + + pub fn eof_frame(&mut self) { + let current = self.pop_frame(); + self.current_frame() + .children + .push(FrameChild::Frame(current)); + } + + #[allow(unused)] + pub fn finish(&mut self) { + loop { + if self.frame_stack.len() == 1 { + break; + } + + let frame = self.pop_frame(); + self.current_frame().children.push(FrameChild::Frame(frame)); + } + } + + #[allow(unused)] + pub fn add_shape(&mut self, shape: Spanned) { + self.current_frame().add_shape(shape); + } + + pub fn success(&mut self) { + let current = self.pop_frame(); + self.current_frame() + .children + .push(FrameChild::Frame(current)); + } + + pub fn failed(&mut self, error: &ShellError) { + let mut current = self.pop_frame(); + current.error = Some(error.clone()); + self.current_frame() + .children + .push(FrameChild::Frame(current)); + } + + fn debug(&self) { + trace!(target: "nu::color_syntax", + "frames = {:?}", + self.frame_stack + .iter() + .map(|f| f.description) + .collect::>() + ); + + trace!(target: "nu::color_syntax", "{:#?}", self); + } +} + +#[derive(Debug, Clone)] +pub struct PrintTracer { + tracer: Tracer, + source: Text, +} + +impl TreeItem for PrintTracer { + type Child = TreeChild; + + fn write_self(&self, f: &mut W, style: &Style) -> io::Result<()> { + write!(f, "{}", style.paint("Color Trace")) + } + + fn children(&self) -> Cow<[Self::Child]> { + Cow::Owned(vec![TreeChild::Frame( + self.tracer.frame_stack[0].clone(), + self.source.clone(), + )]) + } +} diff --git a/src/parser/parse/call_node.rs b/src/parser/parse/call_node.rs index 2869abb449..eb715cd376 100644 --- a/src/parser/parse/call_node.rs +++ b/src/parser/parse/call_node.rs @@ -1,5 +1,7 @@ use crate::parser::TokenNode; +use crate::traits::ToDebug; use getset::Getters; +use std::fmt; #[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Getters)] pub struct CallNode { @@ -24,3 +26,17 @@ impl CallNode { } } } + +impl ToDebug for CallNode { + fn fmt_debug(&self, f: &mut fmt::Formatter, source: &str) -> fmt::Result { + write!(f, "{}", self.head.debug(source))?; + + if let Some(children) = &self.children { + for child in children { + write!(f, "{}", child.debug(source))? 
+ } + } + + Ok(()) + } +} diff --git a/src/parser/parse/files.rs b/src/parser/parse/files.rs index 173da54a80..8a2d3c90eb 100644 --- a/src/parser/parse/files.rs +++ b/src/parser/parse/files.rs @@ -1,6 +1,7 @@ use crate::Span; use derive_new::new; use language_reporting::{FileName, Location}; +use log::trace; #[derive(new, Debug, Clone)] pub struct Files { @@ -17,24 +18,38 @@ impl language_reporting::ReportingFiles for Files { from_index: usize, to_index: usize, ) -> Option { - Some(Span::from((from_index, to_index))) + Some(Span::new(from_index, to_index)) } - fn file_id(&self, _span: Self::Span) -> Self::FileId { + + fn file_id(&self, _tag: Self::Span) -> Self::FileId { 0 } + fn file_name(&self, _file: Self::FileId) -> FileName { FileName::Verbatim(format!("shell")) } + fn byte_index(&self, _file: Self::FileId, _line: usize, _column: usize) -> Option { unimplemented!("byte_index") } + fn location(&self, _file: Self::FileId, byte_index: usize) -> Option { let source = &self.snippet; let mut seen_lines = 0; let mut seen_bytes = 0; - for (pos, _) in source.match_indices('\n') { - if pos > byte_index { + for (pos, slice) in source.match_indices('\n') { + trace!( + "SEARCH={} SEEN={} POS={} SLICE={:?} LEN={} ALL={:?}", + byte_index, + seen_bytes, + pos, + slice, + source.len(), + source + ); + + if pos >= byte_index { return Some(language_reporting::Location::new( seen_lines, byte_index - seen_bytes, @@ -48,9 +63,10 @@ impl language_reporting::ReportingFiles for Files { if seen_lines == 0 { Some(language_reporting::Location::new(0, byte_index)) } else { - None + panic!("byte index {} wasn't valid", byte_index); } } + fn line_span(&self, _file: Self::FileId, lineno: usize) -> Option { let source = &self.snippet; let mut seen_lines = 0; @@ -58,7 +74,7 @@ impl language_reporting::ReportingFiles for Files { for (pos, _) in source.match_indices('\n') { if seen_lines == lineno { - return Some(Span::from((seen_bytes, pos))); + return Some(Span::new(seen_bytes, pos + 1)); } else { seen_lines += 1; seen_bytes = pos + 1; @@ -66,17 +82,20 @@ impl language_reporting::ReportingFiles for Files { } if seen_lines == 0 { - Some(Span::from((0, self.snippet.len() - 1))) + Some(Span::new(0, self.snippet.len() - 1)) } else { None } } + fn source(&self, span: Self::Span) -> Option { - if span.start > span.end { + trace!("source(tag={:?}) snippet={:?}", span, self.snippet); + + if span.start() > span.end() { return None; - } else if span.end >= self.snippet.len() { + } else if span.end() > self.snippet.len() { return None; } - Some(self.snippet[span.start..span.end].to_string()) + Some(span.slice(&self.snippet).to_string()) } } diff --git a/src/parser/parse/flag.rs b/src/parser/parse/flag.rs index 096d69879f..28b6749f1c 100644 --- a/src/parser/parse/flag.rs +++ b/src/parser/parse/flag.rs @@ -1,4 +1,5 @@ -use crate::Span; +use crate::parser::hir::syntax_shape::flat_shape::FlatShape; +use crate::{Span, Spanned, SpannedItem}; use derive_new::new; use getset::Getters; use serde::{Deserialize, Serialize}; @@ -12,6 +13,15 @@ pub enum FlagKind { #[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Getters, new)] #[get = "pub(crate)"] pub struct Flag { - kind: FlagKind, - name: Span, + pub(crate) kind: FlagKind, + pub(crate) name: Span, +} + +impl Spanned { + pub fn color(&self) -> Spanned { + match self.item.kind { + FlagKind::Longhand => FlatShape::Flag.spanned(self.span), + FlagKind::Shorthand => FlatShape::ShorthandFlag.spanned(self.span), + } + } } diff --git a/src/parser/parse/operator.rs 
b/src/parser/parse/operator.rs index 1942b6012c..7b5a5c77d8 100644 --- a/src/parser/parse/operator.rs +++ b/src/parser/parse/operator.rs @@ -11,6 +11,7 @@ pub enum Operator { GreaterThan, LessThanOrEqual, GreaterThanOrEqual, + Dot, } impl ToDebug for Operator { @@ -20,7 +21,6 @@ impl ToDebug for Operator { } impl Operator { - pub fn print(&self) -> String { self.as_str().to_string() } @@ -33,6 +33,7 @@ impl Operator { Operator::GreaterThan => ">", Operator::LessThanOrEqual => "<=", Operator::GreaterThanOrEqual => ">=", + Operator::Dot => ".", } } } @@ -53,6 +54,7 @@ impl FromStr for Operator { ">" => Ok(Operator::GreaterThan), "<=" => Ok(Operator::LessThanOrEqual), ">=" => Ok(Operator::GreaterThanOrEqual), + "." => Ok(Operator::Dot), _ => Err(()), } } diff --git a/src/parser/parse/parser.rs b/src/parser/parse/parser.rs index 238a144d5a..0dd1bc8566 100644 --- a/src/parser/parse/parser.rs +++ b/src/parser/parse/parser.rs @@ -5,7 +5,7 @@ use crate::parser::parse::{ tokens::*, unit::*, }; use crate::prelude::*; -use crate::{Span, Tagged}; +use crate::{Tag, Tagged}; use nom; use nom::branch::*; use nom::bytes::complete::*; @@ -14,23 +14,49 @@ use nom::combinator::*; use nom::multi::*; use nom::sequence::*; +use derive_new::new; use log::trace; use nom::dbg; use nom::*; use nom::{AsBytes, FindSubstring, IResult, InputLength, InputTake, Slice}; -use nom5_locate::{position, LocatedSpan}; +use nom_locate::{position, LocatedSpanEx}; +use nom_tracable::{tracable_parser, HasTracableInfo, TracableInfo}; use serde::{Deserialize, Serialize}; use std::fmt::Debug; use std::str::FromStr; -pub type NomSpan<'a> = LocatedSpan<&'a str>; +pub type NomSpan<'a> = LocatedSpanEx<&'a str, TracableContext>; + +#[derive(Debug, Clone, Copy, PartialEq, new)] +pub struct TracableContext { + pub(crate) info: TracableInfo, +} + +impl HasTracableInfo for TracableContext { + fn get_tracable_info(&self) -> TracableInfo { + self.info + } + + fn set_tracable_info(mut self, info: TracableInfo) -> Self { + TracableContext { info } + } +} + +impl std::ops::Deref for TracableContext { + type Target = TracableInfo; + + fn deref(&self) -> &TracableInfo { + &self.info + } +} pub fn nom_input(s: &str) -> NomSpan<'_> { - LocatedSpan::new(s) + LocatedSpanEx::new_extra(s, TracableContext::new(TracableInfo::new())) } macro_rules! operator { ($name:tt : $token:tt ) => { + #[tracable_parser] pub fn $name(input: NomSpan) -> IResult { let start = input.offset; let (input, tag) = tag(stringify!($token))(input)?; @@ -38,7 +64,7 @@ macro_rules! operator { Ok(( input, - TokenTreeBuilder::spanned_op(tag.fragment, (start, end)), + TokenTreeBuilder::spanned_op(tag.fragment, Span::new(start, end)), )) } }; @@ -50,25 +76,7 @@ operator! { gte: >= } operator! { lte: <= } operator! { eq: == } operator! { neq: != } - -fn trace_step<'a, T: Debug>( - input: NomSpan<'a>, - name: &str, - block: impl FnOnce(NomSpan<'a>) -> IResult, T>, -) -> IResult, T> { - trace!(target: "nu::lite_parse", "+ before {} @ {:?}", name, input); - match block(input) { - Ok((input, result)) => { - trace!(target: "nu::lite_parse", "after {} @ {:?} -> {:?}", name, input, result); - Ok((input, result)) - } - - Err(e) => { - trace!(target: "nu::lite_parse", "- failed {} :: {:?}", name, e); - Err(e) - } - } -} +operator! { dot: . 
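+// note: `.` is now lexed as its own operator token, so bare paths such as `foo.bar`
+// and `$it.0` are split into components at the token level (see bare_path and the tests below)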
} #[derive(Debug, Clone, Eq, PartialEq, Hash, Ord, PartialOrd, Serialize, Deserialize)] pub enum Number { @@ -76,6 +84,15 @@ pub enum Number { Decimal(BigDecimal), } +impl std::fmt::Display for Number { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + Number::Int(int) => write!(f, "{}", int), + Number::Decimal(decimal) => write!(f, "{}", decimal), + } + } +} + macro_rules! primitive_int { ($($ty:ty)*) => { $( @@ -147,467 +164,473 @@ impl Into for BigDecimal { } } -pub fn raw_number(input: NomSpan) -> IResult> { - let original = input; +#[tracable_parser] +pub fn number(input: NomSpan) -> IResult { + let (input, number) = raw_number(input)?; + + Ok(( + input, + TokenTreeBuilder::spanned_number(number.item, number.span), + )) +} + +#[tracable_parser] +pub fn raw_number(input: NomSpan) -> IResult> { + let anchoral = input; let start = input.offset; - trace_step(input, "raw_decimal", move |input| { - let (input, neg) = opt(tag("-"))(input)?; - let (input, head) = digit1(input)?; - let dot: IResult = tag(".")(input); + let (input, neg) = opt(tag("-"))(input)?; + let (input, head) = digit1(input)?; - let input = match dot { - Ok((input, dot)) => input, - - // it's just an integer - Err(_) => return Ok((input, RawNumber::int((start, input.offset)))), - }; - - let (input, tail) = digit1(input)?; - - let end = input.offset; - - Ok((input, RawNumber::decimal((start, end)))) - }) -} - -pub fn operator(input: NomSpan) -> IResult { - trace_step(input, "operator", |input| { - let (input, operator) = alt((gte, lte, neq, gt, lt, eq))(input)?; - - Ok((input, operator)) - }) -} - -pub fn dq_string(input: NomSpan) -> IResult { - trace_step(input, "dq_string", |input| { - let start = input.offset; - let (input, _) = char('"')(input)?; - let start1 = input.offset; - let (input, _) = many0(none_of("\""))(input)?; - let end1 = input.offset; - let (input, _) = char('"')(input)?; - let end = input.offset; - Ok(( - input, - TokenTreeBuilder::spanned_string((start1, end1), (start, end)), - )) - }) -} - -pub fn sq_string(input: NomSpan) -> IResult { - trace_step(input, "sq_string", move |input| { - let start = input.offset; - let (input, _) = char('\'')(input)?; - let start1 = input.offset; - let (input, _) = many0(none_of("\'"))(input)?; - let end1 = input.offset; - let (input, _) = char('\'')(input)?; - let end = input.offset; - - Ok(( - input, - TokenTreeBuilder::spanned_string((start1, end1), (start, end)), - )) - }) -} - -pub fn string(input: NomSpan) -> IResult { - trace_step(input, "string", move |input| { - alt((sq_string, dq_string))(input) - }) -} - -pub fn external(input: NomSpan) -> IResult { - trace_step(input, "external", move |input| { - let start = input.offset; - let (input, _) = tag("^")(input)?; - let (input, bare) = take_while(is_bare_char)(input)?; - let end = input.offset; - - Ok(( - input, - TokenTreeBuilder::spanned_external(bare, (start, end)), - )) - }) -} - -pub fn bare(input: NomSpan) -> IResult { - trace_step(input, "bare", move |input| { - let start = input.offset; - let (input, _) = take_while1(is_start_bare_char)(input)?; - let (input, _) = take_while(is_bare_char)(input)?; - let end = input.offset; - - Ok((input, TokenTreeBuilder::spanned_bare((start, end)))) - }) -} - -pub fn var(input: NomSpan) -> IResult { - trace_step(input, "var", move |input| { - let start = input.offset; - let (input, _) = tag("$")(input)?; - let (input, bare) = member(input)?; - let end = input.offset; - - Ok(( - input, - TokenTreeBuilder::spanned_var(bare.span(), (start, 
end)), - )) - }) -} - -pub fn member(input: NomSpan) -> IResult { - trace_step(input, "identifier", move |input| { - let start = input.offset; - let (input, _) = take_while1(is_id_start)(input)?; - let (input, _) = take_while(is_id_continue)(input)?; - - let end = input.offset; - - Ok((input, TokenTreeBuilder::spanned_member((start, end)))) - }) -} - -pub fn flag(input: NomSpan) -> IResult { - trace_step(input, "flag", move |input| { - let start = input.offset; - let (input, _) = tag("--")(input)?; - let (input, bare) = bare(input)?; - let end = input.offset; - - Ok(( - input, - TokenTreeBuilder::spanned_flag(bare.span(), (start, end)), - )) - }) -} - -pub fn shorthand(input: NomSpan) -> IResult { - trace_step(input, "shorthand", move |input| { - let start = input.offset; - let (input, _) = tag("-")(input)?; - let (input, bare) = bare(input)?; - let end = input.offset; - - Ok(( - input, - TokenTreeBuilder::spanned_shorthand(bare.span(), (start, end)), - )) - }) -} - -pub fn raw_unit(input: NomSpan) -> IResult> { - trace_step(input, "raw_unit", move |input| { - let start = input.offset; - let (input, unit) = alt(( - tag("B"), - tag("b"), - tag("KB"), - tag("kb"), - tag("Kb"), - tag("K"), - tag("k"), - tag("MB"), - tag("mb"), - tag("Mb"), - tag("GB"), - tag("gb"), - tag("Gb"), - tag("TB"), - tag("tb"), - tag("Tb"), - tag("PB"), - tag("pb"), - tag("Pb"), - ))(input)?; - let end = input.offset; - - Ok(( - input, - Tagged::from_simple_spanned_item(Unit::from(unit.fragment), (start, end)), - )) - }) -} - -pub fn size(input: NomSpan) -> IResult { - trace_step(input, "size", move |input| { - let mut is_size = false; - let start = input.offset; - let (input, number) = raw_number(input)?; - if let Ok((input, Some(size))) = opt(raw_unit)(input) { - let end = input.offset; - - // Check to make sure there is no trailing parseable characters - if let Ok((input, Some(extra))) = opt(bare)(input) { - return Err(nom::Err::Error((input, nom::error::ErrorKind::Char))); - } - - Ok(( - input, - TokenTreeBuilder::spanned_size((number.item, *size), (start, end)), - )) - } else { - let end = input.offset; - - // Check to make sure there is no trailing parseable characters - if let Ok((input, Some(extra))) = opt(bare)(input) { - return Err(nom::Err::Error((input, nom::error::ErrorKind::Char))); - } - - Ok(( - input, - TokenTreeBuilder::spanned_number(number.item, number.tag), - )) + match input.fragment.chars().next() { + None => return Ok((input, RawNumber::int(Span::new(start, input.offset)))), + Some('.') => (), + other if is_boundary(other) => { + return Ok((input, RawNumber::int(Span::new(start, input.offset)))) } - }) + _ => { + return Err(nom::Err::Error(nom::error::make_error( + input, + nom::error::ErrorKind::Tag, + ))) + } + } + + let dot: IResult = tag(".")(input); + + let input = match dot { + Ok((input, dot)) => input, + + // it's just an integer + Err(_) => return Ok((input, RawNumber::int(Span::new(start, input.offset)))), + }; + + let (input, tail) = digit1(input)?; + + let end = input.offset; + + let next = input.fragment.chars().next(); + + if is_boundary(next) { + Ok((input, RawNumber::decimal(Span::new(start, end)))) + } else { + Err(nom::Err::Error(nom::error::make_error( + input, + nom::error::ErrorKind::Tag, + ))) + } } +#[tracable_parser] +pub fn operator(input: NomSpan) -> IResult { + let (input, operator) = alt((gte, lte, neq, gt, lt, eq))(input)?; + + Ok((input, operator)) +} + +#[tracable_parser] +pub fn dq_string(input: NomSpan) -> IResult { + let start = input.offset; + let (input, _) 
= char('"')(input)?; + let start1 = input.offset; + let (input, _) = many0(none_of("\""))(input)?; + let end1 = input.offset; + let (input, _) = char('"')(input)?; + let end = input.offset; + Ok(( + input, + TokenTreeBuilder::spanned_string(Span::new(start1, end1), Span::new(start, end)), + )) +} + +#[tracable_parser] +pub fn sq_string(input: NomSpan) -> IResult { + let start = input.offset; + let (input, _) = char('\'')(input)?; + let start1 = input.offset; + let (input, _) = many0(none_of("\'"))(input)?; + let end1 = input.offset; + let (input, _) = char('\'')(input)?; + let end = input.offset; + + Ok(( + input, + TokenTreeBuilder::spanned_string(Span::new(start1, end1), Span::new(start, end)), + )) +} + +#[tracable_parser] +pub fn string(input: NomSpan) -> IResult { + alt((sq_string, dq_string))(input) +} + +#[tracable_parser] +pub fn external(input: NomSpan) -> IResult { + let start = input.offset; + let (input, _) = tag("^")(input)?; + let (input, bare) = take_while(is_bare_char)(input)?; + let end = input.offset; + + Ok(( + input, + TokenTreeBuilder::spanned_external_command(bare, Span::new(start, end)), + )) +} + +#[tracable_parser] +pub fn pattern(input: NomSpan) -> IResult { + let start = input.offset; + let (input, _) = take_while1(is_start_glob_char)(input)?; + let (input, _) = take_while(is_glob_char)(input)?; + + let next_char = &input.fragment.chars().nth(0); + + if let Some(next_char) = next_char { + if is_external_word_char(*next_char) { + return Err(nom::Err::Error(nom::error::make_error( + input, + nom::error::ErrorKind::TakeWhile1, + ))); + } + } + + let end = input.offset; + + Ok(( + input, + TokenTreeBuilder::spanned_pattern(Span::new(start, end)), + )) +} + +#[tracable_parser] +pub fn bare(input: NomSpan) -> IResult { + let start = input.offset; + let (input, _) = take_while1(is_start_bare_char)(input)?; + let (input, last) = take_while(is_bare_char)(input)?; + + let next_char = &input.fragment.chars().nth(0); + let prev_char = last.fragment.chars().nth(0); + + if let Some(next_char) = next_char { + if is_external_word_char(*next_char) || is_glob_specific_char(*next_char) { + return Err(nom::Err::Error(nom::error::make_error( + input, + nom::error::ErrorKind::TakeWhile1, + ))); + } + } + + let end = input.offset; + + Ok((input, TokenTreeBuilder::spanned_bare(Span::new(start, end)))) +} + +#[tracable_parser] +pub fn external_word(input: NomSpan) -> IResult { + let start = input.offset; + let (input, _) = take_while1(is_external_word_char)(input)?; + let end = input.offset; + + Ok(( + input, + TokenTreeBuilder::spanned_external_word(Span::new(start, end)), + )) +} + +#[tracable_parser] +pub fn var(input: NomSpan) -> IResult { + let start = input.offset; + let (input, _) = tag("$")(input)?; + let (input, bare) = ident(input)?; + let end = input.offset; + + Ok(( + input, + TokenTreeBuilder::spanned_var(bare, Span::new(start, end)), + )) +} + +#[tracable_parser] +pub fn ident(input: NomSpan) -> IResult { + let start = input.offset; + let (input, _) = take_while1(is_start_bare_char)(input)?; + let (input, _) = take_while(is_bare_char)(input)?; + let end = input.offset; + + Ok((input, Tag::from((start, end, None)))) +} + +#[tracable_parser] +pub fn flag(input: NomSpan) -> IResult { + let start = input.offset; + let (input, _) = tag("--")(input)?; + let (input, bare) = bare(input)?; + let end = input.offset; + + Ok(( + input, + TokenTreeBuilder::spanned_flag(bare.span(), Span::new(start, end)), + )) +} + +#[tracable_parser] +pub fn shorthand(input: NomSpan) -> IResult { + let 
start = input.offset; + let (input, _) = tag("-")(input)?; + let (input, bare) = bare(input)?; + let end = input.offset; + + Ok(( + input, + TokenTreeBuilder::spanned_shorthand(bare.span(), Span::new(start, end)), + )) +} + +#[tracable_parser] pub fn leaf(input: NomSpan) -> IResult { - trace_step(input, "leaf", move |input| { - let (input, node) = - alt((size, string, operator, flag, shorthand, var, external, bare))(input)?; + let (input, node) = alt((number, string, operator, flag, shorthand, var, external))(input)?; - Ok((input, node)) - }) + Ok((input, node)) } -pub fn token_list(input: NomSpan) -> IResult> { - trace_step(input, "token_list", move |input| { - let (input, first) = node(input)?; - let (input, list) = many0(pair(space1, node))(input)?; +#[tracable_parser] +pub fn token_list(input: NomSpan) -> IResult>> { + let start = input.offset; + let (input, first) = node(input)?; - Ok((input, make_token_list(None, first, list, None))) - }) + let (input, mut list) = many0(pair(alt((whitespace, dot)), node))(input)?; + + let end = input.offset; + + Ok(( + input, + make_token_list(first, list, None).spanned(Span::new(start, end)), + )) } -pub fn spaced_token_list(input: NomSpan) -> IResult> { - trace_step(input, "spaced_token_list", move |input| { - let (input, sp_left) = opt(space1)(input)?; - let (input, first) = node(input)?; - let (input, list) = many0(pair(space1, node))(input)?; - let (input, sp_right) = opt(space1)(input)?; +#[tracable_parser] +pub fn spaced_token_list(input: NomSpan) -> IResult>> { + let start = input.offset; + let (input, pre_ws) = opt(whitespace)(input)?; + let (input, items) = token_list(input)?; + let (input, post_ws) = opt(whitespace)(input)?; + let end = input.offset; - Ok((input, make_token_list(sp_left, first, list, sp_right))) - }) + let mut out = vec![]; + + out.extend(pre_ws); + out.extend(items.item); + out.extend(post_ws); + + Ok((input, out.spanned(Span::new(start, end)))) } fn make_token_list( - sp_left: Option, - first: TokenNode, - list: Vec<(NomSpan, TokenNode)>, - sp_right: Option, + first: Vec, + list: Vec<(TokenNode, Vec)>, + sp_right: Option, ) -> Vec { let mut nodes = vec![]; - if let Some(sp_left) = sp_left { - nodes.push(TokenNode::Whitespace(Span::from(sp_left))); - } + nodes.extend(first); - nodes.push(first); - - for (ws, token) in list { - nodes.push(TokenNode::Whitespace(Span::from(ws))); - nodes.push(token); + for (left, right) in list { + nodes.push(left); + nodes.extend(right); } if let Some(sp_right) = sp_right { - nodes.push(TokenNode::Whitespace(Span::from(sp_right))); + nodes.push(sp_right); } nodes } +#[tracable_parser] pub fn whitespace(input: NomSpan) -> IResult { - trace_step(input, "whitespace", move |input| { - let left = input.offset; - let (input, ws1) = space1(input)?; - let right = input.offset; + let left = input.offset; + let (input, ws1) = space1(input)?; + let right = input.offset; - Ok((input, TokenTreeBuilder::spanned_ws((left, right)))) - }) + Ok((input, TokenTreeBuilder::spanned_ws(Span::new(left, right)))) } -pub fn delimited_paren(input: NomSpan) -> IResult { - trace_step(input, "delimited_paren", move |input| { - let left = input.offset; - let (input, _) = char('(')(input)?; - let (input, ws1) = opt(whitespace)(input)?; - let (input, inner_items) = opt(token_list)(input)?; - let (input, ws2) = opt(whitespace)(input)?; - let (input, _) = char(')')(input)?; - let right = input.offset; +pub fn delimited( + input: NomSpan, + delimiter: Delimiter, +) -> IResult>)> { + let left = input.offset; + let (input, 
open_span) = tag(delimiter.open())(input)?; + let (input, inner_items) = opt(spaced_token_list)(input)?; + let (input, close_span) = tag(delimiter.close())(input)?; + let right = input.offset; - let mut items = vec![]; + let mut items = vec![]; - if let Some(space) = ws1 { - items.push(space); - } + if let Some(inner_items) = inner_items { + items.extend(inner_items.item); + } - if let Some(inner_items) = inner_items { - items.extend(inner_items); - } - - if let Some(space) = ws2 { - items.push(space); - } - - Ok(( - input, - TokenTreeBuilder::spanned_parens(items, (left, right)), - )) - }) -} - -pub fn delimited_square(input: NomSpan) -> IResult { - trace_step(input, "delimited_paren", move |input| { - let left = input.offset; - let (input, _) = char('[')(input)?; - let (input, ws1) = opt(whitespace)(input)?; - let (input, inner_items) = opt(token_list)(input)?; - let (input, ws2) = opt(whitespace)(input)?; - let (input, _) = char(']')(input)?; - let right = input.offset; - - let mut items = vec![]; - - if let Some(space) = ws1 { - items.push(space); - } - - if let Some(inner_items) = inner_items { - items.extend(inner_items); - } - - if let Some(space) = ws2 { - items.push(space); - } - - Ok(( - input, - TokenTreeBuilder::spanned_square(items, (left, right)), - )) - }) -} - -pub fn delimited_brace(input: NomSpan) -> IResult { - trace_step(input, "delimited_brace", move |input| { - let left = input.offset; - let (input, _) = char('{')(input)?; - let (input, _) = opt(space1)(input)?; - let (input, items) = opt(token_list)(input)?; - let (input, _) = opt(space1)(input)?; - let (input, _) = char('}')(input)?; - let right = input.offset; - - Ok(( - input, - TokenTreeBuilder::spanned_brace(items.unwrap_or_else(|| vec![]), (left, right)), - )) - }) -} - -pub fn raw_call(input: NomSpan) -> IResult> { - trace_step(input, "raw_call", move |input| { - let left = input.offset; - let (input, items) = token_list(input)?; - let right = input.offset; - - Ok((input, TokenTreeBuilder::spanned_call(items, (left, right)))) - }) -} - -pub fn path(input: NomSpan) -> IResult { - trace_step(input, "path", move |input| { - let left = input.offset; - let (input, head) = node1(input)?; - let (input, _) = tag(".")(input)?; - let (input, tail) = separated_list(tag("."), alt((member, string)))(input)?; - let right = input.offset; - - Ok(( - input, - TokenTreeBuilder::spanned_path((head, tail), (left, right)), - )) - }) -} - -pub fn node1(input: NomSpan) -> IResult { - trace_step(input, "node1", alt((leaf, delimited_paren))) -} - -pub fn node(input: NomSpan) -> IResult { - trace_step( + Ok(( input, - "node", - alt(( - path, - leaf, - delimited_paren, - delimited_brace, - delimited_square, - )), - ) + ( + Span::from(open_span), + Span::from(close_span), + items.spanned(Span::new(left, right)), + ), + )) } +#[tracable_parser] +pub fn delimited_paren(input: NomSpan) -> IResult { + let (input, (left, right, tokens)) = delimited(input, Delimiter::Paren)?; + + Ok(( + input, + TokenTreeBuilder::spanned_parens(tokens.item, (left, right), tokens.span), + )) +} + +#[tracable_parser] +pub fn delimited_square(input: NomSpan) -> IResult { + let (input, (left, right, tokens)) = delimited(input, Delimiter::Square)?; + + Ok(( + input, + TokenTreeBuilder::spanned_square(tokens.item, (left, right), tokens.span), + )) +} + +#[tracable_parser] +pub fn delimited_brace(input: NomSpan) -> IResult { + let (input, (left, right, tokens)) = delimited(input, Delimiter::Brace)?; + + Ok(( + input, + TokenTreeBuilder::spanned_square(tokens.item, 
(left, right), tokens.span), + )) +} + +#[tracable_parser] +pub fn raw_call(input: NomSpan) -> IResult> { + let left = input.offset; + let (input, items) = token_list(input)?; + let right = input.offset; + + Ok(( + input, + TokenTreeBuilder::spanned_call(items.item, Span::new(left, right)), + )) +} + +#[tracable_parser] +pub fn bare_path(input: NomSpan) -> IResult> { + let (input, head) = alt((bare, dot))(input)?; + + let (input, tail) = many0(alt((bare, dot, string)))(input)?; + + let next_char = &input.fragment.chars().nth(0); + + if is_boundary(*next_char) { + let mut result = vec![head]; + result.extend(tail); + + Ok((input, result)) + } else { + Err(nom::Err::Error(nom::error::make_error( + input, + nom::error::ErrorKind::Many0, + ))) + } +} + +#[tracable_parser] +pub fn pattern_path(input: NomSpan) -> IResult> { + let (input, head) = alt((pattern, dot))(input)?; + + let (input, tail) = many0(alt((pattern, dot, string)))(input)?; + + let next_char = &input.fragment.chars().nth(0); + + if is_boundary(*next_char) { + let mut result = vec![head]; + result.extend(tail); + + Ok((input, result)) + } else { + Err(nom::Err::Error(nom::error::make_error( + input, + nom::error::ErrorKind::Many0, + ))) + } +} + +#[tracable_parser] +pub fn node1(input: NomSpan) -> IResult { + alt((leaf, bare, pattern, external_word, delimited_paren))(input) +} + +#[tracable_parser] +pub fn node(input: NomSpan) -> IResult> { + alt(( + to_list(leaf), + bare_path, + pattern_path, + to_list(external_word), + to_list(delimited_paren), + to_list(delimited_brace), + to_list(delimited_square), + ))(input) +} + +fn to_list( + parser: impl Fn(NomSpan) -> IResult, +) -> impl Fn(NomSpan) -> IResult> { + move |input| { + let (input, next) = parser(input)?; + + Ok((input, vec![next])) + } +} + +#[tracable_parser] +pub fn nodes(input: NomSpan) -> IResult { + let (input, tokens) = token_list(input)?; + + Ok(( + input, + TokenTreeBuilder::spanned_token_list(tokens.item, tokens.span), + )) +} + +#[tracable_parser] pub fn pipeline(input: NomSpan) -> IResult { - trace_step(input, "pipeline", |input| { - let start = input.offset; - let (input, head) = opt(tuple((raw_call, opt(space1), opt(tag("|")))))(input)?; - let (input, items) = trace_step( + let start = input.offset; + let (input, head) = spaced_token_list(input)?; + let (input, items) = many0(tuple((tag("|"), spaced_token_list)))(input)?; + + if input.input_len() != 0 { + return Err(Err::Error(error_position!( input, - "many0", - many0(tuple((opt(space1), raw_call, opt(space1), opt(tag("|"))))), - )?; - - let (input, tail) = opt(space1)(input)?; - let (input, newline) = opt(multispace1)(input)?; - - if input.input_len() != 0 { - return Err(Err::Error(error_position!( - input, - nom::error::ErrorKind::Eof - ))); - } - - let end = input.offset; - - Ok(( - input, - TokenTreeBuilder::spanned_pipeline( - (make_call_list(head, items), tail.map(Span::from)), - (start, end), - ), - )) - }) -} - -fn make_call_list( - head: Option<(Tagged, Option, Option)>, - items: Vec<( - Option, - Tagged, - Option, - Option, - )>, -) -> Vec { - let mut out = vec![]; - - if let Some(head) = head { - let el = PipelineElement::new(None, head.0, head.1.map(Span::from), head.2.map(Span::from)); - out.push(el); + nom::error::ErrorKind::Eof + ))); } - for (ws1, call, ws2, pipe) in items { - let el = PipelineElement::new( - ws1.map(Span::from), - call, - ws2.map(Span::from), - pipe.map(Span::from), - ); - out.push(el); - } + let end = input.offset; - out + let head_span = head.span; + let mut all_items: Vec> 
= + vec![PipelineElement::new(None, head).spanned(head_span)]; + + all_items.extend(items.into_iter().map(|(pipe, items)| { + let items_span = items.span; + PipelineElement::new(Some(Span::from(pipe)), items) + .spanned(Span::from(pipe).until(items_span)) + })); + + Ok(( + input, + TokenTreeBuilder::spanned_pipeline(all_items, Span::new(start, end)), + )) } fn int(frag: &str, neg: Option) -> i64 { @@ -619,40 +642,62 @@ fn int(frag: &str, neg: Option) -> i64 { } } +fn is_boundary(c: Option) -> bool { + match c { + None => true, + Some(')') | Some(']') | Some('}') => true, + Some(c) if c.is_whitespace() => true, + _ => false, + } +} + +fn is_external_word_char(c: char) -> bool { + match c { + ';' | '|' | '#' | '-' | '"' | '\'' | '$' | '(' | ')' | '[' | ']' | '{' | '}' | '`' + | '.' => false, + other if other.is_whitespace() => false, + _ => true, + } +} + +/// These characters appear in globs and not bare words +fn is_glob_specific_char(c: char) -> bool { + c == '*' || c == '?' +} + +fn is_start_glob_char(c: char) -> bool { + is_start_bare_char(c) || is_glob_specific_char(c) || c == '.' +} + +fn is_glob_char(c: char) -> bool { + is_bare_char(c) || is_glob_specific_char(c) +} + fn is_start_bare_char(c: char) -> bool { match c { - _ if c.is_alphabetic() => true, - _ if c.is_numeric() => true, - '.' => true, + '+' => false, + _ if c.is_alphanumeric() => true, '\\' => true, '/' => true, '_' => true, '-' => true, - '@' => true, - '*' => true, - '?' => true, '~' => true, - '+' => true, _ => false, } } fn is_bare_char(c: char) -> bool { match c { + '+' => true, _ if c.is_alphanumeric() => true, - ':' => true, - '.' => true, '\\' => true, '/' => true, '_' => true, '-' => true, - '@' => true, - '*' => true, - '?' => true, '=' => true, '~' => true, - '+' => true, - '%' => true, + ':' => true, + '?' => true, _ => false, } } @@ -671,6 +716,16 @@ fn is_id_continue(c: char) -> bool { } } +fn is_member_start(c: char) -> bool { + match c { + '"' | '\'' => true, + '1'..='9' => true, + + other if is_id_start(other) => true, + _ => false, + } +} + #[cfg(test)] mod tests { use super::*; @@ -680,487 +735,547 @@ mod tests { pub type CurriedNode = Box T + 'static>; - macro_rules! assert_leaf { - (parsers [ $($name:tt)* ] $input:tt -> $left:tt .. $right:tt { $kind:tt $parens:tt } ) => { - $( - assert_eq!( - apply($name, stringify!($name), $input), - token(RawToken::$kind $parens, $left, $right) - ); - )* + macro_rules! 
equal_tokens { + ($source:tt -> $tokens:expr) => { + let result = apply(pipeline, "pipeline", $source); + let (expected_tree, expected_source) = TokenTreeBuilder::build($tokens); - assert_eq!( - apply(leaf, "leaf", $input), - token(RawToken::$kind $parens, $left, $right) - ); + if result != expected_tree { + let debug_result = format!("{}", result.debug($source)); + let debug_expected = format!("{}", expected_tree.debug(&expected_source)); - assert_eq!( - apply(leaf, "leaf", $input), - token(RawToken::$kind $parens, $left, $right) - ); - - assert_eq!( - apply(node, "node", $input), - token(RawToken::$kind $parens, $left, $right) - ); + if debug_result == debug_expected { + assert_eq!( + result, expected_tree, + "NOTE: actual and expected had equivalent debug serializations, source={:?}, debug_expected={:?}", + $source, + debug_expected + ) + } else { + assert_eq!(debug_result, debug_expected) + } + } + }; + + (<$parser:tt> $source:tt -> $tokens:expr) => { + let result = apply($parser, stringify!($parser), $source); + let (expected_tree, expected_source) = TokenTreeBuilder::build($tokens); + + if result != expected_tree { + let debug_result = format!("{}", result.debug($source)); + let debug_expected = format!("{}", expected_tree.debug(&expected_source)); + + if debug_result == debug_expected { + assert_eq!( + result, expected_tree, + "NOTE: actual and expected had equivalent debug serializations, source={:?}, debug_expected={:?}", + $source, + debug_expected + ) + } else { + assert_eq!(debug_result, debug_expected) + } + } }; - (parsers [ $($name:tt)* ] $input:tt -> $left:tt .. $right:tt { $kind:tt } ) => { - $( - assert_eq!( - apply($name, stringify!($name), $input), - token(RawToken::$kind, $left, $right) - ); - )* - } } #[test] fn test_integer() { - assert_leaf! { - parsers [ size ] - "123" -> 0..3 { Number(RawNumber::int((0, 3)).item) } + equal_tokens! { + + "123" -> b::token_list(vec![b::int(123)]) } - assert_leaf! { - parsers [ size ] - "-123" -> 0..4 { Number(RawNumber::int((0, 4)).item) } - } - } - - #[test] - fn test_size() { - assert_leaf! { - parsers [ size ] - "123MB" -> 0..5 { Size(RawNumber::int((0, 3)).item, Unit::MB) } - } - - assert_leaf! { - parsers [ size ] - "10GB" -> 0..4 { Size(RawNumber::int((0, 2)).item, Unit::GB) } + equal_tokens! { + + "-123" -> b::token_list(vec![b::int(-123)]) } } #[test] fn test_operator() { - assert_eq!(apply(node, "node", ">"), build_token(b::op(">"))); + equal_tokens! { + + ">" -> b::token_list(vec![b::op(">")]) + } - // assert_leaf! { - // parsers [ operator ] - // ">=" -> 0..2 { Operator(Operator::GreaterThanOrEqual) } - // } + equal_tokens! { + + ">=" -> b::token_list(vec![b::op(">=")]) + } - // assert_leaf! { - // parsers [ operator ] - // "<" -> 0..1 { Operator(Operator::LessThan) } - // } + equal_tokens! { + + "<" -> b::token_list(vec![b::op("<")]) + } - // assert_leaf! { - // parsers [ operator ] - // "<=" -> 0..2 { Operator(Operator::LessThanOrEqual) } - // } + equal_tokens! { + + "<=" -> b::token_list(vec![b::op("<=")]) + } - // assert_leaf! { - // parsers [ operator ] - // "==" -> 0..2 { Operator(Operator::Equal) } - // } + equal_tokens! { + + "==" -> b::token_list(vec![b::op("==")]) + } - // assert_leaf! { - // parsers [ operator ] - // "!=" -> 0..2 { Operator(Operator::NotEqual) } - // } + equal_tokens! { + + "!=" -> b::token_list(vec![b::op("!=")]) + } } #[test] fn test_string() { - assert_leaf! { - parsers [ string dq_string ] - r#""hello world""# -> 0..13 { String(span(1, 12)) } + equal_tokens! 
{ + + r#""hello world""# -> b::token_list(vec![b::string("hello world")]) } - assert_leaf! { - parsers [ string sq_string ] - r"'hello world'" -> 0..13 { String(span(1, 12)) } + equal_tokens! { + + r#"'hello world'"# -> b::token_list(vec![b::string("hello world")]) } } #[test] fn test_bare() { - assert_leaf! { - parsers [ bare ] - "hello" -> 0..5 { Bare } + equal_tokens! { + + "hello" -> b::token_list(vec![b::bare("hello")]) + } + } + + #[test] + fn test_unit_sizes() { + equal_tokens! { + + "450MB" -> b::token_list(vec![b::bare("450MB")]) + } + } + #[test] + fn test_simple_path() { + equal_tokens! { + + "chrome.exe" -> b::token_list(vec![b::bare("chrome"), b::op(Operator::Dot), b::bare("exe")]) } - assert_leaf! { - parsers [ bare ] - "chrome.exe" -> 0..10 { Bare } + equal_tokens! { + + ".azure" -> b::token_list(vec![b::op(Operator::Dot), b::bare("azure")]) } - assert_leaf! { - parsers [ bare ] - r"C:\windows\system.dll" -> 0..21 { Bare } + equal_tokens! { + + r"C:\windows\system.dll" -> b::token_list(vec![b::bare(r"C:\windows\system"), b::op(Operator::Dot), b::bare("dll")]) } - assert_leaf! { - parsers [ bare ] - r"C:\Code\-testing\my_tests.js" -> 0..28 { Bare } + equal_tokens! { + + r"C:\Code\-testing\my_tests.js" -> b::token_list(vec![b::bare(r"C:\Code\-testing\my_tests"), b::op(Operator::Dot), b::bare("js")]) } } #[test] fn test_flag() { - // assert_leaf! { - // parsers [ flag ] - // "--hello" -> 0..7 { Flag(Tagged::from_item(FlagKind::Longhand, span(2, 7))) } - // } + equal_tokens! { + + "--amigos" -> b::token_list(vec![b::flag("arepas")]) + } - // assert_leaf! { - // parsers [ flag ] - // "--hello-world" -> 0..13 { Flag(Tagged::from_item(FlagKind::Longhand, span(2, 13))) } - // } + equal_tokens! { + + "--all-amigos" -> b::token_list(vec![b::flag("all-amigos")]) + } } #[test] - fn test_shorthand() { - // assert_leaf! { - // parsers [ shorthand ] - // "-alt" -> 0..4 { Flag(Tagged::from_item(FlagKind::Shorthand, span(1, 4))) } - // } + fn test_shorthand_flag() { + equal_tokens! { + + "-katz" -> b::token_list(vec![b::shorthand("katz")]) + } } #[test] fn test_variable() { - assert_leaf! { - parsers [ var ] - "$it" -> 0..3 { Variable(span(1, 3)) } + equal_tokens! { + + "$it" -> b::token_list(vec![b::var("it")]) } - assert_leaf! { - parsers [ var ] - "$name" -> 0..5 { Variable(span(1, 5)) } + equal_tokens! { + + "$name" -> b::token_list(vec![b::var("name")]) } } #[test] fn test_external() { - assert_leaf! { - parsers [ external ] - "^ls" -> 0..3 { External(span(1, 3)) } + equal_tokens! { + + "^ls" -> b::token_list(vec![b::external_command("ls")]) + } + } + + #[test] + fn test_dot_prefixed_name() { + equal_tokens! { + + ".azure" -> b::token_list(vec![b::op("."), b::bare("azure")]) } } #[test] fn test_delimited_paren() { - assert_eq!( - apply(node, "node", "(abc)"), - build_token(b::parens(vec![b::bare("abc")])) - ); + equal_tokens! { + + "(abc)" -> b::token_list(vec![b::parens(vec![b::bare("abc")])]) + } - assert_eq!( - apply(node, "node", "( abc )"), - build_token(b::parens(vec![b::ws(" "), b::bare("abc"), b::ws(" ")])) - ); + equal_tokens! { + + "( abc )" -> b::token_list(vec![b::parens(vec![b::ws(" "), b::bare("abc"), b::ws(" ")])]) + } - assert_eq!( - apply(node, "node", "( abc def )"), - build_token(b::parens(vec![ - b::ws(" "), - b::bare("abc"), - b::sp(), - b::bare("def"), - b::sp() - ])) - ); + equal_tokens! 
{ + + "( abc def )" -> b::token_list(vec![b::parens(vec![b::ws(" "), b::bare("abc"), b::sp(), b::bare("def"), b::sp()])]) + } - assert_eq!( - apply(node, "node", "( abc def 123 456GB )"), - build_token(b::parens(vec![ - b::ws(" "), - b::bare("abc"), - b::sp(), - b::bare("def"), - b::sp(), - b::int(123), - b::sp(), - b::size(456, "GB"), - b::sp() - ])) - ); + equal_tokens! { + + "( abc def 123 456GB )" -> b::token_list(vec![b::parens(vec![ + b::ws(" "), b::bare("abc"), b::sp(), b::bare("def"), b::sp(), b::int(123), b::sp(), b::bare("456GB"), b::sp() + ])]) + } } #[test] fn test_delimited_square() { - assert_eq!( - apply(node, "node", "[abc]"), - build_token(b::square(vec![b::bare("abc")])) - ); + equal_tokens! { + + "[abc]" -> b::token_list(vec![b::square(vec![b::bare("abc")])]) + } - assert_eq!( - apply(node, "node", "[ abc ]"), - build_token(b::square(vec![b::ws(" "), b::bare("abc"), b::ws(" ")])) - ); + equal_tokens! { + + "[ abc ]" -> b::token_list(vec![b::square(vec![b::ws(" "), b::bare("abc"), b::ws(" ")])]) + } - assert_eq!( - apply(node, "node", "[ abc def ]"), - build_token(b::square(vec![ - b::ws(" "), - b::bare("abc"), - b::sp(), - b::bare("def"), - b::sp() - ])) - ); + equal_tokens! { + + "[ abc def ]" -> b::token_list(vec![b::square(vec![b::ws(" "), b::bare("abc"), b::sp(), b::bare("def"), b::sp()])]) + } - assert_eq!( - apply(node, "node", "[ abc def 123 456GB ]"), - build_token(b::square(vec![ - b::ws(" "), - b::bare("abc"), - b::sp(), - b::bare("def"), - b::sp(), - b::int(123), - b::sp(), - b::size(456, "GB"), - b::sp() - ])) - ); + equal_tokens! { + + "[ abc def 123 456GB ]" -> b::token_list(vec![b::square(vec![ + b::ws(" "), b::bare("abc"), b::sp(), b::bare("def"), b::sp(), b::int(123), b::sp(), b::bare("456GB"), b::sp() + ])]) + } } #[test] fn test_path() { let _ = pretty_env_logger::try_init(); - assert_eq!( - apply(node, "node", "$it.print"), - build_token(b::path(b::var("it"), vec![b::member("print")])) - ); - assert_eq!( - apply(node, "node", "$head.part1.part2"), - build_token(b::path( - b::var("head"), - vec![b::member("part1"), b::member("part2")] - )) - ); + equal_tokens! { + + "$it.print" -> b::token_list(vec![b::var("it"), b::op("."), b::bare("print")]) + } - assert_eq!( - apply(node, "node", "( hello ).world"), - build_token(b::path( - b::parens(vec![b::sp(), b::bare("hello"), b::sp()]), - vec![b::member("world")] - )) - ); + equal_tokens! { + + "$it.0" -> b::token_list(vec![b::var("it"), b::op("."), b::int(0)]) + } - assert_eq!( - apply(node, "node", "( hello ).\"world\""), - build_token(b::path( - b::parens(vec![b::sp(), b::bare("hello"), b::sp()],), - vec![b::string("world")] - )) - ); + equal_tokens! { + + "$head.part1.part2" -> b::token_list(vec![b::var("head"), b::op("."), b::bare("part1"), b::op("."), b::bare("part2")]) + } + + equal_tokens! { + + "( hello ).world" -> b::token_list(vec![b::parens(vec![b::sp(), b::bare("hello"), b::sp()]), b::op("."), b::bare("world")]) + } + + equal_tokens! { + + r#"( hello )."world""# -> b::token_list(vec![b::parens(vec![b::sp(), b::bare("hello"), b::sp()]), b::op("."), b::string("world")]) + } } #[test] fn test_nested_path() { - assert_eq!( - apply( - node, - "node", - "( $it.is.\"great news\".right yep $yep ).\"world\"" - ), - build_token(b::path( - b::parens(vec![ - b::sp(), - b::path( + equal_tokens! 
{ + + r#"( $it.is."great news".right yep $yep )."world""# -> b::token_list( + vec![ + b::parens(vec![ + b::sp(), b::var("it"), - vec![b::member("is"), b::string("great news"), b::member("right")] - ), - b::sp(), - b::bare("yep"), - b::sp(), - b::var("yep"), - b::sp() - ]), - vec![b::string("world")] - )) - ) + b::op("."), + b::bare("is"), + b::op("."), + b::string("great news"), + b::op("."), + b::bare("right"), + b::sp(), + b::bare("yep"), + b::sp(), + b::var("yep"), + b::sp() + ]), + b::op("."), b::string("world")] + ) + } + + equal_tokens! { + + r#"$it."are PAS".0"# -> b::token_list( + vec![ + b::var("it"), + b::op("."), + b::string("are PAS"), + b::op("."), + b::int(0), + ] + ) + } } #[test] fn test_smoke_single_command() { - assert_eq!( - apply(raw_call, "raw_call", "git add ."), - build(b::call( - b::bare("git"), - vec![b::sp(), b::bare("add"), b::sp(), b::bare(".")] - )) - ); + equal_tokens! { + + "git add ." -> b::token_list(vec![b::bare("git"), b::sp(), b::bare("add"), b::sp(), b::op(".")]) + } - assert_eq!( - apply(raw_call, "raw_call", "open Cargo.toml"), - build(b::call( - b::bare("open"), - vec![b::sp(), b::bare("Cargo.toml")] - )) - ); + equal_tokens! { + + "open Cargo.toml" -> b::token_list(vec![b::bare("open"), b::sp(), b::bare("Cargo"), b::op("."), b::bare("toml")]) + } - assert_eq!( - apply(raw_call, "raw_call", "select package.version"), - build(b::call( - b::bare("select"), - vec![b::sp(), b::bare("package.version")] - )) - ); + equal_tokens! { + + "select package.version" -> b::token_list(vec![b::bare("select"), b::sp(), b::bare("package"), b::op("."), b::bare("version")]) + } - assert_eq!( - apply(raw_call, "raw_call", "echo $it"), - build(b::call(b::bare("echo"), vec![b::sp(), b::var("it")])) - ); + equal_tokens! { + + "echo $it" -> b::token_list(vec![b::bare("echo"), b::sp(), b::var("it")]) + } - assert_eq!( - apply(raw_call, "raw_call", "open Cargo.toml --raw"), - build(b::call( - b::bare("open"), - vec![b::sp(), b::bare("Cargo.toml"), b::sp(), b::flag("raw")] - )) - ); + equal_tokens! { + + "open Cargo.toml --raw" -> b::token_list(vec![b::bare("open"), b::sp(), b::bare("Cargo"), b::op("."), b::bare("toml"), b::sp(), b::flag("raw")]) + } - assert_eq!( - apply(raw_call, "raw_call", "open Cargo.toml -r"), - build(b::call( - b::bare("open"), - vec![b::sp(), b::bare("Cargo.toml"), b::sp(), b::shorthand("r")] - )) - ); + equal_tokens! { + + "open Cargo.toml -r" -> b::token_list(vec![b::bare("open"), b::sp(), b::bare("Cargo"), b::op("."), b::bare("toml"), b::sp(), b::shorthand("r")]) + } - assert_eq!( - apply(raw_call, "raw_call", "config --set tabs 2"), - build(b::call( - b::bare("config"), + equal_tokens! { + + "config --set tabs 2" -> b::token_list(vec![b::bare("config"), b::sp(), b::flag("set"), b::sp(), b::bare("tabs"), b::sp(), b::int(2)]) + } + + equal_tokens! 
{ + + "inc --patch package.version" -> b::token_list( vec![ + b::bare("inc"), b::sp(), - b::flag("set"), + b::flag("patch"), b::sp(), - b::bare("tabs"), - b::sp(), - b::int(2) + b::bare("package"), b::op("."), b::bare("version") ] - )) + ) + } + } + + #[test] + fn test_external_word() { + let _ = pretty_env_logger::try_init(); + + equal_tokens!( + "cargo +nightly run" -> + b::pipeline(vec![vec![ + b::bare("cargo"), + b::sp(), + b::external_word("+nightly"), + b::sp(), + b::bare("run") + ]]) + ); + + equal_tokens!( + "rm foo%bar" -> + b::pipeline(vec![vec![ + b::bare("rm"), b::sp(), b::external_word("foo%bar") + ]]) + ); + + equal_tokens!( + "rm foo%bar" -> + b::pipeline(vec![vec![ + b::bare("rm"), b::sp(), b::external_word("foo%bar"), + ]]) ); } #[test] - fn test_smoke_pipeline() { + fn test_pipeline() { let _ = pretty_env_logger::try_init(); - assert_eq!( - apply( - pipeline, - "pipeline", - r#"git branch --merged | split-row "`n" | where $it != "* master""# - ), - build_token(b::pipeline(vec![ - ( - None, - b::call( - b::bare("git"), - vec![b::sp(), b::bare("branch"), b::sp(), b::flag("merged")] - ), - Some(" ") - ), - ( - Some(" "), - b::call(b::bare("split-row"), vec![b::sp(), b::string("`n")]), - Some(" ") - ), - ( - Some(" "), - b::call( - b::bare("where"), - vec![ - b::sp(), - b::var("it"), - b::sp(), - b::op("!="), - b::sp(), - b::string("* master") - ] - ), - None - ) - ])) - ); - - assert_eq!( - apply(pipeline, "pipeline", "ls | where { $it.size > 100 }"), - build_token(b::pipeline(vec![ - (None, b::call(b::bare("ls"), vec![]), Some(" ")), - ( - Some(" "), - b::call( - b::bare("where"), - vec![ - b::sp(), - b::braced(vec![ - b::path(b::var("it"), vec![b::member("size")]), - b::sp(), - b::op(">"), - b::sp(), - b::int(100) - ]) - ] - ), - None - ) - ])) - ) - } - - fn apply( - f: impl Fn(NomSpan) -> Result<(NomSpan, T), nom::Err<(NomSpan, nom::error::ErrorKind)>>, - desc: &str, - string: &str, - ) -> T { - match f(NomSpan::new(string)) { - Ok(v) => v.1, - Err(other) => { - println!("{:?}", other); - println!("for {} @ {}", string, desc); - panic!("No dice"); - } + equal_tokens! { + "sys | echo" -> b::pipeline(vec![ + vec![ + b::bare("sys"), b::sp() + ], + vec![ + b::sp(), b::bare("echo") + ] + ]) } } - fn span(left: usize, right: usize) -> Span { - Span::from((left, right)) + #[test] + fn test_patterns() { + equal_tokens! { + + "cp ../formats/*" -> b::pipeline(vec![vec![b::bare("cp"), b::sp(), b::pattern("../formats/*")]]) + } + + equal_tokens! 
{ + + "cp * /dev/null" -> b::pipeline(vec![vec![b::bare("cp"), b::sp(), b::pattern("*"), b::sp(), b::bare("/dev/null")]]) + } + } + + #[test] + fn test_pseudo_paths() { + let _ = pretty_env_logger::try_init(); + + equal_tokens!( + + r#"sys | where cpu."max ghz" > 1"# -> b::pipeline(vec![ + vec![ + b::bare("sys"), b::sp() + ], + vec![ + b::sp(), + b::bare("where"), + b::sp(), + b::bare("cpu"), + b::op("."), + b::string("max ghz"), + b::sp(), + b::op(">"), + b::sp(), + b::int(1) + ]]) + ); + } + + // #[test] + // fn test_smoke_pipeline() { + // let _ = pretty_env_logger::try_init(); + + // assert_eq!( + // apply( + // pipeline, + // "pipeline", + // r#"git branch --merged | split-row "`n" | where $it != "* master""# + // ), + // build_token(b::pipeline(vec![ + // ( + // None, + // b::call( + // b::bare("git"), + // vec![b::sp(), b::bare("branch"), b::sp(), b::flag("merged")] + // ), + // Some(" ") + // ), + // ( + // Some(" "), + // b::call(b::bare("split-row"), vec![b::sp(), b::string("`n")]), + // Some(" ") + // ), + // ( + // Some(" "), + // b::call( + // b::bare("where"), + // vec![ + // b::sp(), + // b::var("it"), + // b::sp(), + // b::op("!="), + // b::sp(), + // b::string("* master") + // ] + // ), + // None + // ) + // ])) + // ); + + // assert_eq!( + // apply(pipeline, "pipeline", "ls | where { $it.size > 100 }"), + // build_token(b::pipeline(vec![ + // (None, b::call(b::bare("ls"), vec![]), Some(" ")), + // ( + // Some(" "), + // b::call( + // b::bare("where"), + // vec![ + // b::sp(), + // b::braced(vec![ + // b::path(b::var("it"), vec![b::member("size")]), + // b::sp(), + // b::op(">"), + // b::sp(), + // b::int(100) + // ]) + // ] + // ), + // None + // ) + // ])) + // ) + // } + + fn apply( + f: impl Fn(NomSpan) -> Result<(NomSpan, TokenNode), nom::Err<(NomSpan, nom::error::ErrorKind)>>, + desc: &str, + string: &str, + ) -> TokenNode { + f(nom_input(string)).unwrap().1 + } + + fn span((left, right): (usize, usize)) -> Span { + Span::new(left, right) } fn delimited( - delimiter: Delimiter, + delimiter: Spanned, children: Vec, left: usize, right: usize, ) -> TokenNode { - let node = DelimitedNode::new(delimiter, children); - let spanned = Tagged::from_simple_spanned_item(node, (left, right)); + let start = Span::for_char(left); + let end = Span::for_char(right); + + let node = DelimitedNode::new(delimiter.item, (start, end), children); + let spanned = node.spanned(Span::new(left, right)); TokenNode::Delimited(spanned) } - fn path(head: TokenNode, tail: Vec, left: usize, right: usize) -> TokenNode { - let node = PathNode::new( - Box::new(head), - tail.into_iter().map(TokenNode::Token).collect(), - ); - let spanned = Tagged::from_simple_spanned_item(node, (left, right)); - TokenNode::Path(spanned) - } - - fn leaf_token(token: RawToken, left: usize, right: usize) -> TokenNode { - TokenNode::Token(Tagged::from_simple_spanned_item(token, (left, right))) - } - fn token(token: RawToken, left: usize, right: usize) -> TokenNode { - TokenNode::Token(Tagged::from_simple_spanned_item(token, (left, right))) + TokenNode::Token(token.spanned(Span::new(left, right))) } fn build(block: CurriedNode) -> T { @@ -1169,7 +1284,6 @@ mod tests { } fn build_token(block: CurriedToken) -> TokenNode { - let mut builder = TokenTreeBuilder::new(); - block(&mut builder) + TokenTreeBuilder::build(block).0 } } diff --git a/src/parser/parse/pipeline.rs b/src/parser/parse/pipeline.rs index 20365bfbc7..4a8c72119c 100644 --- a/src/parser/parse/pipeline.rs +++ b/src/parser/parse/pipeline.rs @@ -1,19 +1,44 @@ -use 
crate::parser::CallNode; -use crate::{Span, Tagged}; +use crate::parser::TokenNode; +use crate::traits::ToDebug; +use crate::{Span, Spanned}; use derive_new::new; use getset::Getters; +use std::fmt; -#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, new)] +#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Getters, new)] pub struct Pipeline { - pub(crate) parts: Vec, - pub(crate) post_ws: Option, + #[get = "pub"] + pub(crate) parts: Vec>, + // pub(crate) post_ws: Option, +} + +impl ToDebug for Pipeline { + fn fmt_debug(&self, f: &mut fmt::Formatter, source: &str) -> fmt::Result { + for part in self.parts.iter() { + write!(f, "{}", part.debug(source))?; + } + + Ok(()) + } } #[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Getters, new)] pub struct PipelineElement { - pub pre_ws: Option, - #[get = "pub(crate)"] - call: Tagged, - pub post_ws: Option, - pub post_pipe: Option, + pub pipe: Option, + #[get = "pub"] + pub tokens: Spanned>, +} + +impl ToDebug for PipelineElement { + fn fmt_debug(&self, f: &mut fmt::Formatter, source: &str) -> fmt::Result { + if let Some(pipe) = self.pipe { + write!(f, "{}", pipe.slice(source))?; + } + + for token in &self.tokens.item { + write!(f, "{}", token.debug(source))?; + } + + Ok(()) + } } diff --git a/src/parser/parse/token_tree.rs b/src/parser/parse/token_tree.rs index df189a1a0c..0d00dcff0d 100644 --- a/src/parser/parse/token_tree.rs +++ b/src/parser/parse/token_tree.rs @@ -1,8 +1,9 @@ use crate::errors::ShellError; use crate::parser::parse::{call_node::*, flag::*, operator::*, pipeline::*, tokens::*}; -use crate::{Span, Tagged, Text}; +use crate::prelude::*; +use crate::traits::ToDebug; +use crate::{Tagged, Text}; use derive_new::new; -use enum_utils::FromStr; use getset::Getters; use std::fmt; @@ -10,16 +11,20 @@ use std::fmt; pub enum TokenNode { Token(Token), - Call(Tagged), - Delimited(Tagged), - Pipeline(Tagged), - Operator(Tagged), - Flag(Tagged), - Member(Span), + Call(Spanned), + Nodes(Spanned>), + Delimited(Spanned), + Pipeline(Spanned), + Flag(Spanned), Whitespace(Span), - Error(Tagged>), - Path(Tagged), + Error(Spanned), +} + +impl ToDebug for TokenNode { + fn fmt_debug(&self, f: &mut fmt::Formatter, source: &str) -> fmt::Result { + write!(f, "{:?}", self.old_debug(&Text::from(source))) + } } pub struct DebugTokenNode<'a> { @@ -34,11 +39,11 @@ impl fmt::Debug for DebugTokenNode<'_> { TokenNode::Call(s) => { write!(f, "(")?; - write!(f, "{:?}", s.head().debug(self.source))?; + write!(f, "{}", s.head().debug(self.source))?; if let Some(children) = s.children() { for child in children { - write!(f, "{:?}", child.debug(self.source))?; + write!(f, "{}", child.debug(self.source))?; } } @@ -57,7 +62,7 @@ impl fmt::Debug for DebugTokenNode<'_> { )?; for child in d.children() { - write!(f, "{:?}", child.debug(self.source))?; + write!(f, "{:?}", child.old_debug(self.source))?; } write!( @@ -70,8 +75,8 @@ impl fmt::Debug for DebugTokenNode<'_> { } ) } - TokenNode::Pipeline(_) => write!(f, ""), - TokenNode::Error(s) => write!(f, " for {:?}", s.span().slice(self.source)), + TokenNode::Pipeline(pipeline) => write!(f, "{}", pipeline.debug(self.source)), + TokenNode::Error(_) => write!(f, ""), rest => write!(f, "{}", rest.span().slice(self.source)), } } @@ -86,36 +91,39 @@ impl From<&TokenNode> for Span { impl TokenNode { pub fn span(&self) -> Span { match self { - TokenNode::Token(t) => t.span(), - TokenNode::Call(s) => s.span(), - TokenNode::Delimited(s) => s.span(), - TokenNode::Pipeline(s) => s.span(), - TokenNode::Operator(s) => s.span(), 
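Throughout these hunks the parser data structures move from `Tagged<T>` to `Spanned<T>`: span access becomes a plain `span` field, construction goes through a `.spanned(...)` helper, and pattern matches destructure `Spanned { item, span }`. A minimal, self-contained sketch of that shape, using simplified stand-in types rather than nu's real `Span`/`Spanned` (which carry more machinery than shown here):

```rust
// Illustrative only: a bare-bones Span/Spanned pair mirroring the pattern
// this patch adopts, not the actual nu types.

#[derive(Debug, Clone, Copy, PartialEq, Eq)]
struct Span {
    start: usize,
    end: usize,
}

impl Span {
    fn new(start: usize, end: usize) -> Span {
        Span { start, end }
    }

    fn slice<'a>(&self, source: &'a str) -> &'a str {
        &source[self.start..self.end]
    }
}

#[derive(Debug, Clone, PartialEq, Eq)]
struct Spanned<T> {
    item: T,
    span: Span,
}

// Blanket helper mirroring the `.spanned(...)` calls used throughout the patch.
trait SpannedItem: Sized {
    fn spanned(self, span: Span) -> Spanned<Self> {
        Spanned { item: self, span }
    }
}

impl<T> SpannedItem for T {}

fn main() {
    let source = "ls | where size > 100";
    let bare = "ls".to_string().spanned(Span::new(0, 2));

    // Destructuring on `Spanned { item, span }` replaces the old
    // `Tagged { item, tag }` patterns seen in the removed code.
    let Spanned { item, span } = &bare;
    println!("{} covers {:?} -> {:?}", item, span, span.slice(source));
}
```

The practical effect, as the diff shows, is that `t.span()` call sites become field reads (`t.span`) and constructors stop threading `Tag` metadata where only a byte range is needed.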
- TokenNode::Flag(s) => s.span(), - TokenNode::Member(s) => *s, + TokenNode::Token(t) => t.span, + TokenNode::Nodes(t) => t.span, + TokenNode::Call(s) => s.span, + TokenNode::Delimited(s) => s.span, + TokenNode::Pipeline(s) => s.span, + TokenNode::Flag(s) => s.span, TokenNode::Whitespace(s) => *s, - TokenNode::Error(s) => s.span(), - TokenNode::Path(s) => s.span(), + TokenNode::Error(s) => s.span, } } - pub fn type_name(&self) -> String { + pub fn type_name(&self) -> &'static str { match self { TokenNode::Token(t) => t.type_name(), + TokenNode::Nodes(_) => "nodes", TokenNode::Call(_) => "command", TokenNode::Delimited(d) => d.type_name(), TokenNode::Pipeline(_) => "pipeline", - TokenNode::Operator(_) => "operator", TokenNode::Flag(_) => "flag", - TokenNode::Member(_) => "member", TokenNode::Whitespace(_) => "whitespace", TokenNode::Error(_) => "error", - TokenNode::Path(_) => "path", } - .to_string() } - pub fn debug<'a>(&'a self, source: &'a Text) -> DebugTokenNode<'a> { + pub fn spanned_type_name(&self) -> Spanned<&'static str> { + self.type_name().spanned(self.span()) + } + + pub fn tagged_type_name(&self) -> Tagged<&'static str> { + self.type_name().tagged(self.span()) + } + + pub fn old_debug<'a>(&'a self, source: &'a Text) -> DebugTokenNode<'a> { DebugTokenNode { node: self, source } } @@ -127,9 +135,19 @@ impl TokenNode { self.span().slice(source) } + pub fn get_variable(&self) -> Result<(Span, Span), ShellError> { + match self { + TokenNode::Token(Spanned { + item: RawToken::Variable(inner_span), + span: outer_span, + }) => Ok((*outer_span, *inner_span)), + _ => Err(ShellError::type_error("variable", self.tagged_type_name())), + } + } + pub fn is_bare(&self) -> bool { match self { - TokenNode::Token(Tagged { + TokenNode::Token(Spanned { item: RawToken::Bare, .. }) => true, @@ -137,30 +155,75 @@ impl TokenNode { } } - pub fn is_external(&self) -> bool { + pub fn is_string(&self) -> bool { match self { - TokenNode::Token(Tagged { - item: RawToken::External(..), + TokenNode::Token(Spanned { + item: RawToken::String(_), .. }) => true, _ => false, } } - pub fn expect_external(&self) -> Span { + pub fn as_string(&self) -> Option<(Span, Span)> { match self { - TokenNode::Token(Tagged { - item: RawToken::External(span), - .. - }) => *span, - _ => panic!("Only call expect_external if you checked is_external first"), + TokenNode::Token(Spanned { + item: RawToken::String(inner_span), + span: outer_span, + }) => Some((*outer_span, *inner_span)), + _ => None, } } - pub(crate) fn as_flag(&self, value: &str, source: &Text) -> Option> { + pub fn is_pattern(&self) -> bool { + match self { + TokenNode::Token(Spanned { + item: RawToken::GlobPattern, + .. + }) => true, + _ => false, + } + } + + pub fn is_dot(&self) -> bool { + match self { + TokenNode::Token(Spanned { + item: RawToken::Operator(Operator::Dot), + .. + }) => true, + _ => false, + } + } + + pub fn as_block(&self) -> Option<(Spanned<&[TokenNode]>, (Span, Span))> { + match self { + TokenNode::Delimited(Spanned { + item: + DelimitedNode { + delimiter, + children, + spans, + }, + span, + }) if *delimiter == Delimiter::Brace => Some(((&children[..]).spanned(*span), *spans)), + _ => None, + } + } + + pub fn is_external(&self) -> bool { + match self { + TokenNode::Token(Spanned { + item: RawToken::ExternalCommand(..), + .. + }) => true, + _ => false, + } + } + + pub(crate) fn as_flag(&self, value: &str, source: &Text) -> Option> { match self { TokenNode::Flag( - flag @ Tagged { + flag @ Spanned { item: Flag { .. }, .. 
}, ) if value == flag.name().slice(source) => Some(*flag), @@ -170,8 +233,15 @@ impl TokenNode { pub fn as_pipeline(&self) -> Result { match self { - TokenNode::Pipeline(Tagged { item, .. }) => Ok(item.clone()), - _ => Err(ShellError::string("unimplemented")), + TokenNode::Pipeline(Spanned { item, .. }) => Ok(item.clone()), + _ => Err(ShellError::type_error("pipeline", self.tagged_type_name())), + } + } + + pub fn is_whitespace(&self) -> bool { + match self { + TokenNode::Whitespace(_) => true, + _ => false, } } } @@ -179,8 +249,9 @@ impl TokenNode { #[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Getters, new)] #[get = "pub(crate)"] pub struct DelimitedNode { - delimiter: Delimiter, - children: Vec, + pub(crate) delimiter: Delimiter, + pub(crate) spans: (Span, Span), + pub(crate) children: Vec, } impl DelimitedNode { @@ -193,16 +264,107 @@ impl DelimitedNode { } } -#[derive(Debug, Copy, Clone, Eq, PartialEq, Ord, PartialOrd, FromStr)] +#[derive(Debug, Copy, Clone, Eq, PartialEq, Ord, PartialOrd)] pub enum Delimiter { Paren, Brace, Square, } +impl Delimiter { + pub(crate) fn open(&self) -> &'static str { + match self { + Delimiter::Paren => "(", + Delimiter::Brace => "{", + Delimiter::Square => "[", + } + } + + pub(crate) fn close(&self) -> &'static str { + match self { + Delimiter::Paren => ")", + Delimiter::Brace => "}", + Delimiter::Square => "]", + } + } +} + #[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Getters, new)] #[get = "pub(crate)"] pub struct PathNode { head: Box, tail: Vec, } + +#[cfg(test)] +impl TokenNode { + pub fn expect_external(&self) -> Span { + match self { + TokenNode::Token(Spanned { + item: RawToken::ExternalCommand(span), + .. + }) => *span, + other => panic!( + "Only call expect_external if you checked is_external first, found {:?}", + other + ), + } + } + + pub fn expect_string(&self) -> (Span, Span) { + match self { + TokenNode::Token(Spanned { + item: RawToken::String(inner_span), + span: outer_span, + }) => (*outer_span, *inner_span), + other => panic!("Expected string, found {:?}", other), + } + } + + pub fn expect_list(&self) -> &[TokenNode] { + match self { + TokenNode::Nodes(token_nodes) => &token_nodes[..], + other => panic!("Expected list, found {:?}", other), + } + } + + pub fn expect_pattern(&self) -> Span { + match self { + TokenNode::Token(Spanned { + item: RawToken::GlobPattern, + span: outer_span, + }) => *outer_span, + other => panic!("Expected pattern, found {:?}", other), + } + } + + pub fn expect_var(&self) -> (Span, Span) { + match self { + TokenNode::Token(Spanned { + item: RawToken::Variable(inner_span), + span: outer_span, + }) => (*outer_span, *inner_span), + other => panic!("Expected var, found {:?}", other), + } + } + + pub fn expect_dot(&self) -> Span { + match self { + TokenNode::Token(Spanned { + item: RawToken::Operator(Operator::Dot), + span, + }) => *span, + other => panic!("Expected dot, found {:?}", other), + } + } + + pub fn expect_bare(&self) -> Span { + match self { + TokenNode::Token(Spanned { + item: RawToken::Bare, + span, + }) => *span, + other => panic!("Expected bare, found {:?}", other), + } + } +} diff --git a/src/parser/parse/token_tree_builder.rs b/src/parser/parse/token_tree_builder.rs index ad4eebdc8a..7146a3c201 100644 --- a/src/parser/parse/token_tree_builder.rs +++ b/src/parser/parse/token_tree_builder.rs @@ -3,95 +3,98 @@ use crate::prelude::*; use crate::parser::parse::flag::{Flag, FlagKind}; use crate::parser::parse::operator::Operator; use crate::parser::parse::pipeline::{Pipeline, 
PipelineElement}; -use crate::parser::parse::token_tree::{DelimitedNode, Delimiter, PathNode, TokenNode}; +use crate::parser::parse::token_tree::{DelimitedNode, Delimiter, TokenNode}; use crate::parser::parse::tokens::{RawNumber, RawToken}; -use crate::parser::parse::unit::Unit; use crate::parser::CallNode; -use crate::Span; use derive_new::new; #[derive(new)] pub struct TokenTreeBuilder { #[new(default)] pos: usize, + + #[new(default)] + output: String, } pub type CurriedToken = Box TokenNode + 'static>; -pub type CurriedCall = Box Tagged + 'static>; +pub type CurriedCall = Box Spanned + 'static>; impl TokenTreeBuilder { - pub fn build(block: impl FnOnce(&mut Self) -> TokenNode) -> TokenNode { + pub fn build(block: impl FnOnce(&mut Self) -> TokenNode) -> (TokenNode, String) { let mut builder = TokenTreeBuilder::new(); - block(&mut builder) + let node = block(&mut builder); + (node, builder.output) } - pub fn pipeline(input: Vec<(Option<&str>, CurriedCall, Option<&str>)>) -> CurriedToken { - let input: Vec<(Option, CurriedCall, Option)> = input - .into_iter() - .map(|(pre, call, post)| { - ( - pre.map(|s| s.to_string()), - call, - post.map(|s| s.to_string()), - ) - }) - .collect(); + fn build_spanned( + &mut self, + callback: impl FnOnce(&mut TokenTreeBuilder) -> T, + ) -> Spanned { + let start = self.pos; + let ret = callback(self); + let end = self.pos; + ret.spanned(Span::new(start, end)) + } + + pub fn pipeline(input: Vec>) -> CurriedToken { Box::new(move |b| { let start = b.pos; - let mut out: Vec = vec![]; + let mut out: Vec> = vec![]; let mut input = input.into_iter().peekable(); - let (pre, call, post) = input + let head = input .next() .expect("A pipeline must contain at least one element"); - let pre_span = pre.map(|pre| b.consume(&pre)); - let call = call(b); - let post_span = post.map(|post| b.consume(&post)); - let pipe = input.peek().map(|_| Span::from(b.consume("|"))); - out.push(PipelineElement::new( - pre_span.map(Span::from), - call, - post_span.map(Span::from), - pipe, - )); + let pipe = None; + let head = b.build_spanned(|b| head.into_iter().map(|node| node(b)).collect()); + + let head_span: Span = head.span; + out.push(PipelineElement::new(pipe, head).spanned(head_span)); loop { match input.next() { None => break, - Some((pre, call, post)) => { - let pre_span = pre.map(|pre| b.consume(&pre)); - let call = call(b); - let post_span = post.map(|post| b.consume(&post)); + Some(node) => { + let start = b.pos; + let pipe = Some(b.consume_span("|")); + let node = + b.build_spanned(|b| node.into_iter().map(|node| node(b)).collect()); + let end = b.pos; - let pipe = input.peek().map(|_| Span::from(b.consume("|"))); - - out.push(PipelineElement::new( - pre_span.map(Span::from), - call, - post_span.map(Span::from), - pipe, - )); + out.push(PipelineElement::new(pipe, node).spanned(Span::new(start, end))); } } } let end = b.pos; - TokenTreeBuilder::spanned_pipeline((out, None), (start, end)) + TokenTreeBuilder::spanned_pipeline(out, Span::new(start, end)) }) } pub fn spanned_pipeline( - input: (Vec, Option), + input: Vec>, span: impl Into, ) -> TokenNode { - TokenNode::Pipeline(Tagged::from_simple_spanned_item( - Pipeline::new(input.0, input.1.into()), - span, - )) + TokenNode::Pipeline(Pipeline::new(input).spanned(span)) + } + + pub fn token_list(input: Vec) -> CurriedToken { + Box::new(move |b| { + let start = b.pos; + let tokens = input.into_iter().map(|i| i(b)).collect(); + let end = b.pos; + + TokenTreeBuilder::spanned_token_list(tokens, Span::new(start, end)) + }) + } + + pub 
fn spanned_token_list(input: Vec, span: impl Into) -> TokenNode { + TokenNode::Nodes(input.spanned(span.into())) } pub fn op(input: impl Into) -> CurriedToken { @@ -102,12 +105,12 @@ impl TokenTreeBuilder { b.pos = end; - TokenTreeBuilder::spanned_op(input, (start, end)) + TokenTreeBuilder::spanned_op(input, Span::new(start, end)) }) } pub fn spanned_op(input: impl Into, span: impl Into) -> TokenNode { - TokenNode::Operator(Tagged::from_simple_spanned_item(input.into(), span.into())) + TokenNode::Token(RawToken::Operator(input.into()).spanned(span.into())) } pub fn string(input: impl Into) -> CurriedToken { @@ -119,15 +122,15 @@ impl TokenTreeBuilder { let (_, end) = b.consume("\""); b.pos = end; - TokenTreeBuilder::spanned_string((inner_start, inner_end), (start, end)) + TokenTreeBuilder::spanned_string( + Span::new(inner_start, inner_end), + Span::new(start, end), + ) }) } pub fn spanned_string(input: impl Into, span: impl Into) -> TokenNode { - TokenNode::Token(Tagged::from_simple_spanned_item( - RawToken::String(input.into()), - span.into(), - )) + TokenNode::Token(RawToken::String(input.into()).spanned(span.into())) } pub fn bare(input: impl Into) -> CurriedToken { @@ -137,22 +140,61 @@ impl TokenTreeBuilder { let (start, end) = b.consume(&input); b.pos = end; - TokenTreeBuilder::spanned_bare((start, end)) + TokenTreeBuilder::spanned_bare(Span::new(start, end)) }) } - pub fn spanned_bare(input: impl Into) -> TokenNode { - TokenNode::Token(Tagged::from_simple_spanned_item( - RawToken::Bare, - input.into(), - )) + pub fn spanned_bare(span: impl Into) -> TokenNode { + TokenNode::Token(RawToken::Bare.spanned(span)) } - pub fn spanned_external(input: impl Into, span: impl Into) -> TokenNode { - TokenNode::Token(Tagged::from_simple_spanned_item( - RawToken::External(input.into()), - span.into(), - )) + pub fn pattern(input: impl Into) -> CurriedToken { + let input = input.into(); + + Box::new(move |b| { + let (start, end) = b.consume(&input); + b.pos = end; + + TokenTreeBuilder::spanned_pattern(Span::new(start, end)) + }) + } + + pub fn spanned_pattern(input: impl Into) -> TokenNode { + TokenNode::Token(RawToken::GlobPattern.spanned(input.into())) + } + + pub fn external_word(input: impl Into) -> CurriedToken { + let input = input.into(); + + Box::new(move |b| { + let (start, end) = b.consume(&input); + b.pos = end; + + TokenTreeBuilder::spanned_external_word(Span::new(start, end)) + }) + } + + pub fn spanned_external_word(input: impl Into) -> TokenNode { + TokenNode::Token(RawToken::ExternalWord.spanned(input.into())) + } + + pub fn external_command(input: impl Into) -> CurriedToken { + let input = input.into(); + + Box::new(move |b| { + let (outer_start, _) = b.consume("^"); + let (inner_start, end) = b.consume(&input); + b.pos = end; + + TokenTreeBuilder::spanned_external_command( + Span::new(inner_start, end), + Span::new(outer_start, end), + ) + }) + } + + pub fn spanned_external_command(inner: impl Into, outer: impl Into) -> TokenNode { + TokenNode::Token(RawToken::ExternalCommand(inner.into()).spanned(outer.into())) } pub fn int(input: impl Into) -> CurriedToken { @@ -162,7 +204,10 @@ impl TokenTreeBuilder { let (start, end) = b.consume(&int.to_string()); b.pos = end; - TokenTreeBuilder::spanned_number(RawNumber::Int((start, end).into()), (start, end)) + TokenTreeBuilder::spanned_number( + RawNumber::Int(Span::new(start, end)), + Span::new(start, end), + ) }) } @@ -173,69 +218,15 @@ impl TokenTreeBuilder { let (start, end) = b.consume(&decimal.to_string()); b.pos = end; - 
TokenTreeBuilder::spanned_number(RawNumber::Decimal((start, end).into()), (start, end)) - }) - } - - pub fn spanned_number(input: impl Into, span: impl Into) -> TokenNode { - TokenNode::Token(Tagged::from_simple_spanned_item( - RawToken::Number(input.into()), - span.into(), - )) - } - - pub fn size(int: impl Into, unit: impl Into) -> CurriedToken { - let int = int.into(); - let unit = unit.into(); - - Box::new(move |b| { - let (start_int, end_int) = b.consume(&int.to_string()); - let (_, end_unit) = b.consume(unit.as_str()); - b.pos = end_unit; - - TokenTreeBuilder::spanned_size( - (RawNumber::Int((start_int, end_int).into()), unit), - (start_int, end_unit), + TokenTreeBuilder::spanned_number( + RawNumber::Decimal(Span::new(start, end)), + Span::new(start, end), ) }) } - pub fn spanned_size( - input: (impl Into, impl Into), - span: impl Into, - ) -> TokenNode { - let (int, unit) = (input.0.into(), input.1.into()); - - TokenNode::Token(Tagged::from_simple_spanned_item( - RawToken::Size(int, unit), - span, - )) - } - - pub fn path(head: CurriedToken, tail: Vec) -> CurriedToken { - Box::new(move |b| { - let start = b.pos; - let head = head(b); - - let mut output = vec![]; - - for item in tail { - b.consume("."); - - output.push(item(b)); - } - - let end = b.pos; - - TokenTreeBuilder::spanned_path((head, output), (start, end)) - }) - } - - pub fn spanned_path(input: (TokenNode, Vec), span: impl Into) -> TokenNode { - TokenNode::Path(Tagged::from_simple_spanned_item( - PathNode::new(Box::new(input.0), input.1), - span, - )) + pub fn spanned_number(input: impl Into, span: impl Into) -> TokenNode { + TokenNode::Token(RawToken::Number(input.into()).spanned(span.into())) } pub fn var(input: impl Into) -> CurriedToken { @@ -245,15 +236,12 @@ impl TokenTreeBuilder { let (start, _) = b.consume("$"); let (inner_start, end) = b.consume(&input); - TokenTreeBuilder::spanned_var((inner_start, end), (start, end)) + TokenTreeBuilder::spanned_var(Span::new(inner_start, end), Span::new(start, end)) }) } pub fn spanned_var(input: impl Into, span: impl Into) -> TokenNode { - TokenNode::Token(Tagged::from_simple_spanned_item( - RawToken::Variable(input.into()), - span.into(), - )) + TokenNode::Token(RawToken::Variable(input.into()).spanned(span.into())) } pub fn flag(input: impl Into) -> CurriedToken { @@ -263,15 +251,12 @@ impl TokenTreeBuilder { let (start, _) = b.consume("--"); let (inner_start, end) = b.consume(&input); - TokenTreeBuilder::spanned_flag((inner_start, end), (start, end)) + TokenTreeBuilder::spanned_flag(Span::new(inner_start, end), Span::new(start, end)) }) } pub fn spanned_flag(input: impl Into, span: impl Into) -> TokenNode { - TokenNode::Flag(Tagged::from_simple_spanned_item( - Flag::new(FlagKind::Longhand, input.into()), - span.into(), - )) + TokenNode::Flag(Flag::new(FlagKind::Longhand, input.into()).spanned(span.into())) } pub fn shorthand(input: impl Into) -> CurriedToken { @@ -286,23 +271,7 @@ impl TokenTreeBuilder { } pub fn spanned_shorthand(input: impl Into, span: impl Into) -> TokenNode { - TokenNode::Flag(Tagged::from_simple_spanned_item( - Flag::new(FlagKind::Shorthand, input.into()), - span.into(), - )) - } - - pub fn member(input: impl Into) -> CurriedToken { - let input = input.into(); - - Box::new(move |b| { - let (start, end) = b.consume(&input); - TokenTreeBuilder::spanned_member((start, end)) - }) - } - - pub fn spanned_member(span: impl Into) -> TokenNode { - TokenNode::Member(span.into()) + TokenNode::Flag(Flag::new(FlagKind::Shorthand, input.into()).spanned(span.into())) 
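These builder changes are the backbone of the `b::` helpers used in the tests earlier in the patch: each helper returns a boxed closure, the builder tracks `pos` and now also accumulates the consumed text in `output`, and `build` hands back both the finished node and the reconstructed source string. A stripped-down sketch of that curried-builder idea, with made-up `Node`/`Builder` types standing in for nu's `TokenNode`/`TokenTreeBuilder`:

```rust
// Sketch of the curried-builder pattern: every helper is a closure that,
// when run against the builder, consumes its text, advances `pos`, appends
// to `output`, and yields a positioned node.

#[derive(Debug)]
enum Node {
    Bare { start: usize, end: usize },
    Whitespace { start: usize, end: usize },
}

#[derive(Default)]
struct Builder {
    pos: usize,
    output: String,
}

type Curried = Box<dyn FnOnce(&mut Builder) -> Node>;

impl Builder {
    fn consume(&mut self, input: &str) -> (usize, usize) {
        let start = self.pos;
        self.pos += input.len();
        self.output.push_str(input);
        (start, self.pos)
    }

    fn build(block: impl FnOnce(&mut Builder) -> Vec<Node>) -> (Vec<Node>, String) {
        let mut b = Builder::default();
        let nodes = block(&mut b);
        (nodes, b.output)
    }
}

fn bare(text: &'static str) -> Curried {
    Box::new(move |b| {
        let (start, end) = b.consume(text);
        Node::Bare { start, end }
    })
}

fn sp() -> Curried {
    Box::new(|b| {
        let (start, end) = b.consume(" ");
        Node::Whitespace { start, end }
    })
}

fn main() {
    let (nodes, source) = Builder::build(|b| {
        vec![bare("ls"), sp(), bare("--all")]
            .into_iter()
            .map(|token| token(b))
            .collect()
    });

    // `source` is what a test would feed to the real lexer; `nodes` is the
    // expected tree to compare against.
    println!("{:?} from {:?}", nodes, source);
}
```

Returning the accumulated `output` alongside the tree is what lets the `equal_tokens!`-style tests build the expected tree and the input string from a single description, instead of keeping the two in sync by hand.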
} pub fn call(head: CurriedToken, input: Vec) -> CurriedCall { @@ -318,11 +287,11 @@ impl TokenTreeBuilder { let end = b.pos; - TokenTreeBuilder::spanned_call(nodes, (start, end)) + TokenTreeBuilder::spanned_call(nodes, Span::new(start, end)) }) } - pub fn spanned_call(input: Vec, span: impl Into) -> Tagged { + pub fn spanned_call(input: Vec, span: impl Into) -> Spanned { if input.len() == 0 { panic!("BUG: spanned call (TODO)") } @@ -332,76 +301,88 @@ impl TokenTreeBuilder { let head = input.next().unwrap(); let tail = input.collect(); - Tagged::from_simple_spanned_item(CallNode::new(Box::new(head), tail), span) + CallNode::new(Box::new(head), tail).spanned(span.into()) + } + + fn consume_delimiter( + &mut self, + input: Vec, + _open: &str, + _close: &str, + ) -> (Span, Span, Span, Vec) { + let (start_open_paren, end_open_paren) = self.consume("("); + let mut output = vec![]; + for item in input { + output.push(item(self)); + } + + let (start_close_paren, end_close_paren) = self.consume(")"); + + let open = Span::new(start_open_paren, end_open_paren); + let close = Span::new(start_close_paren, end_close_paren); + let whole = Span::new(start_open_paren, end_close_paren); + + (open, close, whole, output) } pub fn parens(input: Vec) -> CurriedToken { Box::new(move |b| { - let (start, _) = b.consume("("); - let mut output = vec![]; - for item in input { - output.push(item(b)); - } + let (open, close, whole, output) = b.consume_delimiter(input, "(", ")"); - let (_, end) = b.consume(")"); - - TokenTreeBuilder::spanned_parens(output, (start, end)) + TokenTreeBuilder::spanned_parens(output, (open, close), whole) }) } - pub fn spanned_parens(input: impl Into>, span: impl Into) -> TokenNode { - TokenNode::Delimited(Tagged::from_simple_spanned_item( - DelimitedNode::new(Delimiter::Paren, input.into()), - span, - )) + pub fn spanned_parens( + input: impl Into>, + spans: (Span, Span), + span: impl Into, + ) -> TokenNode { + TokenNode::Delimited( + DelimitedNode::new(Delimiter::Paren, spans, input.into()).spanned(span.into()), + ) } pub fn square(input: Vec) -> CurriedToken { Box::new(move |b| { - let (start, _) = b.consume("["); - let mut output = vec![]; - for item in input { - output.push(item(b)); - } + let (open, close, whole, tokens) = b.consume_delimiter(input, "[", "]"); - let (_, end) = b.consume("]"); - - TokenTreeBuilder::spanned_square(output, (start, end)) + TokenTreeBuilder::spanned_square(tokens, (open, close), whole) }) } - pub fn spanned_square(input: impl Into>, span: impl Into) -> TokenNode { - TokenNode::Delimited(Tagged::from_simple_spanned_item( - DelimitedNode::new(Delimiter::Square, input.into()), - span, - )) + pub fn spanned_square( + input: impl Into>, + spans: (Span, Span), + span: impl Into, + ) -> TokenNode { + TokenNode::Delimited( + DelimitedNode::new(Delimiter::Square, spans, input.into()).spanned(span.into()), + ) } pub fn braced(input: Vec) -> CurriedToken { Box::new(move |b| { - let (start, _) = b.consume("{ "); - let mut output = vec![]; - for item in input { - output.push(item(b)); - } + let (open, close, whole, tokens) = b.consume_delimiter(input, "{", "}"); - let (_, end) = b.consume(" }"); - - TokenTreeBuilder::spanned_brace(output, (start, end)) + TokenTreeBuilder::spanned_brace(tokens, (open, close), whole) }) } - pub fn spanned_brace(input: impl Into>, span: impl Into) -> TokenNode { - TokenNode::Delimited(Tagged::from_simple_spanned_item( - DelimitedNode::new(Delimiter::Brace, input.into()), - span, - )) + pub fn spanned_brace( + input: impl Into>, + spans: 
(Span, Span), + span: impl Into, + ) -> TokenNode { + TokenNode::Delimited( + DelimitedNode::new(Delimiter::Brace, spans, input.into()).spanned(span.into()), + ) } pub fn sp() -> CurriedToken { Box::new(|b| { let (start, end) = b.consume(" "); - TokenNode::Whitespace(Span::from((start, end))) + TokenNode::Whitespace(Span::new(start, end)) }) } @@ -410,19 +391,25 @@ impl TokenTreeBuilder { Box::new(move |b| { let (start, end) = b.consume(&input); - TokenTreeBuilder::spanned_ws((start, end)) + TokenTreeBuilder::spanned_ws(Span::new(start, end)) }) } pub fn spanned_ws(span: impl Into) -> TokenNode { - let span = span.into(); - TokenNode::Whitespace(span.into()) } fn consume(&mut self, input: &str) -> (usize, usize) { let start = self.pos; self.pos += input.len(); + self.output.push_str(input); (start, self.pos) } + + fn consume_span(&mut self, input: &str) -> Span { + let start = self.pos; + self.pos += input.len(); + self.output.push_str(input); + Span::new(start, self.pos) + } } diff --git a/src/parser/parse/tokens.rs b/src/parser/parse/tokens.rs index ed9c1f72a4..29061ed7a2 100644 --- a/src/parser/parse/tokens.rs +++ b/src/parser/parse/tokens.rs @@ -1,19 +1,36 @@ -use crate::parser::parse::unit::*; +use crate::parser::Operator; use crate::prelude::*; -use crate::{Span, Tagged, Text}; +use crate::Text; use std::fmt; use std::str::FromStr; #[derive(Debug, Clone, Copy, Eq, PartialEq, Ord, PartialOrd, Hash)] pub enum RawToken { Number(RawNumber), - Size(RawNumber, Unit), + Operator(Operator), String(Span), Variable(Span), - External(Span), + ExternalCommand(Span), + ExternalWord, + GlobPattern, Bare, } +impl RawToken { + pub fn type_name(&self) -> &'static str { + match self { + RawToken::Number(_) => "number", + RawToken::Operator(..) => "operator", + RawToken::String(_) => "string", + RawToken::Variable(_) => "variable", + RawToken::ExternalCommand(_) => "external command", + RawToken::ExternalWord => "external word", + RawToken::GlobPattern => "glob pattern", + RawToken::Bare => "string", + } + } +} + #[derive(Debug, Clone, Copy, Eq, PartialEq, Ord, PartialOrd, Hash)] pub enum RawNumber { Int(Span), @@ -21,42 +38,29 @@ pub enum RawNumber { } impl RawNumber { - pub fn int(span: impl Into) -> Tagged { + pub fn int(span: impl Into) -> Spanned { let span = span.into(); - RawNumber::Int(span).tagged(span) + RawNumber::Int(span).spanned(span) } - pub fn decimal(span: impl Into) -> Tagged { + pub fn decimal(span: impl Into) -> Spanned { let span = span.into(); - RawNumber::Decimal(span).tagged(span) + RawNumber::Decimal(span).spanned(span) } pub(crate) fn to_number(self, source: &Text) -> Number { match self { - RawNumber::Int(span) => Number::Int(BigInt::from_str(span.slice(source)).unwrap()), - RawNumber::Decimal(span) => { - Number::Decimal(BigDecimal::from_str(span.slice(source)).unwrap()) + RawNumber::Int(tag) => Number::Int(BigInt::from_str(tag.slice(source)).unwrap()), + RawNumber::Decimal(tag) => { + Number::Decimal(BigDecimal::from_str(tag.slice(source)).unwrap()) } } } } -impl RawToken { - pub fn type_name(&self) -> &'static str { - match self { - RawToken::Number(_) => "Number", - RawToken::Size(..) 
=> "Size", - RawToken::String(_) => "String", - RawToken::Variable(_) => "Variable", - RawToken::External(_) => "External", - RawToken::Bare => "String", - } - } -} - -pub type Token = Tagged; +pub type Token = Spanned; impl Token { pub fn debug<'a>(&self, source: &'a Text) -> DebugToken<'a> { @@ -65,6 +69,76 @@ impl Token { source, } } + + pub fn extract_number(&self) -> Option> { + match self.item { + RawToken::Number(number) => Some(number.spanned(self.span)), + _ => None, + } + } + + pub fn extract_int(&self) -> Option<(Span, Span)> { + match self.item { + RawToken::Number(RawNumber::Int(int)) => Some((int, self.span)), + _ => None, + } + } + + pub fn extract_decimal(&self) -> Option<(Span, Span)> { + match self.item { + RawToken::Number(RawNumber::Decimal(decimal)) => Some((decimal, self.span)), + _ => None, + } + } + + pub fn extract_operator(&self) -> Option> { + match self.item { + RawToken::Operator(operator) => Some(operator.spanned(self.span)), + _ => None, + } + } + + pub fn extract_string(&self) -> Option<(Span, Span)> { + match self.item { + RawToken::String(span) => Some((span, self.span)), + _ => None, + } + } + + pub fn extract_variable(&self) -> Option<(Span, Span)> { + match self.item { + RawToken::Variable(span) => Some((span, self.span)), + _ => None, + } + } + + pub fn extract_external_command(&self) -> Option<(Span, Span)> { + match self.item { + RawToken::ExternalCommand(span) => Some((span, self.span)), + _ => None, + } + } + + pub fn extract_external_word(&self) -> Option { + match self.item { + RawToken::ExternalWord => Some(self.span), + _ => None, + } + } + + pub fn extract_glob_pattern(&self) -> Option { + match self.item { + RawToken::GlobPattern => Some(self.span), + _ => None, + } + } + + pub fn extract_bare(&self) -> Option { + match self.item { + RawToken::Bare => Some(self.span), + _ => None, + } + } } pub struct DebugToken<'a> { @@ -74,6 +148,6 @@ pub struct DebugToken<'a> { impl fmt::Debug for DebugToken<'_> { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - write!(f, "{}", self.node.span().slice(self.source)) + write!(f, "{}", self.node.span.slice(self.source)) } } diff --git a/src/parser/parse/unit.rs b/src/parser/parse/unit.rs index a29ce15b9f..e89986f8ac 100644 --- a/src/parser/parse/unit.rs +++ b/src/parser/parse/unit.rs @@ -1,4 +1,4 @@ -use crate::object::base::Value; +use crate::data::base::Value; use crate::prelude::*; use serde::{Deserialize, Serialize}; use std::str::FromStr; @@ -39,12 +39,6 @@ impl Unit { } } -impl From<&str> for Unit { - fn from(input: &str) -> Unit { - Unit::from_str(input).unwrap() - } -} - impl FromStr for Unit { type Err = (); fn from_str(input: &str) -> Result::Err> { diff --git a/src/parser/parse_command.rs b/src/parser/parse_command.rs index 33ad25e6f3..d531da62ac 100644 --- a/src/parser/parse_command.rs +++ b/src/parser/parse_command.rs @@ -1,94 +1,38 @@ -use crate::context::Context; use crate::errors::{ArgumentError, ShellError}; -use crate::parser::registry::{NamedType, PositionalType, Signature}; -use crate::parser::{baseline_parse_tokens, CallNode}; -use crate::parser::{ - hir::{self, NamedArguments}, - Flag, RawToken, TokenNode, +use crate::parser::hir::syntax_shape::{ + color_fallible_syntax, color_syntax, expand_expr, flat_shape::FlatShape, spaced, + BackoffColoringMode, ColorSyntax, MaybeSpaceShape, }; -use crate::{Span, Tag, Tagged, Text}; +use crate::parser::registry::{NamedType, PositionalType, Signature}; +use crate::parser::TokensIterator; +use crate::parser::{ + hir::{self, ExpandContext, 
NamedArguments}, + Flag, +}; +use crate::traits::ToDebug; +use crate::{Span, Spanned, Tag, Text}; use log::trace; -pub fn parse_command( +pub fn parse_command_tail( config: &Signature, - context: &Context, - call: &Tagged, - source: &Text, -) -> Result { - let Tagged { item: raw_call, .. } = call; - - trace!("Processing {:?}", config); - - let head = parse_command_head(call.head())?; - - let children: Option> = raw_call.children().as_ref().map(|nodes| { - nodes - .iter() - .cloned() - .filter(|node| match node { - TokenNode::Whitespace(_) => false, - _ => true, - }) - .collect() - }); - - match parse_command_tail(&config, context, children, source, call.span())? { - None => Ok(hir::Call::new(Box::new(head), None, None)), - Some((positional, named)) => Ok(hir::Call::new(Box::new(head), positional, named)), - } -} - -fn parse_command_head(head: &TokenNode) -> Result { - match head { - TokenNode::Token( - spanned @ Tagged { - item: RawToken::Bare, - .. - }, - ) => Ok(spanned.map(|_| hir::RawExpression::Literal(hir::Literal::Bare))), - - TokenNode::Token(Tagged { - item: RawToken::String(inner_span), - tag: Tag { span, origin: None }, - }) => Ok(Tagged::from_simple_spanned_item( - hir::RawExpression::Literal(hir::Literal::String(*inner_span)), - *span, - )), - - other => Err(ShellError::unexpected(&format!( - "command head -> {:?}", - other - ))), - } -} - -fn parse_command_tail( - config: &Signature, - context: &Context, - tail: Option>, - source: &Text, + context: &ExpandContext, + tail: &mut TokensIterator, command_span: Span, ) -> Result>, Option)>, ShellError> { - let tail = &mut match &tail { - None => hir::TokensIterator::new(&[]), - Some(tail) => hir::TokensIterator::new(tail), - }; - let mut named = NamedArguments::new(); - - trace_remaining("nodes", tail.clone(), source); + trace_remaining("nodes", tail.clone(), context.source()); for (name, kind) in &config.named { trace!(target: "nu::parse", "looking for {} : {:?}", name, kind); - match kind { + match &kind.0 { NamedType::Switch => { - let flag = extract_switch(name, tail, source); + let flag = extract_switch(name, tail, context.source()); named.insert_switch(name, flag); } NamedType::Mandatory(syntax_type) => { - match extract_mandatory(config, name, tail, source, command_span) { + match extract_mandatory(config, name, tail, context.source(), command_span) { Err(err) => return Err(err), // produce a correct diagnostic Ok((pos, flag)) => { tail.move_to(pos); @@ -97,85 +41,104 @@ fn parse_command_tail( return Err(ShellError::argument_error( config.name.clone(), ArgumentError::MissingValueForName(name.to_string()), - flag.span(), + flag.span, )); } - let expr = - hir::baseline_parse_next_expr(tail, context, source, *syntax_type)?; + let expr = expand_expr(&spaced(*syntax_type), tail, context)?; tail.restart(); named.insert_mandatory(name, expr); } } } - NamedType::Optional(syntax_type) => match extract_optional(name, tail, source) { - Err(err) => return Err(err), // produce a correct diagnostic - Ok(Some((pos, flag))) => { - tail.move_to(pos); + NamedType::Optional(syntax_type) => { + match extract_optional(name, tail, context.source()) { + Err(err) => return Err(err), // produce a correct diagnostic + Ok(Some((pos, flag))) => { + tail.move_to(pos); - if tail.at_end() { - return Err(ShellError::argument_error( - config.name.clone(), - ArgumentError::MissingValueForName(name.to_string()), - flag.span(), - )); + if tail.at_end() { + return Err(ShellError::argument_error( + config.name.clone(), + 
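The rewritten `parse_command_tail` walks the declared named arguments in three flavors, `Switch`, `Mandatory(shape)`, and `Optional(shape)`, locating the flag in the token stream and then expanding the expression that follows it. A heavily reduced sketch of that flow over plain string tokens (the `Named` enum and `parse_named` function below are illustrative, not nu API, and error handling is simplified relative to the diagnostics in the patch):

```rust
// Illustrative flag handling: switches are presence-only, mandatory flags
// must be followed by a value, optional flags tolerate absence.

#[derive(Debug, Clone)]
enum Named {
    Switch,
    Mandatory,
    Optional,
}

fn parse_named(
    declared: &[(&str, Named)],
    tokens: &[&str],
) -> Result<Vec<(String, Option<String>)>, String> {
    let mut out = Vec::new();

    for (name, kind) in declared {
        let flag = format!("--{}", name);
        let pos = tokens.iter().position(|t| *t == flag);

        match (kind, pos) {
            // A switch is either present or absent; no value follows it.
            (Named::Switch, pos) => {
                out.push((name.to_string(), pos.map(|_| "true".to_string())))
            }
            // A mandatory flag must be followed by a value token.
            (Named::Mandatory, Some(pos)) => match tokens.get(pos + 1) {
                Some(value) => out.push((name.to_string(), Some(value.to_string()))),
                None => return Err(format!("missing value for --{}", name)),
            },
            (Named::Mandatory, None) => {
                return Err(format!("missing required flag --{}", name))
            }
            // An optional flag may be absent, or present with or without a value.
            (Named::Optional, Some(pos)) => {
                out.push((name.to_string(), tokens.get(pos + 1).map(|v| v.to_string())))
            }
            (Named::Optional, None) => out.push((name.to_string(), None)),
        }
    }

    Ok(out)
}

fn main() {
    let tokens = ["data.csv", "--separator", ";", "--headerless"];
    let declared = [("separator", Named::Mandatory), ("headerless", Named::Switch)];
    println!("{:?}", parse_named(&declared, &tokens));
}
```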
ArgumentError::MissingValueForName(name.to_string()), + flag.span, + )); + } + + let expr = expand_expr(&spaced(*syntax_type), tail, context); + + match expr { + Err(_) => named.insert_optional(name, None), + Ok(expr) => named.insert_optional(name, Some(expr)), + } + + tail.restart(); } - let expr = hir::baseline_parse_next_expr(tail, context, source, *syntax_type)?; - - tail.restart(); - named.insert_optional(name, Some(expr)); + Ok(None) => { + tail.restart(); + named.insert_optional(name, None); + } } - - Ok(None) => { - tail.restart(); - named.insert_optional(name, None); - } - }, + } }; } - trace_remaining("after named", tail.clone(), source); + trace_remaining("after named", tail.clone(), context.source()); let mut positional = vec![]; for arg in &config.positional { - trace!("Processing positional {:?}", arg); + trace!(target: "nu::parse", "Processing positional {:?}", arg); - match arg { + match &arg.0 { PositionalType::Mandatory(..) => { - if tail.len() == 0 { + if tail.at_end_possible_ws() { return Err(ShellError::argument_error( config.name.clone(), - ArgumentError::MissingMandatoryPositional(arg.name().to_string()), - command_span, + ArgumentError::MissingMandatoryPositional(arg.0.name().to_string()), + Tag { + span: command_span, + anchor: None, + }, )); } } PositionalType::Optional(..) => { - if tail.len() == 0 { + if tail.at_end_possible_ws() { break; } } } - let result = hir::baseline_parse_next_expr(tail, context, source, arg.syntax_type())?; + let result = expand_expr(&spaced(arg.0.syntax_type()), tail, context)?; positional.push(result); } - trace_remaining("after positional", tail.clone(), source); + trace_remaining("after positional", tail.clone(), context.source()); - if let Some(syntax_type) = config.rest_positional { - let remainder = baseline_parse_tokens(tail, context, source, syntax_type)?; - positional.extend(remainder); + if let Some((syntax_type, _)) = config.rest_positional { + let mut out = vec![]; + + loop { + if tail.at_end_possible_ws() { + break; + } + + let next = expand_expr(&spaced(syntax_type), tail, context)?; + + out.push(next); + } + + positional.extend(out); } - trace_remaining("after rest", tail.clone(), source); + trace_remaining("after rest", tail.clone(), context.source()); - trace!("Constructed positional={:?} named={:?}", positional, named); + trace!(target: "nu::parse", "Constructed positional={:?} named={:?}", positional, named); let positional = if positional.len() == 0 { None @@ -191,11 +154,434 @@ fn parse_command_tail( Some(named) }; - trace!("Normalized positional={:?} named={:?}", positional, named); + trace!(target: "nu::parse", "Normalized positional={:?} named={:?}", positional, named); Ok(Some((positional, named))) } +#[derive(Debug)] +struct ColoringArgs { + vec: Vec>>>, +} + +impl ColoringArgs { + fn new(len: usize) -> ColoringArgs { + let vec = vec![None; len]; + ColoringArgs { vec } + } + + fn insert(&mut self, pos: usize, shapes: Vec>) { + self.vec[pos] = Some(shapes); + } + + fn spread_shapes(self, shapes: &mut Vec>) { + for item in self.vec { + match item { + None => {} + Some(vec) => { + shapes.extend(vec); + } + } + } + } +} + +#[derive(Debug, Copy, Clone)] +pub struct CommandTailShape; + +#[cfg(not(coloring_in_tokens))] +impl ColorSyntax for CommandTailShape { + type Info = (); + type Input = Signature; + + fn color_syntax<'a, 'b>( + &self, + signature: &Signature, + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + shapes: &mut Vec>, + ) -> Self::Info { + let mut args = 
ColoringArgs::new(token_nodes.len()); + trace_remaining("nodes", token_nodes.clone(), context.source()); + + for (name, kind) in &signature.named { + trace!(target: "nu::color_syntax", "looking for {} : {:?}", name, kind); + + match &kind.0 { + NamedType::Switch => { + match token_nodes.extract(|t| t.as_flag(name, context.source())) { + Some((pos, flag)) => args.insert(pos, vec![flag.color()]), + None => {} + } + } + NamedType::Mandatory(syntax_type) => { + match extract_mandatory( + signature, + name, + token_nodes, + context.source(), + Span::unknown(), + ) { + Err(_) => { + // The mandatory flag didn't exist at all, so there's nothing to color + } + Ok((pos, flag)) => { + let mut shapes = vec![flag.color()]; + token_nodes.move_to(pos); + + if token_nodes.at_end() { + args.insert(pos, shapes); + token_nodes.restart(); + continue; + } + + // We can live with unmatched syntax after a mandatory flag + let _ = token_nodes.atomic(|token_nodes| { + color_syntax(&MaybeSpaceShape, token_nodes, context, &mut shapes); + + // If the part after a mandatory flag isn't present, that's ok, but we + // should roll back any whitespace we chomped + color_fallible_syntax( + syntax_type, + token_nodes, + context, + &mut shapes, + ) + }); + + args.insert(pos, shapes); + token_nodes.restart(); + } + } + } + NamedType::Optional(syntax_type) => { + match extract_optional(name, token_nodes, context.source()) { + Err(_) => { + // The optional flag didn't exist at all, so there's nothing to color + } + Ok(Some((pos, flag))) => { + let mut shapes = vec![flag.color()]; + token_nodes.move_to(pos); + + if token_nodes.at_end() { + args.insert(pos, shapes); + token_nodes.restart(); + continue; + } + + // We can live with unmatched syntax after an optional flag + let _ = token_nodes.atomic(|token_nodes| { + color_syntax(&MaybeSpaceShape, token_nodes, context, &mut shapes); + + // If the part after a mandatory flag isn't present, that's ok, but we + // should roll back any whitespace we chomped + color_fallible_syntax( + syntax_type, + token_nodes, + context, + &mut shapes, + ) + }); + + args.insert(pos, shapes); + token_nodes.restart(); + } + + Ok(None) => { + token_nodes.restart(); + } + } + } + }; + } + + trace_remaining("after named", token_nodes.clone(), context.source()); + + for arg in &signature.positional { + trace!("Processing positional {:?}", arg); + + match arg.0 { + PositionalType::Mandatory(..) => { + if token_nodes.at_end() { + break; + } + } + + PositionalType::Optional(..) => { + if token_nodes.at_end() { + break; + } + } + } + + let mut shapes = vec![]; + let pos = token_nodes.pos(false); + + match pos { + None => break, + Some(pos) => { + // We can live with an unmatched positional argument. 
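Both coloring implementations wrap each attempt in `atomic(...)`: color optimistically, and if a required piece fails to match, roll the iterator and any emitted shapes back to the checkpoint. A self-contained sketch of that checkpoint/rollback idea, with a made-up `Colorer` type standing in for nu's token iterator and its `FlatShape` list:

```rust
// Illustrative checkpoint/rollback: on error, restore both the position and
// the shape output to their state at the start of the block.

struct Colorer {
    pos: usize,
    shapes: Vec<&'static str>,
}

impl Colorer {
    fn atomic<T, E>(
        &mut self,
        block: impl FnOnce(&mut Colorer) -> Result<T, E>,
    ) -> Result<T, E> {
        let saved_pos = self.pos;
        let saved_len = self.shapes.len();

        match block(self) {
            Ok(value) => Ok(value),
            Err(err) => {
                // Undo everything the failed block did.
                self.pos = saved_pos;
                self.shapes.truncate(saved_len);
                Err(err)
            }
        }
    }
}

fn main() {
    let mut colorer = Colorer { pos: 0, shapes: vec![] };

    // A block that colors some whitespace, then fails to find a value:
    let result: Result<(), &str> = colorer.atomic(|c| {
        c.pos += 1;
        c.shapes.push("whitespace");
        Err("no value after flag")
    });

    assert!(result.is_err());
    assert_eq!(colorer.pos, 0); // position rolled back
    assert!(colorer.shapes.is_empty()); // chomped whitespace was discarded
    println!("rollback ok");
}
```

That is why the comments in the patch can afford to say "we can live with unmatched syntax after a mandatory flag": the failed attempt leaves no trace, and later tokens get another chance to match.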
Hopefully it will be + // matched by a future token + let _ = token_nodes.atomic(|token_nodes| { + color_syntax(&MaybeSpaceShape, token_nodes, context, &mut shapes); + + // If no match, we should roll back any whitespace we chomped + color_fallible_syntax( + &arg.0.syntax_type(), + token_nodes, + context, + &mut shapes, + )?; + + args.insert(pos, shapes); + + Ok(()) + }); + } + } + } + + trace_remaining("after positional", token_nodes.clone(), context.source()); + + if let Some((syntax_type, _)) = signature.rest_positional { + loop { + if token_nodes.at_end_possible_ws() { + break; + } + + let pos = token_nodes.pos(false); + + match pos { + None => break, + Some(pos) => { + let mut shapes = vec![]; + + // If any arguments don't match, we'll fall back to backoff coloring mode + let result = token_nodes.atomic(|token_nodes| { + color_syntax(&MaybeSpaceShape, token_nodes, context, &mut shapes); + + // If no match, we should roll back any whitespace we chomped + color_fallible_syntax(&syntax_type, token_nodes, context, &mut shapes)?; + + args.insert(pos, shapes); + + Ok(()) + }); + + match result { + Err(_) => break, + Ok(_) => continue, + } + } + } + } + } + + args.spread_shapes(shapes); + + // Consume any remaining tokens with backoff coloring mode + color_syntax(&BackoffColoringMode, token_nodes, context, shapes); + + shapes.sort_by(|a, b| a.span.start().cmp(&b.span.start())); + } +} + +#[cfg(coloring_in_tokens)] +impl ColorSyntax for CommandTailShape { + type Info = (); + type Input = Signature; + + fn name(&self) -> &'static str { + "CommandTailShape" + } + + fn color_syntax<'a, 'b>( + &self, + signature: &Signature, + token_nodes: &'b mut TokensIterator<'a>, + context: &ExpandContext, + ) -> Self::Info { + let mut args = ColoringArgs::new(token_nodes.len()); + trace_remaining("nodes", token_nodes.clone(), context.source()); + + for (name, kind) in &signature.named { + trace!(target: "nu::color_syntax", "looking for {} : {:?}", name, kind); + + match &kind.0 { + NamedType::Switch => { + match token_nodes.extract(|t| t.as_flag(name, context.source())) { + Some((pos, flag)) => args.insert(pos, vec![flag.color()]), + None => {} + } + } + NamedType::Mandatory(syntax_type) => { + match extract_mandatory( + signature, + name, + token_nodes, + context.source(), + Span::unknown(), + ) { + Err(_) => { + // The mandatory flag didn't exist at all, so there's nothing to color + } + Ok((pos, flag)) => { + let (_, shapes) = token_nodes.atomic_returning_shapes(|token_nodes| { + token_nodes.color_shape(flag.color()); + token_nodes.move_to(pos); + + if token_nodes.at_end() { + return Ok(()); + } + + // We still want to color the flag even if the following tokens don't match, so don't + // propagate the error to the parent atomic block if it fails + let _ = token_nodes.atomic(|token_nodes| { + // We can live with unmatched syntax after a mandatory flag + color_syntax(&MaybeSpaceShape, token_nodes, context); + + // If the part after a mandatory flag isn't present, that's ok, but we + // should roll back any whitespace we chomped + color_fallible_syntax(syntax_type, token_nodes, context)?; + + Ok(()) + }); + + Ok(()) + }); + + args.insert(pos, shapes); + token_nodes.restart(); + } + } + } + NamedType::Optional(syntax_type) => { + match extract_optional(name, token_nodes, context.source()) { + Err(_) => { + // The optional flag didn't exist at all, so there's nothing to color + } + Ok(Some((pos, flag))) => { + let (_, shapes) = token_nodes.atomic_returning_shapes(|token_nodes| { + 
token_nodes.color_shape(flag.color()); + token_nodes.move_to(pos); + + if token_nodes.at_end() { + return Ok(()); + } + + // We still want to color the flag even if the following tokens don't match, so don't + // propagate the error to the parent atomic block if it fails + let _ = token_nodes.atomic(|token_nodes| { + // We can live with unmatched syntax after a mandatory flag + color_syntax(&MaybeSpaceShape, token_nodes, context); + + // If the part after a mandatory flag isn't present, that's ok, but we + // should roll back any whitespace we chomped + color_fallible_syntax(syntax_type, token_nodes, context)?; + + Ok(()) + }); + + Ok(()) + }); + + args.insert(pos, shapes); + token_nodes.restart(); + } + + Ok(None) => { + token_nodes.restart(); + } + } + } + }; + } + + trace_remaining("after named", token_nodes.clone(), context.source()); + + for arg in &signature.positional { + trace!("Processing positional {:?}", arg); + + match &arg.0 { + PositionalType::Mandatory(..) => { + if token_nodes.at_end() { + break; + } + } + + PositionalType::Optional(..) => { + if token_nodes.at_end() { + break; + } + } + } + + let pos = token_nodes.pos(false); + + match pos { + None => break, + Some(pos) => { + // We can live with an unmatched positional argument. Hopefully it will be + // matched by a future token + let (_, shapes) = token_nodes.atomic_returning_shapes(|token_nodes| { + color_syntax(&MaybeSpaceShape, token_nodes, context); + + // If no match, we should roll back any whitespace we chomped + color_fallible_syntax(&arg.0.syntax_type(), token_nodes, context)?; + + Ok(()) + }); + + args.insert(pos, shapes); + } + } + } + + trace_remaining("after positional", token_nodes.clone(), context.source()); + + if let Some((syntax_type, _)) = signature.rest_positional { + loop { + if token_nodes.at_end_possible_ws() { + break; + } + + let pos = token_nodes.pos(false); + + match pos { + None => break, + Some(pos) => { + // If any arguments don't match, we'll fall back to backoff coloring mode + let (result, shapes) = token_nodes.atomic_returning_shapes(|token_nodes| { + color_syntax(&MaybeSpaceShape, token_nodes, context); + + // If no match, we should roll back any whitespace we chomped + color_fallible_syntax(&syntax_type, token_nodes, context)?; + + Ok(()) + }); + + args.insert(pos, shapes); + + match result { + Err(_) => break, + Ok(_) => continue, + } + } + } + } + } + + token_nodes.silently_mutate_shapes(|shapes| args.spread_shapes(shapes)); + + // Consume any remaining tokens with backoff coloring mode + color_syntax(&BackoffColoringMode, token_nodes, context); + + // This is pretty dubious, but it works. We should look into a better algorithm that doesn't end up requiring + // this solution. 
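The duplicated `CommandTailShape` impls above are selected at compile time by the custom `coloring_in_tokens` cfg flag. How that flag is actually set is not part of this patch; as a generic illustration only, a custom cfg toggling between two alternatives looks like this (enabled with, e.g., `RUSTFLAGS='--cfg coloring_in_tokens' cargo build`):

```rust
// Illustrative use of a custom cfg flag: exactly one of these functions is
// compiled in, mirroring how the two CommandTailShape impls are gated.

#[cfg(coloring_in_tokens)]
fn coloring_strategy() -> &'static str {
    "shapes are accumulated inside the token iterator"
}

#[cfg(not(coloring_in_tokens))]
fn coloring_strategy() -> &'static str {
    "shapes are accumulated in an external shape vector"
}

fn main() {
    println!("{}", coloring_strategy());
}
```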
+ token_nodes.sort_shapes() + } +} + fn extract_switch(name: &str, tokens: &mut hir::TokensIterator<'_>, source: &Text) -> Option { tokens .extract(|t| t.as_flag(name, source)) @@ -208,7 +594,7 @@ fn extract_mandatory( tokens: &mut hir::TokensIterator<'_>, source: &Text, span: Span, -) -> Result<(usize, Tagged), ShellError> { +) -> Result<(usize, Spanned), ShellError> { let flag = tokens.extract(|t| t.as_flag(name, source)); match flag { @@ -229,7 +615,7 @@ fn extract_optional( name: &str, tokens: &mut hir::TokensIterator<'_>, source: &Text, -) -> Result<(Option<(usize, Tagged)>), ShellError> { +) -> Result<(Option<(usize, Spanned)>), ShellError> { let flag = tokens.extract(|t| t.as_flag(name, source)); match flag { @@ -243,12 +629,13 @@ fn extract_optional( pub fn trace_remaining(desc: &'static str, tail: hir::TokensIterator<'_>, source: &Text) { trace!( + target: "nu::parse", "{} = {:?}", desc, itertools::join( tail.debug_remaining() .iter() - .map(|i| format!("%{:?}%", i.debug(source))), + .map(|i| format!("%{}%", i.debug(&source))), " " ) ); diff --git a/src/parser/registry.rs b/src/parser/registry.rs index 7112d91118..ff0a98ae85 100644 --- a/src/parser/registry.rs +++ b/src/parser/registry.rs @@ -1,46 +1,46 @@ // TODO: Temporary redirect pub(crate) use crate::context::CommandRegistry; use crate::evaluate::{evaluate_baseline_expr, Scope}; -use crate::parser::{hir, hir::SyntaxType, parse_command, CallNode}; +use crate::parser::{hir, hir::SyntaxShape}; use crate::prelude::*; use derive_new::new; use indexmap::IndexMap; -use log::trace; + use serde::{Deserialize, Serialize}; use std::fmt; #[derive(Debug, Serialize, Deserialize, Clone)] pub enum NamedType { Switch, - Mandatory(SyntaxType), - Optional(SyntaxType), + Mandatory(SyntaxShape), + Optional(SyntaxShape), } #[derive(Debug, Clone, Serialize, Deserialize)] pub enum PositionalType { - Mandatory(String, SyntaxType), - Optional(String, SyntaxType), + Mandatory(String, SyntaxShape), + Optional(String, SyntaxShape), } impl PositionalType { - pub fn mandatory(name: &str, ty: SyntaxType) -> PositionalType { + pub fn mandatory(name: &str, ty: SyntaxShape) -> PositionalType { PositionalType::Mandatory(name.to_string(), ty) } pub fn mandatory_any(name: &str) -> PositionalType { - PositionalType::Mandatory(name.to_string(), SyntaxType::Any) + PositionalType::Mandatory(name.to_string(), SyntaxShape::Any) } pub fn mandatory_block(name: &str) -> PositionalType { - PositionalType::Mandatory(name.to_string(), SyntaxType::Block) + PositionalType::Mandatory(name.to_string(), SyntaxShape::Block) } - pub fn optional(name: &str, ty: SyntaxType) -> PositionalType { + pub fn optional(name: &str, ty: SyntaxShape) -> PositionalType { PositionalType::Optional(name.to_string(), ty) } pub fn optional_any(name: &str) -> PositionalType { - PositionalType::Optional(name.to_string(), SyntaxType::Any) + PositionalType::Optional(name.to_string(), SyntaxShape::Any) } pub(crate) fn name(&self) -> &str { @@ -50,7 +50,7 @@ impl PositionalType { } } - pub(crate) fn syntax_type(&self) -> SyntaxType { + pub(crate) fn syntax_type(&self) -> SyntaxShape { match *self { PositionalType::Mandatory(_, t) => t, PositionalType::Optional(_, t) => t, @@ -58,17 +58,19 @@ impl PositionalType { } } +type Description = String; + #[derive(Debug, Serialize, Deserialize, Clone, new)] pub struct Signature { pub name: String, #[new(default)] pub usage: String, #[new(default)] - pub positional: Vec, + pub positional: Vec<(PositionalType, Description)>, #[new(value = "None")] - pub 
rest_positional: Option, + pub rest_positional: Option<(SyntaxShape, Description)>, #[new(default)] - pub named: IndexMap, + pub named: IndexMap, #[new(value = "false")] pub is_filter: bool, } @@ -83,23 +85,42 @@ impl Signature { self } - pub fn required(mut self, name: impl Into, ty: impl Into) -> Signature { - self.positional - .push(PositionalType::Mandatory(name.into(), ty.into())); + pub fn required( + mut self, + name: impl Into, + ty: impl Into, + desc: impl Into, + ) -> Signature { + self.positional.push(( + PositionalType::Mandatory(name.into(), ty.into()), + desc.into(), + )); self } - pub fn optional(mut self, name: impl Into, ty: impl Into) -> Signature { - self.positional - .push(PositionalType::Optional(name.into(), ty.into())); + pub fn optional( + mut self, + name: impl Into, + ty: impl Into, + desc: impl Into, + ) -> Signature { + self.positional.push(( + PositionalType::Optional(name.into(), ty.into()), + desc.into(), + )); self } - pub fn named(mut self, name: impl Into, ty: impl Into) -> Signature { + pub fn named( + mut self, + name: impl Into, + ty: impl Into, + desc: impl Into, + ) -> Signature { self.named - .insert(name.into(), NamedType::Optional(ty.into())); + .insert(name.into(), (NamedType::Optional(ty.into()), desc.into())); self } @@ -107,16 +128,18 @@ impl Signature { pub fn required_named( mut self, name: impl Into, - ty: impl Into, + ty: impl Into, + desc: impl Into, ) -> Signature { self.named - .insert(name.into(), NamedType::Mandatory(ty.into())); + .insert(name.into(), (NamedType::Mandatory(ty.into()), desc.into())); self } - pub fn switch(mut self, name: impl Into) -> Signature { - self.named.insert(name.into(), NamedType::Switch); + pub fn switch(mut self, name: impl Into, desc: impl Into) -> Signature { + self.named + .insert(name.into(), (NamedType::Switch, desc.into())); self } @@ -126,8 +149,8 @@ impl Signature { self } - pub fn rest(mut self, ty: SyntaxType) -> Signature { - self.rest_positional = Some(ty); + pub fn rest(mut self, ty: SyntaxShape, desc: impl Into) -> Signature { + self.rest_positional = Some((ty, desc.into())); self } } @@ -271,21 +294,6 @@ impl<'a> Iterator for PositionalIter<'a> { } } -impl Signature { - pub(crate) fn parse_args( - &self, - call: &Tagged, - context: &Context, - source: &Text, - ) -> Result { - let args = parse_command(self, context, call, source)?; - - trace!("parsed args: {:?}", args); - - Ok(args) - } -} - pub(crate) fn evaluate_args( call: &hir::Call, registry: &CommandRegistry, @@ -312,11 +320,8 @@ pub(crate) fn evaluate_args( for (name, value) in n.named.iter() { match value { - hir::named::NamedValue::PresentSwitch(span) => { - results.insert( - name.clone(), - Tagged::from_simple_spanned_item(Value::boolean(true), *span), - ); + hir::named::NamedValue::PresentSwitch(tag) => { + results.insert(name.clone(), Value::boolean(true).tagged(tag)); } hir::named::NamedValue::Value(expr) => { results.insert( diff --git a/src/plugin.rs b/src/plugin.rs index afd9871108..004e937fe8 100644 --- a/src/plugin.rs +++ b/src/plugin.rs @@ -32,7 +32,7 @@ pub fn serve_plugin(plugin: &mut dyn Plugin) { let input = match input { Some(arg) => std::fs::read_to_string(arg), None => { - send_response(ShellError::string(format!("No input given."))); + send_response(ShellError::untagged_runtime_error("No input given.")); return; } }; @@ -64,7 +64,7 @@ pub fn serve_plugin(plugin: &mut dyn Plugin) { return; } e => { - send_response(ShellError::string(format!( + send_response(ShellError::untagged_runtime_error(format!( "Could not handle 
plugin message: {} {:?}", input, e ))); @@ -102,7 +102,7 @@ pub fn serve_plugin(plugin: &mut dyn Plugin) { break; } e => { - send_response(ShellError::string(format!( + send_response(ShellError::untagged_runtime_error(format!( "Could not handle plugin message: {} {:?}", input, e ))); @@ -111,7 +111,7 @@ pub fn serve_plugin(plugin: &mut dyn Plugin) { } } e => { - send_response(ShellError::string(format!( + send_response(ShellError::untagged_runtime_error(format!( "Could not handle plugin message: {:?}", e, ))); diff --git a/src/plugins/add.rs b/src/plugins/add.rs index 27a12e677e..5bda9d0593 100644 --- a/src/plugins/add.rs +++ b/src/plugins/add.rs @@ -1,10 +1,13 @@ +use itertools::Itertools; use nu::{ - serve_plugin, CallInfo, Plugin, Primitive, ReturnSuccess, ReturnValue, ShellError, Signature, - SyntaxType, Tagged, Value, + serve_plugin, CallInfo, Plugin, ReturnSuccess, ReturnValue, ShellError, Signature, SyntaxShape, + Tagged, TaggedItem, Value, }; +pub type ColumnPath = Vec>; + struct Add { - field: Option, + field: Option, value: Option, } impl Add { @@ -18,24 +21,31 @@ impl Add { fn add(&self, value: Tagged) -> Result, ShellError> { let value_tag = value.tag(); match (value.item, self.value.clone()) { - (obj @ Value::Object(_), Some(v)) => match &self.field { - Some(f) => match obj.insert_data_at_path(value_tag, &f, v) { + (obj @ Value::Row(_), Some(v)) => match &self.field { + Some(f) => match obj.insert_data_at_column_path(value_tag.clone(), &f, v) { Some(v) => return Ok(v), None => { - return Err(ShellError::string(format!( - "add could not find place to insert field {:?} {}", - obj, f - ))) + return Err(ShellError::labeled_error( + format!( + "add could not find place to insert field {:?} {}", + obj, + f.iter().map(|i| &i.item).join(".") + ), + "column name", + &value_tag, + )) } }, - None => Err(ShellError::string( - "add needs a field when adding a value to an object", + None => Err(ShellError::labeled_error( + "add needs a column name when adding a value to a table", + "column name", + value_tag, )), }, - x => Err(ShellError::string(format!( - "Unrecognized type in stream: {:?}", - x - ))), + (value, _) => Err(ShellError::type_error( + "row", + value.type_name().tagged(value_tag), + )), } } } @@ -43,27 +53,27 @@ impl Add { impl Plugin for Add { fn config(&mut self) -> Result { Ok(Signature::build("add") - .desc("Add a new field to the table.") - .required("Field", SyntaxType::String) - .required("Value", SyntaxType::String) - .rest(SyntaxType::String).filter()) + .desc("Add a new column to the table.") + .required("column", SyntaxShape::ColumnPath, "the column name to add") + .required( + "value", + SyntaxShape::String, + "the value to give the cell(s)", + ) + .filter()) } fn begin_filter(&mut self, call_info: CallInfo) -> Result, ShellError> { if let Some(args) = call_info.args.positional { match &args[0] { - Tagged { - item: Value::Primitive(Primitive::String(s)), + table @ Tagged { + item: Value::Table(_), .. } => { - self.field = Some(s.clone()); - } - _ => { - return Err(ShellError::string(format!( - "Unrecognized type in params: {:?}", - args[0] - ))) + self.field = Some(table.as_column_path()?.item); } + + value => return Err(ShellError::type_error("table", value.tagged_type_name())), } match &args[1] { Tagged { item: v, .. 
} => { diff --git a/src/plugins/average.rs b/src/plugins/average.rs new file mode 100644 index 0000000000..f78078450a --- /dev/null +++ b/src/plugins/average.rs @@ -0,0 +1,115 @@ +use nu::{ + serve_plugin, CallInfo, CoerceInto, Plugin, Primitive, ReturnSuccess, ReturnValue, ShellError, + Signature, Tagged, TaggedItem, Value, +}; + +#[derive(Debug)] +struct Average { + total: Option>, + count: u64, +} + +impl Average { + fn new() -> Average { + Average { + total: None, + count: 0, + } + } + + fn average(&mut self, value: Tagged) -> Result<(), ShellError> { + match value.item() { + Value::Primitive(Primitive::Nothing) => Ok(()), + Value::Primitive(Primitive::Int(i)) => match &self.total { + Some(Tagged { + item: Value::Primitive(Primitive::Int(j)), + tag, + }) => { + self.total = Some(Value::int(i + j).tagged(tag)); + self.count += 1; + Ok(()) + } + None => { + self.total = Some(value.clone()); + self.count += 1; + Ok(()) + } + _ => Err(ShellError::labeled_error( + "Could calculate average of non-integer or unrelated types", + "source", + value.tag, + )), + }, + Value::Primitive(Primitive::Bytes(b)) => match &self.total { + Some(Tagged { + item: Value::Primitive(Primitive::Bytes(j)), + tag, + }) => { + self.total = Some(Value::bytes(b + j).tagged(tag)); + self.count += 1; + Ok(()) + } + None => { + self.total = Some(value); + self.count += 1; + Ok(()) + } + _ => Err(ShellError::labeled_error( + "Could calculate average of non-integer or unrelated types", + "source", + value.tag, + )), + }, + x => Err(ShellError::labeled_error( + format!("Unrecognized type in stream: {:?}", x), + "source", + value.tag, + )), + } + } +} + +impl Plugin for Average { + fn config(&mut self) -> Result { + Ok(Signature::build("average") + .desc("Compute the average of a column of numerical values.") + .filter()) + } + + fn begin_filter(&mut self, _: CallInfo) -> Result, ShellError> { + Ok(vec![]) + } + + fn filter(&mut self, input: Tagged) -> Result, ShellError> { + self.average(input)?; + Ok(vec![]) + } + + fn end_filter(&mut self) -> Result, ShellError> { + match self.total { + None => Ok(vec![]), + Some(ref inner) => match inner.item() { + Value::Primitive(Primitive::Int(i)) => { + let total: u64 = i + .tagged(inner.tag.clone()) + .coerce_into("converting for average")?; + let avg = total as f64 / self.count as f64; + let primitive_value: Value = Primitive::from(avg).into(); + let tagged_value = primitive_value.tagged(inner.tag.clone()); + Ok(vec![ReturnSuccess::value(tagged_value)]) + } + Value::Primitive(Primitive::Bytes(bytes)) => { + let avg = *bytes as f64 / self.count as f64; + let primitive_value: Value = Primitive::from(avg).into(); + let tagged_value = primitive_value.tagged(inner.tag.clone()); + Ok(vec![ReturnSuccess::value(tagged_value)]) + } + _ => Ok(vec![]), + }, + } + } +} + +fn main() { + serve_plugin(&mut Average::new()); +} diff --git a/src/plugins/binaryview.rs b/src/plugins/binaryview.rs index c321e8115c..0072df5b4d 100644 --- a/src/plugins/binaryview.rs +++ b/src/plugins/binaryview.rs @@ -1,5 +1,7 @@ use crossterm::{cursor, terminal, Attribute, RawScreen}; -use nu::{serve_plugin, CallInfo, Plugin, ShellError, Signature, SpanSource, Tagged, Value}; +use nu::{ + serve_plugin, AnchorLocation, CallInfo, Plugin, Primitive, ShellError, Signature, Tagged, Value, +}; use pretty_hex::*; struct BinaryView; @@ -14,16 +16,15 @@ impl Plugin for BinaryView { fn config(&mut self) -> Result { Ok(Signature::build("binaryview") .desc("Autoview of binary data.") - .switch("lores")) + .switch("lores", "use low 
resolution output mode")) } fn sink(&mut self, call_info: CallInfo, input: Vec>) { for v in input { - let value_origin = v.origin(); + let value_anchor = v.anchor(); match v.item { - Value::Binary(b) => { - let source = value_origin.and_then(|x| call_info.source_map.get(&x)); - let _ = view_binary(&b, source, call_info.args.has("lores")); + Value::Primitive(Primitive::Binary(b)) => { + let _ = view_binary(&b, value_anchor.as_ref(), call_info.args.has("lores")); } _ => {} } @@ -33,7 +34,7 @@ impl Plugin for BinaryView { fn view_binary( b: &[u8], - source: Option<&SpanSource>, + source: Option<&AnchorLocation>, lores_mode: bool, ) -> Result<(), Box> { if b.len() > 3 { @@ -252,7 +253,7 @@ fn load_from_jpg_buffer(buffer: &[u8]) -> Option<(RawImageBuffer)> { pub fn view_contents( buffer: &[u8], - _source: Option<&SpanSource>, + _source: Option<&AnchorLocation>, lores_mode: bool, ) -> Result<(), Box> { let mut raw_image_buffer = load_from_png_buffer(buffer); @@ -339,12 +340,12 @@ pub fn view_contents( #[cfg(feature = "rawkey")] pub fn view_contents_interactive( buffer: &[u8], - source: Option<&SpanSource>, + source: Option<&AnchorLocation>, lores_mode: bool, ) -> Result<(), Box> { use rawkey::{KeyCode, RawKey}; - let sav_path = if let Some(SpanSource::File(f)) = source { + let sav_path = if let Some(AnchorLocation::File(f)) = source { let mut path = std::path::PathBuf::from(f); path.set_extension("sav"); Some(path) @@ -435,7 +436,7 @@ pub fn view_contents_interactive( let cursor = cursor(); let _ = cursor.show(); - let screen = RawScreen::disable_raw_mode(); + let _screen = RawScreen::disable_raw_mode(); Ok(()) } diff --git a/src/plugins/docker.rs b/src/plugins/docker.rs new file mode 100644 index 0000000000..e0a06ab3d4 --- /dev/null +++ b/src/plugins/docker.rs @@ -0,0 +1,115 @@ +use futures::executor::block_on; +use nu::{ + serve_plugin, CallInfo, Plugin, Primitive, ReturnSuccess, ReturnValue, ShellError, Signature, + SyntaxShape, Tag, Tagged, TaggedDictBuilder, Value, +}; + +use std::process::Command; +use std::str; + +struct Docker; + +impl Docker { + fn new() -> Self { + Self + } +} + +async fn docker(sub_command: &String, name: Tag) -> Result>, ShellError> { + match sub_command.as_str() { + "ps" => docker_ps(name), + "images" => docker_images(name), + _ => Err(ShellError::labeled_error( + "Unsupported Docker command", + "unknown docker command", + name, + )), + } +} + +fn process_docker_output(cmd_output: &str, tag: Tag) -> Result>, ShellError> { + let columns: Vec<&str> = cmd_output.lines().collect(); + + let header: Vec<&str> = columns + .iter() + .take(1) + .next() + .unwrap() + .split_whitespace() + .collect(); + + let mut output = vec![]; + for line in columns.iter().skip(1) { + let values: Vec<&str> = line + .trim_end() + .split(" ") // Some columns values contains spaces to split by two spaces + .filter(|s| s.trim() != "") + .collect(); + + let mut dict = TaggedDictBuilder::new(&tag); + for (i, v) in values.iter().enumerate() { + dict.insert(header[i].to_string(), Value::string(v.trim().to_string())); + } + + output.push(dict.into_tagged_value()); + } + + Ok(output) +} + +pub fn docker_images(tag: Tag) -> Result>, ShellError> { + let output = Command::new("docker") + .arg("images") + .output() + .expect("failed to execute process."); + + let ps_output = str::from_utf8(&output.stdout).unwrap(); + let out = process_docker_output(ps_output, tag); + + out +} + +pub fn docker_ps(tag: Tag) -> Result>, ShellError> { + let output = Command::new("docker") + .arg("ps") + .output() + 
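The exact delimiter in process_docker_output is hard to read in this hunk, but its comment indicates data rows are split on two consecutive spaces so that values containing single spaces stay whole, while the header is split on any whitespace. A simplified, hypothetical helper showing that row-splitting idea (plain std, no docker involved):

// Split a fixed-width-style row on runs of two spaces, dropping empty cells.
fn split_cells(line: &str) -> Vec<String> {
    line.trim_end()
        .split("  ")
        .filter(|cell| !cell.trim().is_empty())
        .map(|cell| cell.trim().to_string())
        .collect()
}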
.expect("failed to execute process."); + + let ps_output = str::from_utf8(&output.stdout).unwrap(); + let out = process_docker_output(ps_output, tag); + + out +} + +impl Plugin for Docker { + fn config(&mut self) -> Result { + Ok(Signature::build("docker") + .required("sub_command", SyntaxShape::Member) + .filter()) + } + + fn begin_filter(&mut self, callinfo: CallInfo) -> Result, ShellError> { + if let Some(args) = callinfo.args.positional { + match &args[0] { + Tagged { + item: Value::Primitive(Primitive::String(command)), + .. + } => match block_on(docker(&command, args[0].tag())) { + Ok(v) => return Ok(v.into_iter().map(ReturnSuccess::value).collect()), + Err(e) => return Err(e), + }, + _ => return Err(ShellError::type_error("string", args[0].tagged_type_name())), + } + } + + Ok(vec![]) + } + + fn filter(&mut self, _: Tagged) -> Result, ShellError> { + Ok(vec![]) + } +} + +fn main() { + serve_plugin(&mut Docker::new()); +} diff --git a/src/plugins/edit.rs b/src/plugins/edit.rs index ab8a25aa68..78cb32cef3 100644 --- a/src/plugins/edit.rs +++ b/src/plugins/edit.rs @@ -1,10 +1,12 @@ use nu::{ - serve_plugin, CallInfo, Plugin, Primitive, ReturnSuccess, ReturnValue, ShellError, Signature, - SyntaxType, Tagged, Value, + serve_plugin, CallInfo, Plugin, ReturnSuccess, ReturnValue, ShellError, Signature, SyntaxShape, + Tagged, Value, }; +pub type ColumnPath = Tagged>>; + struct Edit { - field: Option, + field: Option, value: Option, } impl Edit { @@ -18,23 +20,26 @@ impl Edit { fn edit(&self, value: Tagged) -> Result, ShellError> { let value_tag = value.tag(); match (value.item, self.value.clone()) { - (obj @ Value::Object(_), Some(v)) => match &self.field { - Some(f) => match obj.replace_data_at_path(value_tag, &f, v) { + (obj @ Value::Row(_), Some(v)) => match &self.field { + Some(f) => match obj.replace_data_at_column_path(value_tag, &f, v) { Some(v) => return Ok(v), None => { - return Err(ShellError::string( - "edit could not find place to insert field", + return Err(ShellError::labeled_error( + "edit could not find place to insert column", + "column name", + &f.tag, )) } }, - None => Err(ShellError::string( - "edit needs a field when adding a value to an object", + None => Err(ShellError::untagged_runtime_error( + "edit needs a column when changing a value in a table", )), }, - x => Err(ShellError::string(format!( - "Unrecognized type in stream: {:?}", - x - ))), + _ => Err(ShellError::labeled_error( + "Unrecognized type in stream", + "original value", + value_tag, + )), } } } @@ -42,27 +47,30 @@ impl Edit { impl Plugin for Edit { fn config(&mut self) -> Result { Ok(Signature::build("edit") - .desc("Edit an existing field to have a new value.") - .required("Field", SyntaxType::String) - .required("Value", SyntaxType::String) + .desc("Edit an existing column to have a new value.") + .required( + "Field", + SyntaxShape::ColumnPath, + "the name of the column to edit", + ) + .required( + "Value", + SyntaxShape::String, + "the new value to give the cell(s)", + ) .filter()) } fn begin_filter(&mut self, call_info: CallInfo) -> Result, ShellError> { if let Some(args) = call_info.args.positional { match &args[0] { - Tagged { - item: Value::Primitive(Primitive::String(s)), + table @ Tagged { + item: Value::Table(_), .. 
} => { - self.field = Some(s.clone()); - } - _ => { - return Err(ShellError::string(format!( - "Unrecognized type in params: {:?}", - args[0] - ))) + self.field = Some(table.as_column_path()?); } + value => return Err(ShellError::type_error("table", value.tagged_type_name())), } match &args[1] { Tagged { item: v, .. } => { diff --git a/src/plugins/embed.rs b/src/plugins/embed.rs index 3c7adb01bf..6dc539d107 100644 --- a/src/plugins/embed.rs +++ b/src/plugins/embed.rs @@ -1,6 +1,9 @@ +#[macro_use] +extern crate indexmap; + use nu::{ serve_plugin, CallInfo, Plugin, Primitive, ReturnSuccess, ReturnValue, ShellError, Signature, - SyntaxType, Tag, Tagged, TaggedDictBuilder, Value, + SyntaxShape, Tag, Tagged, TaggedItem, Value, }; struct Embed { @@ -16,20 +19,8 @@ impl Embed { } fn embed(&mut self, value: Tagged) -> Result<(), ShellError> { - match value { - Tagged { item, tag } => match &self.field { - Some(_) => { - self.values.push(Tagged { - item: item, - tag: tag, - }); - Ok(()) - } - None => Err(ShellError::string( - "embed needs a field when embedding a value", - )), - }, - } + self.values.push(value); + Ok(()) } } @@ -37,8 +28,7 @@ impl Plugin for Embed { fn config(&mut self) -> Result { Ok(Signature::build("embed") .desc("Embeds a new field to the table.") - .required("Field", SyntaxType::String) - .rest(SyntaxType::String) + .optional("field", SyntaxShape::String, "the name of the new column") .filter()) } @@ -52,12 +42,7 @@ impl Plugin for Embed { self.field = Some(s.clone()); self.values = Vec::new(); } - _ => { - return Err(ShellError::string(format!( - "Unrecognized type in params: {:?}", - args[0] - ))) - } + value => return Err(ShellError::type_error("string", value.tagged_type_name())), } } @@ -70,15 +55,15 @@ impl Plugin for Embed { } fn end_filter(&mut self) -> Result, ShellError> { - let mut root = TaggedDictBuilder::new(Tag::unknown()); - root.insert_tagged( - self.field.as_ref().unwrap(), - Tagged { - item: Value::List(self.values.clone()), - tag: Tag::unknown(), - }, - ); - Ok(vec![ReturnSuccess::value(root.into_tagged_value())]) + let row = Value::row(indexmap! 
{ + match &self.field { + Some(key) => key.clone(), + None => "root".into(), + } => Value::table(&self.values).tagged(Tag::unknown()), + }) + .tagged(Tag::unknown()); + + Ok(vec![ReturnSuccess::value(row)]) } } diff --git a/src/plugins/inc.rs b/src/plugins/inc.rs index fecba04e7d..ed0416ce43 100644 --- a/src/plugins/inc.rs +++ b/src/plugins/inc.rs @@ -1,6 +1,6 @@ use nu::{ serve_plugin, CallInfo, Plugin, Primitive, ReturnSuccess, ReturnValue, ShellError, Signature, - SyntaxType, Tagged, TaggedItem, Value, + SyntaxShape, Tagged, TaggedItem, Value, }; enum Action { @@ -14,8 +14,10 @@ pub enum SemVerAction { Patch, } +pub type ColumnPath = Vec>; + struct Inc { - field: Option, + field: Option, error: Option, action: Option, } @@ -80,35 +82,53 @@ impl Inc { Value::Primitive(Primitive::Bytes(b)) => { Ok(Value::bytes(b + 1 as u64).tagged(value.tag())) } - Value::Primitive(Primitive::String(ref s)) => { - Ok(Tagged::from_item(self.apply(&s)?, value.tag())) + Value::Primitive(Primitive::String(ref s)) => Ok(self.apply(&s)?.tagged(value.tag())), + Value::Table(values) => { + if values.len() == 1 { + return Ok(Value::Table(vec![self.inc(values[0].clone())?]).tagged(value.tag())); + } else { + return Err(ShellError::type_error( + "incrementable value", + value.tagged_type_name(), + )); + } } - Value::Object(_) => match self.field { + Value::Row(_) => match self.field { Some(ref f) => { - let replacement = match value.item.get_data_by_path(value.tag(), f) { + let replacement = match value.item.get_data_by_column_path(value.tag(), f) { Some(result) => self.inc(result.map(|x| x.clone()))?, None => { - return Err(ShellError::string("inc could not find field to replace")) + return Err(ShellError::labeled_error( + "inc could not find field to replace", + "column name", + value.tag(), + )) } }; - match value - .item - .replace_data_at_path(value.tag(), f, replacement.item.clone()) - { + + match value.item.replace_data_at_column_path( + value.tag(), + f, + replacement.item.clone(), + ) { Some(v) => return Ok(v), None => { - return Err(ShellError::string("inc could not find field to replace")) + return Err(ShellError::labeled_error( + "inc could not find field to replace", + "column name", + value.tag(), + )) } } } - None => Err(ShellError::string( - "inc needs a field when incrementing a value in an object", + None => Err(ShellError::untagged_runtime_error( + "inc needs a field when incrementing a column in a table", )), }, - x => Err(ShellError::string(format!( - "Unrecognized type in stream: {:?}", - x - ))), + _ => Err(ShellError::type_error( + "incrementable value", + value.tagged_type_name(), + )), } } } @@ -116,11 +136,11 @@ impl Inc { impl Plugin for Inc { fn config(&mut self) -> Result { Ok(Signature::build("inc") - .desc("Increment a value or version. Optional use the field of a table.") - .switch("major") - .switch("minor") - .switch("patch") - .rest(SyntaxType::String) + .desc("Increment a value or version. Optionally use the column of a table.") + .switch("major", "increment the major version (eg 1.2.1 -> 2.0.0)") + .switch("minor", "increment the minor version (eg 1.2.1 -> 1.3.0)") + .switch("patch", "increment the patch version (eg 1.2.1 -> 1.2.2)") + .rest(SyntaxShape::ColumnPath, "the column(s) to update") .filter()) } @@ -138,18 +158,13 @@ impl Plugin for Inc { if let Some(args) = call_info.args.positional { for arg in args { match arg { - Tagged { - item: Value::Primitive(Primitive::String(s)), + table @ Tagged { + item: Value::Table(_), .. 
} => { - self.field = Some(s); - } - _ => { - return Err(ShellError::string(format!( - "Unrecognized type in params: {:?}", - arg - ))) + self.field = Some(table.as_column_path()?.item().to_vec()); } + value => return Err(ShellError::type_error("table", value.tagged_type_name())), } } } @@ -160,7 +175,11 @@ impl Plugin for Inc { match &self.error { Some(reason) => { - return Err(ShellError::string(format!("{}: {}", reason, Inc::usage()))) + return Err(ShellError::untagged_runtime_error(format!( + "{}: {}", + reason, + Inc::usage() + ))) } None => Ok(vec![]), } @@ -181,8 +200,8 @@ mod tests { use super::{Inc, SemVerAction}; use indexmap::IndexMap; use nu::{ - CallInfo, EvaluatedArgs, Plugin, ReturnSuccess, SourceMap, Span, Tag, Tagged, - TaggedDictBuilder, TaggedItem, Value, + CallInfo, EvaluatedArgs, Plugin, ReturnSuccess, Tag, Tagged, TaggedDictBuilder, TaggedItem, + Value, }; struct CallStub { @@ -201,22 +220,26 @@ mod tests { fn with_long_flag(&mut self, name: &str) -> &mut Self { self.flags.insert( name.to_string(), - Value::boolean(true).simple_spanned(Span::unknown()), + Value::boolean(true).tagged(Tag::unknown()), ); self } fn with_parameter(&mut self, name: &str) -> &mut Self { + let fields: Vec> = name + .split(".") + .map(|s| Value::string(s.to_string()).tagged(Tag::unknown())) + .collect(); + self.positionals - .push(Value::string(name.to_string()).simple_spanned(Span::unknown())); + .push(Value::Table(fields).tagged(Tag::unknown())); self } fn create(&self) -> CallInfo { CallInfo { args: EvaluatedArgs::new(Some(self.positionals.clone()), Some(self.flags.clone())), - source_map: SourceMap::new(), - name_span: Span::unknown(), + name_tag: Tag::unknown(), } } } @@ -291,7 +314,12 @@ mod tests { .begin_filter(CallStub::new().with_parameter("package.version").create()) .is_ok()); - assert_eq!(plugin.field, Some("package.version".to_string())); + assert_eq!( + plugin + .field + .map(|f| f.iter().map(|f| f.item.clone()).collect()), + Some(vec!["package".to_string(), "version".to_string()]) + ); } #[test] @@ -333,7 +361,7 @@ mod tests { match output[0].as_ref().unwrap() { ReturnSuccess::Value(Tagged { - item: Value::Object(o), + item: Value::Row(o), .. }) => assert_eq!( *o.get_data(&String::from("version")).borrow(), @@ -361,7 +389,7 @@ mod tests { match output[0].as_ref().unwrap() { ReturnSuccess::Value(Tagged { - item: Value::Object(o), + item: Value::Row(o), .. }) => assert_eq!( *o.get_data(&String::from("version")).borrow(), @@ -390,7 +418,7 @@ mod tests { match output[0].as_ref().unwrap() { ReturnSuccess::Value(Tagged { - item: Value::Object(o), + item: Value::Row(o), .. 
}) => assert_eq!( *o.get_data(&field).borrow(), diff --git a/src/plugins/match.rs b/src/plugins/match.rs new file mode 100644 index 0000000000..eefbf10632 --- /dev/null +++ b/src/plugins/match.rs @@ -0,0 +1,106 @@ +use nu::{ + serve_plugin, CallInfo, Plugin, Primitive, ReturnSuccess, ReturnValue, ShellError, Signature, + SyntaxShape, Tagged, Value, +}; +use regex::Regex; + +struct Match { + column: String, + regex: Regex, +} + +impl Match { + fn new() -> Self { + Match { + column: String::new(), + regex: Regex::new("").unwrap(), + } + } +} + +impl Plugin for Match { + fn config(&mut self) -> Result { + Ok(Signature::build("match") + .desc("filter rows by regex") + .required("member", SyntaxShape::Member, "the column name to match") + .required("regex", SyntaxShape::String, "the regex to match with") + .filter()) + } + fn begin_filter(&mut self, call_info: CallInfo) -> Result, ShellError> { + if let Some(args) = call_info.args.positional { + match &args[0] { + Tagged { + item: Value::Primitive(Primitive::String(s)), + .. + } => { + self.column = s.clone(); + } + Tagged { tag, .. } => { + return Err(ShellError::labeled_error( + "Unrecognized type in params", + "value", + tag, + )); + } + } + match &args[1] { + Tagged { + item: Value::Primitive(Primitive::String(s)), + .. + } => { + self.regex = Regex::new(s).unwrap(); + } + Tagged { tag, .. } => { + return Err(ShellError::labeled_error( + "Unrecognized type in params", + "value", + tag, + )); + } + } + } + Ok(vec![]) + } + + fn filter(&mut self, input: Tagged) -> Result, ShellError> { + let flag: bool; + match &input { + Tagged { + item: Value::Row(dict), + tag, + } => { + if let Some(val) = dict.entries.get(&self.column) { + match val { + Tagged { + item: Value::Primitive(Primitive::String(s)), + .. + } => { + flag = self.regex.is_match(s); + } + Tagged { tag, .. } => { + return Err(ShellError::labeled_error("expected string", "value", tag)); + } + } + } else { + return Err(ShellError::labeled_error( + format!("column not in row! {:?} {:?}", &self.column, dict), + "row", + tag, + )); + } + } + Tagged { tag, .. } => { + return Err(ShellError::labeled_error("Expected row", "value", tag)); + } + } + if flag { + Ok(vec![Ok(ReturnSuccess::Value(input))]) + } else { + Ok(vec![]) + } + } +} + +fn main() { + serve_plugin(&mut Match::new()); +} diff --git a/src/plugins/ps.rs b/src/plugins/ps.rs new file mode 100644 index 0000000000..2db73d395a --- /dev/null +++ b/src/plugins/ps.rs @@ -0,0 +1,80 @@ +use futures::executor::block_on; +use futures::stream::{StreamExt, TryStreamExt}; + +use heim::process::{self as process, Process, ProcessResult}; +use heim::units::{ratio, Ratio}; +use std::usize; + +use nu::{ + serve_plugin, CallInfo, Plugin, ReturnSuccess, ReturnValue, ShellError, Signature, Tag, Tagged, + TaggedDictBuilder, Value, +}; +use std::time::Duration; + +struct Ps; +impl Ps { + fn new() -> Ps { + Ps + } +} + +async fn usage(process: Process) -> ProcessResult<(process::Process, Ratio)> { + let usage_1 = process.cpu_usage().await?; + futures_timer::Delay::new(Duration::from_millis(100)).await?; + let usage_2 = process.cpu_usage().await?; + + Ok((process, usage_2 - usage_1)) +} + +async fn ps(tag: Tag) -> Vec> { + let processes = process::processes() + .map_ok(|process| { + // Note that there is no `.await` here, + // as we want to pass the returned future + // into the `.try_buffer_unordered`. 
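A stand-alone sketch of the per-row check the new `match` plugin performs: it compiles the regex once in begin_filter and then tests the chosen column's string value in filter. The helper below is illustrative and uses only the `regex` crate already imported by the plugin:

use regex::Regex;

// Returns whether a single cell matches the pattern.
fn row_matches(pattern: &str, cell: &str) -> Result<bool, regex::Error> {
    let re = Regex::new(pattern)?;
    Ok(re.is_match(cell))
}

For example, row_matches(r"^nu", "nushell") returns Ok(true), so that row would be passed through.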
+ usage(process) + }) + .try_buffer_unordered(usize::MAX); + pin_utils::pin_mut!(processes); + + let mut output = vec![]; + while let Some(res) = processes.next().await { + if let Ok((process, usage)) = res { + let mut dict = TaggedDictBuilder::new(&tag); + dict.insert("pid", Value::int(process.pid())); + if let Ok(name) = process.name().await { + dict.insert("name", Value::string(name)); + } + if let Ok(status) = process.status().await { + dict.insert("status", Value::string(format!("{:?}", status))); + } + dict.insert("cpu", Value::number(usage.get::())); + output.push(dict.into_tagged_value()); + } + } + + output +} + +impl Plugin for Ps { + fn config(&mut self) -> Result { + Ok(Signature::build("ps") + .desc("View information about system processes.") + .filter()) + } + + fn begin_filter(&mut self, callinfo: CallInfo) -> Result, ShellError> { + Ok(block_on(ps(callinfo.name_tag)) + .into_iter() + .map(ReturnSuccess::value) + .collect()) + } + + fn filter(&mut self, _: Tagged) -> Result, ShellError> { + Ok(vec![]) + } +} + +fn main() { + serve_plugin(&mut Ps::new()); +} diff --git a/src/plugins/read.rs b/src/plugins/read.rs new file mode 100644 index 0000000000..de88946e91 --- /dev/null +++ b/src/plugins/read.rs @@ -0,0 +1,156 @@ +use nu::{ + serve_plugin, CallInfo, Plugin, Primitive, ReturnSuccess, ReturnValue, ShellError, Signature, + SyntaxShape, Tagged, TaggedDictBuilder, Value, +}; + +use nom::{ + bytes::complete::{tag, take_while}, + IResult, +}; +use regex::Regex; + +#[derive(Debug)] +enum ReadCommand { + Text(String), + Column(String), +} + +fn read(input: &str) -> IResult<&str, Vec> { + let mut output = vec![]; + + let mut loop_input = input; + loop { + let (input, before) = take_while(|c| c != '{')(loop_input)?; + if before.len() > 0 { + output.push(ReadCommand::Text(before.to_string())); + } + if input != "" { + // Look for column as we're now at one + let (input, _) = tag("{")(input)?; + let (input, column) = take_while(|c| c != '}')(input)?; + let (input, _) = tag("}")(input)?; + + output.push(ReadCommand::Column(column.to_string())); + loop_input = input; + } else { + loop_input = input; + } + if loop_input == "" { + break; + } + } + + Ok((loop_input, output)) +} + +fn column_names(commands: &[ReadCommand]) -> Vec { + let mut output = vec![]; + + for command in commands { + match command { + ReadCommand::Column(c) => { + output.push(c.clone()); + } + _ => {} + } + } + + output +} + +fn build_regex(commands: &[ReadCommand]) -> String { + let mut output = String::new(); + + for command in commands { + match command { + ReadCommand::Text(s) => { + output.push_str(&s.replace("(", "\\(")); + } + ReadCommand::Column(_) => { + output.push_str("(.*)"); + } + } + } + + return output; +} +struct Read { + regex: Regex, + column_names: Vec, +} + +impl Read { + fn new() -> Self { + Read { + regex: Regex::new("").unwrap(), + column_names: vec![], + } + } +} + +impl Plugin for Read { + fn config(&mut self) -> Result { + Ok(Signature::build("read") + .desc("Parse columns from string data using a simple pattern") + .required( + "pattern", + SyntaxShape::Any, + "the pattern to match. Eg) \"{foo}: {bar}\"", + ) + .filter()) + } + fn begin_filter(&mut self, call_info: CallInfo) -> Result, ShellError> { + if let Some(args) = call_info.args.positional { + match &args[0] { + Tagged { + item: Value::Primitive(Primitive::String(pattern)), + .. 
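The ps plugin's key trick is the one its comment calls out: map each process to the `usage` future without awaiting it, then let `try_buffer_unordered` drive all of those futures concurrently. A minimal sketch of the same buffering shape with plain numbers (illustrative values, futures 0.3 API):

use futures::stream::{self, StreamExt};

// Map each element to a future without awaiting it, then let
// buffer_unordered poll the futures concurrently.
async fn double_all(inputs: Vec<u64>) -> Vec<u64> {
    stream::iter(inputs)
        .map(|n| async move { n * 2 })
        .buffer_unordered(16) // `ps` uses usize::MAX to sample all processes at once
        .collect()
        .await
}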
+ } => { + //self.pattern = s.clone(); + let read_pattern = read(&pattern).unwrap(); + let read_regex = build_regex(&read_pattern.1); + + self.column_names = column_names(&read_pattern.1); + + self.regex = Regex::new(&read_regex).unwrap(); + } + Tagged { tag, .. } => { + return Err(ShellError::labeled_error( + "Unrecognized type in params", + "value", + tag, + )); + } + } + } + Ok(vec![]) + } + + fn filter(&mut self, input: Tagged) -> Result, ShellError> { + let mut results = vec![]; + match &input { + Tagged { + tag, + item: Value::Primitive(Primitive::String(s)), + } => { + //self.full_input.push_str(&s); + + for cap in self.regex.captures_iter(&s) { + let mut dict = TaggedDictBuilder::new(tag); + + for (idx, column_name) in self.column_names.iter().enumerate() { + dict.insert(column_name, Value::string(&cap[idx + 1].to_string())); + } + + results.push(ReturnSuccess::value(dict.into_tagged_value())); + } + } + _ => {} + } + Ok(results) + } +} + +fn main() { + serve_plugin(&mut Read::new()); +} diff --git a/src/plugins/skip.rs b/src/plugins/skip.rs index cb259e99b9..5ec290fe04 100644 --- a/src/plugins/skip.rs +++ b/src/plugins/skip.rs @@ -1,6 +1,6 @@ use nu::{ serve_plugin, CallInfo, CoerceInto, Plugin, Primitive, ReturnSuccess, ReturnValue, ShellError, - Signature, SyntaxType, Tagged, TaggedItem, Value, + Signature, SyntaxShape, Tagged, TaggedItem, Value, }; struct Skip { @@ -15,9 +15,9 @@ impl Skip { impl Plugin for Skip { fn config(&mut self) -> Result { - Ok(Signature::build("skip") + Ok(Signature::build("skip") .desc("Skip a number of rows") - .rest(SyntaxType::Number) + .rest(SyntaxShape::Number, "the number of rows to skip") .filter()) } fn begin_filter(&mut self, call_info: CallInfo) -> Result, ShellError> { @@ -34,7 +34,7 @@ impl Plugin for Skip { return Err(ShellError::labeled_error( "Unrecognized type in params", "expected an integer", - arg.span(), + arg.tag(), )) } } diff --git a/src/plugins/str.rs b/src/plugins/str.rs index d47fec0f22..8260bdac2c 100644 --- a/src/plugins/str.rs +++ b/src/plugins/str.rs @@ -1,25 +1,19 @@ use nu::{ serve_plugin, CallInfo, Plugin, Primitive, ReturnSuccess, ReturnValue, ShellError, Signature, - SyntaxType, Tagged, Value, + SyntaxShape, Tagged, TaggedItem, Value, }; -use regex::Regex; #[derive(Debug, Eq, PartialEq)] enum Action { Downcase, Upcase, ToInteger, - Replace(ReplaceAction), } -#[derive(Debug, Eq, PartialEq)] -enum ReplaceAction { - Direct, - FindAndReplace, -} +pub type ColumnPath = Vec>; struct Str { - field: Option, + field: Option, params: Option>, error: Option, action: Option, @@ -45,43 +39,14 @@ impl Str { Err(_) => Value::string(input), }, }, - Some(Action::Replace(ref mode)) => match mode { - ReplaceAction::Direct => Value::string(self.first_param()), - ReplaceAction::FindAndReplace => { - let regex = Regex::new(self.first_param()); - - match regex { - Ok(re) => Value::string(re.replace(input, self.second_param()).to_owned()), - Err(_) => Value::string(input), - } - } - }, None => Value::string(input), }; Ok(applied) } - fn did_supply_field(&self) -> bool { - self.field.is_some() - } - - fn first_param(&self) -> &str { - let idx = if self.did_supply_field() { 1 } else { 0 }; - self.get_param(idx) - } - - fn second_param(&self) -> &str { - let idx = if self.did_supply_field() { 2 } else { 1 }; - self.get_param(idx) - } - - fn get_param(&self, idx: usize) -> &str { - self.params.as_ref().unwrap().get(idx).unwrap().as_str() - } - - fn for_field(&mut self, field: &str) { - self.field = Some(String::from(field)); + fn for_field(&mut 
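A worked example of what the `read` parser above produces for the pattern "{name}: {value}": literal text is kept as-is (with "(" escaped) and each {column} becomes a "(.*)" capture group, so captures line up positionally with the collected column names. The helper below hard-codes that generated regex to show the end result; the function name is illustrative:

use regex::Regex;

// Equivalent of `read "{name}: {value}"` applied to one input line.
fn extract_name_value(line: &str) -> Option<(String, String)> {
    let re = Regex::new("(.*): (.*)").ok()?;
    let caps = re.captures(line)?;
    Some((caps[1].to_string(), caps[2].to_string()))
}

So extract_name_value("version: 0.3.0") yields Some(("version", "0.3.0")), which the plugin would emit as a row with columns "name" and "value".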
self, column_path: ColumnPath) { + self.field = Some(column_path); } fn permit(&mut self) -> bool { @@ -92,14 +57,6 @@ impl Str { self.error = Some(message.to_string()); } - fn for_replace(&mut self, mode: ReplaceAction) { - if self.permit() { - self.action = Some(Action::Replace(mode)); - } else { - self.log_error("can only apply one"); - } - } - fn for_to_int(&mut self) { if self.permit() { self.action = Some(Action::ToInteger); @@ -125,42 +82,45 @@ impl Str { } pub fn usage() -> &'static str { - "Usage: str field [--downcase|--upcase|--to-int|--replace|--find-replace]" + "Usage: str field [--downcase|--upcase|--to-int]" } } impl Str { fn strutils(&self, value: Tagged) -> Result, ShellError> { match value.item { - Value::Primitive(Primitive::String(ref s)) => { - Ok(Tagged::from_item(self.apply(&s)?, value.tag())) - } - Value::Object(_) => match self.field { + Value::Primitive(Primitive::String(ref s)) => Ok(self.apply(&s)?.tagged(value.tag())), + Value::Row(_) => match self.field { Some(ref f) => { - let replacement = match value.item.get_data_by_path(value.tag(), f) { + let replacement = match value.item.get_data_by_column_path(value.tag(), f) { Some(result) => self.strutils(result.map(|x| x.clone()))?, - None => return Ok(Tagged::from_item(Value::nothing(), value.tag)), + None => return Ok(Value::nothing().tagged(value.tag)), }; - match value - .item - .replace_data_at_path(value.tag(), f, replacement.item.clone()) - { + match value.item.replace_data_at_column_path( + value.tag(), + f, + replacement.item.clone(), + ) { Some(v) => return Ok(v), None => { - return Err(ShellError::string("str could not find field to replace")) + return Err(ShellError::type_error( + "column name", + value.tagged_type_name(), + )) } } } - None => Err(ShellError::string(format!( + None => Err(ShellError::untagged_runtime_error(format!( "{}: {}", - "str needs a field when applying it to a value in an object", + "str needs a column when applied to a value in a row", Str::usage() ))), }, - x => Err(ShellError::string(format!( - "Unrecognized type in stream: {:?}", - x - ))), + _ => Err(ShellError::labeled_error( + "Unrecognized type in stream", + value.type_name(), + value.tag, + )), } } } @@ -168,13 +128,11 @@ impl Str { impl Plugin for Str { fn config(&mut self) -> Result { Ok(Signature::build("str") - .desc("Apply string function. Optional use the field of a table") - .switch("downcase") - .switch("upcase") - .switch("to-int") - .switch("replace") - .switch("find-replace") - .rest(SyntaxType::Member) + .desc("Apply string function. Optional use the column of a table") + .switch("downcase", "convert string to lowercase") + .switch("upcase", "convert string to uppercase") + .switch("to-int", "convert string to integer") + .rest(SyntaxShape::ColumnPath, "the column(s) to convert") .filter()) } @@ -190,41 +148,32 @@ impl Plugin for Str { if args.has("to-int") { self.for_to_int(); } - if args.has("replace") { - self.for_replace(ReplaceAction::Direct); - } - if args.has("find-replace") { - self.for_replace(ReplaceAction::FindAndReplace); - } if let Some(possible_field) = args.nth(0) { match possible_field { Tagged { item: Value::Primitive(Primitive::String(s)), - .. 
+ tag, } => match self.action { - Some(Action::Replace(ReplaceAction::Direct)) => { - if args.len() == 2 { - self.for_field(&s); - } - } - Some(Action::Replace(ReplaceAction::FindAndReplace)) => { - if args.len() == 3 { - self.for_field(&s); - } - } Some(Action::Downcase) | Some(Action::Upcase) | Some(Action::ToInteger) | None => { - self.for_field(&s); + self.for_field(vec![s.clone().tagged(tag)]); } }, + table @ Tagged { + item: Value::Table(_), + .. + } => { + self.field = Some(table.as_column_path()?.item); + } _ => { - return Err(ShellError::string(format!( - "Unrecognized type in params: {:?}", - possible_field - ))) + return Err(ShellError::labeled_error( + "Unrecognized type in params", + possible_field.type_name(), + &possible_field.tag, + )) } } } @@ -241,7 +190,11 @@ impl Plugin for Str { match &self.error { Some(reason) => { - return Err(ShellError::string(format!("{}: {}", reason, Str::usage()))) + return Err(ShellError::untagged_runtime_error(format!( + "{}: {}", + reason, + Str::usage() + ))) } None => Ok(vec![]), } @@ -258,24 +211,14 @@ fn main() { #[cfg(test)] mod tests { - use super::{Action, ReplaceAction, Str}; + use super::{Action, Str}; use indexmap::IndexMap; use nu::{ - CallInfo, EvaluatedArgs, Plugin, Primitive, ReturnSuccess, SourceMap, Span, Tag, Tagged, - TaggedDictBuilder, TaggedItem, Value, + CallInfo, EvaluatedArgs, Plugin, Primitive, ReturnSuccess, Tag, Tagged, TaggedDictBuilder, + TaggedItem, Value, }; use num_bigint::BigInt; - impl Str { - fn replace_with(&mut self, value: &str) { - self.params.as_mut().unwrap().push(value.to_string()); - } - - fn find_with(&mut self, search: &str) { - self.params.as_mut().unwrap().push(search.to_string()); - } - } - struct CallStub { positionals: Vec>, flags: IndexMap>, @@ -292,22 +235,26 @@ mod tests { fn with_long_flag(&mut self, name: &str) -> &mut Self { self.flags.insert( name.to_string(), - Value::boolean(true).simple_spanned(Span::unknown()), + Value::boolean(true).tagged(Tag::unknown()), ); self } fn with_parameter(&mut self, name: &str) -> &mut Self { + let fields: Vec> = name + .split(".") + .map(|s| Value::string(s.to_string()).tagged(Tag::unknown())) + .collect(); + self.positionals - .push(Value::string(name.to_string()).simple_spanned(Span::unknown())); + .push(Value::Table(fields).tagged(Tag::unknown())); self } fn create(&self) -> CallInfo { CallInfo { args: EvaluatedArgs::new(Some(self.positionals.clone()), Some(self.flags.clone())), - source_map: SourceMap::new(), - name_span: Span::unknown(), + name_tag: Tag::unknown(), } } } @@ -319,7 +266,7 @@ mod tests { } fn unstructured_sample_record(value: &str) -> Tagged { - Tagged::from_item(Value::string(value), Tag::unknown()) + Value::string(value).tagged(Tag::unknown()) } #[test] @@ -328,7 +275,7 @@ mod tests { let configured = plugin.config().unwrap(); - for action_flag in &["downcase", "upcase", "to-int", "replace", "find-replace"] { + for action_flag in &["downcase", "upcase", "to-int"] { assert!(configured.named.get(*action_flag).is_some()); } } @@ -362,33 +309,6 @@ mod tests { .is_ok()); assert_eq!(plugin.action.unwrap(), Action::ToInteger); } - - #[test] - fn str_plugin_accepts_replace() { - let mut plugin = Str::new(); - - assert!(plugin - .begin_filter(CallStub::new().with_long_flag("replace").create()) - .is_ok()); - assert_eq!( - plugin.action.unwrap(), - Action::Replace(ReplaceAction::Direct) - ); - } - - #[test] - fn str_plugin_accepts_find_replace() { - let mut plugin = Str::new(); - - assert!(plugin - 
.begin_filter(CallStub::new().with_long_flag("find-replace").create()) - .is_ok()); - assert_eq!( - plugin.action.unwrap(), - Action::Replace(ReplaceAction::FindAndReplace) - ); - } - #[test] fn str_plugin_accepts_field() { let mut plugin = Str::new(); @@ -401,7 +321,12 @@ mod tests { ) .is_ok()); - assert_eq!(plugin.field, Some("package.description".to_string())); + assert_eq!( + plugin + .field + .map(|f| f.into_iter().map(|f| f.item).collect()), + Some(vec!["package".to_string(), "description".to_string()]) + ) } #[test] @@ -441,26 +366,6 @@ mod tests { assert_eq!(strutils.apply("9999").unwrap(), Value::int(9999 as i64)); } - #[test] - fn str_replace() { - let mut strutils = Str::new(); - strutils.for_replace(ReplaceAction::Direct); - strutils.replace_with("robalino"); - assert_eq!(strutils.apply("andres").unwrap(), Value::string("robalino")); - } - - #[test] - fn str_find_replace() { - let mut strutils = Str::new(); - strutils.for_replace(ReplaceAction::FindAndReplace); - strutils.find_with(r"kittens"); - strutils.replace_with("jotandrehuda"); - assert_eq!( - strutils.apply("wykittens").unwrap(), - Value::string("wyjotandrehuda") - ); - } - #[test] fn str_plugin_applies_upcase_with_field() { let mut plugin = Str::new(); @@ -479,7 +384,7 @@ mod tests { match output[0].as_ref().unwrap() { ReturnSuccess::Value(Tagged { - item: Value::Object(o), + item: Value::Row(o), .. }) => assert_eq!( *o.get_data(&String::from("name")).borrow(), @@ -527,7 +432,7 @@ mod tests { match output[0].as_ref().unwrap() { ReturnSuccess::Value(Tagged { - item: Value::Object(o), + item: Value::Row(o), .. }) => assert_eq!( *o.get_data(&String::from("name")).borrow(), @@ -575,7 +480,7 @@ mod tests { match output[0].as_ref().unwrap() { ReturnSuccess::Value(Tagged { - item: Value::Object(o), + item: Value::Row(o), .. }) => assert_eq!( *o.get_data(&String::from("Nu_birthday")).borrow(), @@ -604,114 +509,4 @@ mod tests { _ => {} } } - - #[test] - fn str_plugin_applies_replace_with_field() { - let mut plugin = Str::new(); - - assert!(plugin - .begin_filter( - CallStub::new() - .with_parameter("rustconf") - .with_parameter("22nd August 2019") - .with_long_flag("replace") - .create() - ) - .is_ok()); - - let subject = structured_sample_record("rustconf", "1st January 1970"); - let output = plugin.filter(subject).unwrap(); - - match output[0].as_ref().unwrap() { - ReturnSuccess::Value(Tagged { - item: Value::Object(o), - .. - }) => assert_eq!( - *o.get_data(&String::from("rustconf")).borrow(), - Value::string(String::from("22nd August 2019")) - ), - _ => {} - } - } - - #[test] - fn str_plugin_applies_replace_without_field() { - let mut plugin = Str::new(); - - assert!(plugin - .begin_filter( - CallStub::new() - .with_parameter("22nd August 2019") - .with_long_flag("replace") - .create() - ) - .is_ok()); - - let subject = unstructured_sample_record("1st January 1970"); - let output = plugin.filter(subject).unwrap(); - - match output[0].as_ref().unwrap() { - ReturnSuccess::Value(Tagged { - item: Value::Primitive(Primitive::String(s)), - .. 
- }) => assert_eq!(*s, String::from("22nd August 2019")), - _ => {} - } - } - - #[test] - fn str_plugin_applies_find_replace_with_field() { - let mut plugin = Str::new(); - - assert!(plugin - .begin_filter( - CallStub::new() - .with_parameter("staff") - .with_parameter("kittens") - .with_parameter("jotandrehuda") - .with_long_flag("find-replace") - .create() - ) - .is_ok()); - - let subject = structured_sample_record("staff", "wykittens"); - let output = plugin.filter(subject).unwrap(); - - match output[0].as_ref().unwrap() { - ReturnSuccess::Value(Tagged { - item: Value::Object(o), - .. - }) => assert_eq!( - *o.get_data(&String::from("staff")).borrow(), - Value::string(String::from("wyjotandrehuda")) - ), - _ => {} - } - } - - #[test] - fn str_plugin_applies_find_replace_without_field() { - let mut plugin = Str::new(); - - assert!(plugin - .begin_filter( - CallStub::new() - .with_parameter("kittens") - .with_parameter("jotandrehuda") - .with_long_flag("find-replace") - .create() - ) - .is_ok()); - - let subject = unstructured_sample_record("wykittens"); - let output = plugin.filter(subject).unwrap(); - - match output[0].as_ref().unwrap() { - ReturnSuccess::Value(Tagged { - item: Value::Primitive(Primitive::String(s)), - .. - }) => assert_eq!(*s, String::from("wyjotandrehuda")), - _ => {} - } - } } diff --git a/src/plugins/sum.rs b/src/plugins/sum.rs index 32ecd7a9ce..d08d45713d 100644 --- a/src/plugins/sum.rs +++ b/src/plugins/sum.rs @@ -1,6 +1,6 @@ use nu::{ serve_plugin, CallInfo, Plugin, Primitive, ReturnSuccess, ReturnValue, ShellError, Signature, - Tag, Tagged, Value, + Tagged, TaggedItem, Value, }; struct Sum { @@ -18,46 +18,49 @@ impl Sum { match &self.total { Some(Tagged { item: Value::Primitive(Primitive::Int(j)), - tag: Tag { span, .. }, + tag, }) => { //TODO: handle overflow - self.total = - Some(Tagged::from_simple_spanned_item(Value::int(i + j), span)); + self.total = Some(Value::int(i + j).tagged(tag)); Ok(()) } None => { self.total = Some(value.clone()); Ok(()) } - _ => Err(ShellError::string(format!( - "Could not sum non-integer or unrelated types" - ))), + _ => Err(ShellError::labeled_error( + "Could not sum non-integer or unrelated types", + "source", + value.tag, + )), } } Value::Primitive(Primitive::Bytes(b)) => { - match self.total { + match &self.total { Some(Tagged { item: Value::Primitive(Primitive::Bytes(j)), - tag: Tag { span, .. 
}, + tag, }) => { //TODO: handle overflow - self.total = - Some(Tagged::from_simple_spanned_item(Value::bytes(b + j), span)); + self.total = Some(Value::bytes(b + j).tagged(tag)); Ok(()) } None => { self.total = Some(value); Ok(()) } - _ => Err(ShellError::string(format!( - "Could not sum non-integer or unrelated types" - ))), + _ => Err(ShellError::labeled_error( + "Could not sum non-integer or unrelated types", + "source", + value.tag, + )), } } - x => Err(ShellError::string(format!( - "Unrecognized type in stream: {:?}", - x - ))), + x => Err(ShellError::labeled_error( + format!("Unrecognized type in stream: {:?}", x), + "source", + value.tag, + )), } } } diff --git a/src/plugins/sys.rs b/src/plugins/sys.rs index 41b280ffe4..55bf5028bf 100644 --- a/src/plugins/sys.rs +++ b/src/plugins/sys.rs @@ -80,7 +80,7 @@ async fn mem(tag: Tag) -> Tagged { } async fn host(tag: Tag) -> Tagged { - let mut dict = TaggedDictBuilder::with_capacity(tag, 6); + let mut dict = TaggedDictBuilder::with_capacity(&tag, 6); let (platform_result, uptime_result) = futures::future::join(host::platform(), host::uptime()).await; @@ -95,7 +95,7 @@ async fn host(tag: Tag) -> Tagged { // Uptime if let Ok(uptime) = uptime_result { - let mut uptime_dict = TaggedDictBuilder::with_capacity(tag, 4); + let mut uptime_dict = TaggedDictBuilder::with_capacity(&tag, 4); let uptime = uptime.get::().round() as i64; let days = uptime / (60 * 60 * 24); @@ -116,10 +116,13 @@ async fn host(tag: Tag) -> Tagged { let mut user_vec = vec![]; while let Some(user) = users.next().await { if let Ok(user) = user { - user_vec.push(Tagged::from_item(Value::string(user.username()), tag)); + user_vec.push(Tagged { + item: Value::string(user.username()), + tag: tag.clone(), + }); } } - let user_list = Value::List(user_vec); + let user_list = Value::Table(user_vec); dict.insert("users", user_list); dict.into_tagged_value() @@ -130,7 +133,7 @@ async fn disks(tag: Tag) -> Option { let mut partitions = disk::partitions_physical(); while let Some(part) = partitions.next().await { if let Ok(part) = part { - let mut dict = TaggedDictBuilder::with_capacity(tag, 6); + let mut dict = TaggedDictBuilder::with_capacity(&tag, 6); dict.insert( "device", Value::string( @@ -163,7 +166,7 @@ async fn disks(tag: Tag) -> Option { } if !output.is_empty() { - Some(Value::List(output)) + Some(Value::Table(output)) } else { None } @@ -176,7 +179,7 @@ async fn battery(tag: Tag) -> Option { if let Ok(batteries) = manager.batteries() { for battery in batteries { if let Ok(battery) = battery { - let mut dict = TaggedDictBuilder::new(tag); + let mut dict = TaggedDictBuilder::new(&tag); if let Some(vendor) = battery.vendor() { dict.insert("vendor", Value::string(vendor)); } @@ -205,7 +208,7 @@ async fn battery(tag: Tag) -> Option { } if !output.is_empty() { - Some(Value::List(output)) + Some(Value::Table(output)) } else { None } @@ -217,7 +220,7 @@ async fn temp(tag: Tag) -> Option { let mut sensors = sensors::temperatures(); while let Some(sensor) = sensors.next().await { if let Ok(sensor) = sensor { - let mut dict = TaggedDictBuilder::new(tag); + let mut dict = TaggedDictBuilder::new(&tag); dict.insert("unit", Value::string(sensor.unit())); if let Some(label) = sensor.label() { dict.insert("label", Value::string(label)); @@ -248,7 +251,7 @@ async fn temp(tag: Tag) -> Option { } if !output.is_empty() { - Some(Value::List(output)) + Some(Value::Table(output)) } else { None } @@ -259,7 +262,7 @@ async fn net(tag: Tag) -> Option { let mut io_counters = net::io_counters(); while let 
Some(nic) = io_counters.next().await { if let Ok(nic) = nic { - let mut network_idx = TaggedDictBuilder::with_capacity(tag, 3); + let mut network_idx = TaggedDictBuilder::with_capacity(&tag, 3); network_idx.insert("name", Value::string(nic.interface())); network_idx.insert( "sent", @@ -273,18 +276,24 @@ async fn net(tag: Tag) -> Option { } } if !output.is_empty() { - Some(Value::List(output)) + Some(Value::Table(output)) } else { None } } async fn sysinfo(tag: Tag) -> Vec> { - let mut sysinfo = TaggedDictBuilder::with_capacity(tag, 7); + let mut sysinfo = TaggedDictBuilder::with_capacity(&tag, 7); - let (host, cpu, disks, memory, temp) = - futures::future::join5(host(tag), cpu(tag), disks(tag), mem(tag), temp(tag)).await; - let (net, battery) = futures::future::join(net(tag), battery(tag)).await; + let (host, cpu, disks, memory, temp) = futures::future::join5( + host(tag.clone()), + cpu(tag.clone()), + disks(tag.clone()), + mem(tag.clone()), + temp(tag.clone()), + ) + .await; + let (net, battery) = futures::future::join(net(tag.clone()), battery(tag.clone())).await; sysinfo.insert_tagged("host", host); if let Some(cpu) = cpu { @@ -315,7 +324,7 @@ impl Plugin for Sys { } fn begin_filter(&mut self, callinfo: CallInfo) -> Result, ShellError> { - Ok(block_on(sysinfo(Tag::unknown_origin(callinfo.name_span))) + Ok(block_on(sysinfo(callinfo.name_tag)) .into_iter() .map(ReturnSuccess::value) .collect()) diff --git a/src/plugins/textview.rs b/src/plugins/textview.rs index 423cae8765..88507183e0 100644 --- a/src/plugins/textview.rs +++ b/src/plugins/textview.rs @@ -1,8 +1,7 @@ use crossterm::{cursor, terminal, RawScreen}; use crossterm::{InputEvent, KeyEvent}; use nu::{ - serve_plugin, CallInfo, Plugin, Primitive, ShellError, Signature, SourceMap, SpanSource, - Tagged, Value, + serve_plugin, AnchorLocation, CallInfo, Plugin, Primitive, ShellError, Signature, Tagged, Value, }; use syntect::easy::HighlightLines; @@ -29,8 +28,8 @@ impl Plugin for TextView { Ok(Signature::build("textview").desc("Autoview of text data.")) } - fn sink(&mut self, call_info: CallInfo, input: Vec>) { - view_text_value(&input[0], &call_info.source_map); + fn sink(&mut self, _call_info: CallInfo, input: Vec>) { + view_text_value(&input[0]); } } @@ -150,7 +149,7 @@ fn scroll_view_lines_if_needed(draw_commands: Vec, use_color_buffer KeyEvent::Esc => { break; } - KeyEvent::Up => { + KeyEvent::Up | KeyEvent::Char('k') => { if starting_row > 0 { starting_row -= 1; max_bottom_line = paint_textview( @@ -160,19 +159,19 @@ fn scroll_view_lines_if_needed(draw_commands: Vec, use_color_buffer ); } } - KeyEvent::Down => { + KeyEvent::Down | KeyEvent::Char('j') => { if starting_row < (max_bottom_line - height) { starting_row += 1; } max_bottom_line = paint_textview(&draw_commands, starting_row, use_color_buffer); } - KeyEvent::PageUp => { + KeyEvent::PageUp | KeyEvent::Ctrl('b') => { starting_row -= std::cmp::min(height, starting_row); max_bottom_line = paint_textview(&draw_commands, starting_row, use_color_buffer); } - KeyEvent::PageDown | KeyEvent::Char(' ') => { + KeyEvent::PageDown | KeyEvent::Ctrl('f') | KeyEvent::Char(' ') => { if starting_row < (max_bottom_line - height) { starting_row += height; @@ -215,20 +214,18 @@ fn scroll_view(s: &str) { scroll_view_lines_if_needed(v, false); } -fn view_text_value(value: &Tagged, source_map: &SourceMap) { - let value_origin = value.origin(); +fn view_text_value(value: &Tagged) { + let value_anchor = value.anchor(); match value.item { Value::Primitive(Primitive::String(ref s)) => { - let source 
= value_origin.and_then(|x| source_map.get(&x)); - - if let Some(source) = source { + if let Some(source) = value_anchor { let extension: Option = match source { - SpanSource::File(file) => { - let path = Path::new(file); + AnchorLocation::File(file) => { + let path = Path::new(&file); path.extension().map(|x| x.to_string_lossy().to_string()) } - SpanSource::Url(url) => { - let url = url::Url::parse(url); + AnchorLocation::Url(url) => { + let url = url::Url::parse(&url); if let Ok(url) = url { let url = url.clone(); if let Some(mut segments) = url.path_segments() { @@ -246,7 +243,7 @@ fn view_text_value(value: &Tagged, source_map: &SourceMap) { } } //FIXME: this probably isn't correct - SpanSource::Source(_source) => None, + AnchorLocation::Source(_source) => None, }; match extension { diff --git a/src/plugins/tree.rs b/src/plugins/tree.rs index 19e38b626c..3d571ca18c 100644 --- a/src/plugins/tree.rs +++ b/src/plugins/tree.rs @@ -17,14 +17,14 @@ impl TreeView { Value::Primitive(p) => { let _ = builder.add_empty_child(p.format(None)); } - Value::Object(o) => { + Value::Row(o) => { for (k, v) in o.entries.iter() { builder = builder.begin_child(k.clone()); Self::from_value_helper(v, builder); builder = builder.end_child(); } } - Value::List(l) => { + Value::Table(l) => { for elem in l.iter() { Self::from_value_helper(elem, builder); } diff --git a/src/prelude.rs b/src/prelude.rs index f800dc8cda..4b12a07bda 100644 --- a/src/prelude.rs +++ b/src/prelude.rs @@ -1,3 +1,13 @@ +#[macro_export] +macro_rules! return_err { + ($expr:expr) => { + match $expr { + Err(_) => return, + Ok(expr) => expr, + }; + }; +} + #[macro_export] macro_rules! stream { ($($expr:expr),*) => {{ @@ -54,29 +64,28 @@ pub(crate) use crate::commands::command::{ pub(crate) use crate::commands::PerItemCommand; pub(crate) use crate::commands::RawCommandArgs; pub(crate) use crate::context::CommandRegistry; -pub(crate) use crate::context::{Context, SpanSource}; +pub(crate) use crate::context::{AnchorLocation, Context}; +pub(crate) use crate::data::base as value; +pub(crate) use crate::data::meta::{Span, Spanned, SpannedItem, Tag, Tagged, TaggedItem}; +pub(crate) use crate::data::types::ExtractType; +pub(crate) use crate::data::{Primitive, Value}; pub(crate) use crate::env::host::handle_unexpected; pub(crate) use crate::env::Host; pub(crate) use crate::errors::{CoerceInto, ShellError}; -pub(crate) use crate::object::base as value; -pub(crate) use crate::object::meta::{Tag, Tagged, TaggedItem}; -pub(crate) use crate::object::types::ExtractType; -pub(crate) use crate::object::{Primitive, Value}; -pub(crate) use crate::parser::hir::SyntaxType; +pub(crate) use crate::parser::hir::SyntaxShape; pub(crate) use crate::parser::parse::parser::Number; pub(crate) use crate::parser::registry::Signature; pub(crate) use crate::shell::filesystem_shell::FilesystemShell; +pub(crate) use crate::shell::help_shell::HelpShell; pub(crate) use crate::shell::shell_manager::ShellManager; pub(crate) use crate::shell::value_shell::ValueShell; -pub(crate) use crate::shell::help_shell::HelpShell; pub(crate) use crate::stream::{InputStream, OutputStream}; -pub(crate) use crate::traits::{HasSpan, ToDebug}; -pub(crate) use crate::Span; +pub(crate) use crate::traits::{HasTag, ToDebug}; pub(crate) use crate::Text; +pub(crate) use async_stream::stream as async_stream; pub(crate) use bigdecimal::BigDecimal; pub(crate) use futures::stream::BoxStream; pub(crate) use futures::{FutureExt, Stream, StreamExt}; -pub(crate) use futures_async_stream::async_stream_block; 
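A crate-internal sketch of what the new ToInputStream helper in the prelude enables, assuming the bounds shown in this hunk (a Send + 'static stream whose items convert into Result<Tagged<Value>, ShellError>); how InputStream is consumed afterwards is outside this diff:

// Turn a batch of already-tagged rows into an InputStream.
fn rows_to_input(rows: Vec<Tagged<Value>>) -> InputStream {
    let ok_rows: Vec<Result<Tagged<Value>, ShellError>> = rows.into_iter().map(Ok).collect();
    futures::stream::iter(ok_rows).to_input_stream()
}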
pub(crate) use num_bigint::BigInt; pub(crate) use num_traits::cast::{FromPrimitive, ToPrimitive}; pub(crate) use num_traits::identities::Zero; @@ -100,6 +109,22 @@ where } } +pub trait ToInputStream { + fn to_input_stream(self) -> InputStream; +} + +impl ToInputStream for T +where + T: Stream + Send + 'static, + U: Into, ShellError>>, +{ + fn to_input_stream(self) -> InputStream { + InputStream { + values: self.map(|item| item.into().unwrap()).boxed(), + } + } +} + pub trait ToOutputStream { fn to_output_stream(self) -> OutputStream; } diff --git a/src/shell.rs b/src/shell.rs index caa1443ac7..14ec1c6755 100644 --- a/src/shell.rs +++ b/src/shell.rs @@ -1,9 +1,9 @@ pub(crate) mod completer; pub(crate) mod filesystem_shell; +pub(crate) mod help_shell; pub(crate) mod helper; pub(crate) mod shell; pub(crate) mod shell_manager; pub(crate) mod value_shell; -pub(crate) mod help_shell; pub(crate) use helper::Helper; diff --git a/src/shell/filesystem_shell.rs b/src/shell/filesystem_shell.rs index 1bb7796b5d..7b8310141c 100644 --- a/src/shell/filesystem_shell.rs +++ b/src/shell/filesystem_shell.rs @@ -3,8 +3,7 @@ use crate::commands::cp::CopyArgs; use crate::commands::mkdir::MkdirArgs; use crate::commands::mv::MoveArgs; use crate::commands::rm::RemoveArgs; -use crate::context::SourceMap; -use crate::object::dir_entry_dict; +use crate::data::dir_entry_dict; use crate::prelude::*; use crate::shell::completer::NuCompleter; use crate::shell::shell::Shell; @@ -12,9 +11,12 @@ use crate::utils::FileStructure; use rustyline::completion::FilenameCompleter; use rustyline::hint::{Hinter, HistoryHinter}; use std::path::{Path, PathBuf}; +use std::sync::atomic::Ordering; +use trash as SendToTrash; pub struct FilesystemShell { pub(crate) path: String, + pub(crate) last_path: String, completer: NuCompleter, hinter: HistoryHinter, } @@ -29,6 +31,7 @@ impl Clone for FilesystemShell { fn clone(&self) -> Self { FilesystemShell { path: self.path.clone(), + last_path: self.path.clone(), completer: NuCompleter { file_completer: FilenameCompleter::new(), commands: self.completer.commands.clone(), @@ -44,6 +47,7 @@ impl FilesystemShell { Ok(FilesystemShell { path: path.to_string_lossy().to_string(), + last_path: path.to_string_lossy().to_string(), completer: NuCompleter { file_completer: FilenameCompleter::new(), commands, @@ -56,8 +60,10 @@ impl FilesystemShell { path: String, commands: CommandRegistry, ) -> Result { + let last_path = path.clone(); Ok(FilesystemShell { path, + last_path, completer: NuCompleter { file_completer: FilenameCompleter::new(), commands, @@ -68,7 +74,7 @@ impl FilesystemShell { } impl Shell for FilesystemShell { - fn name(&self, _source_map: &SourceMap) -> String { + fn name(&self) -> String { "filesystem".to_string() } @@ -76,95 +82,107 @@ impl Shell for FilesystemShell { dirs::home_dir() } - fn ls(&self, args: EvaluatedWholeStreamCommandArgs) -> Result { + fn ls( + &self, + pattern: Option>, + context: &RunnableContext, + ) -> Result { let cwd = self.path(); let mut full_path = PathBuf::from(self.path()); - match &args.nth(0) { - Some(value) => full_path.push(Path::new(&value.as_path()?)), + match &pattern { + Some(value) => full_path.push((*value).as_ref()), _ => {} } - let entries: Vec<_> = match glob::glob(&full_path.to_string_lossy()) { - Ok(files) => files.collect(), + let ctrl_c = context.ctrl_c.clone(); + let name_tag = context.name.clone(); + + //If it's not a glob, try to display the contents of the entry if it's a directory + let lossy_path = full_path.to_string_lossy(); + if 
!lossy_path.contains("*") && !lossy_path.contains("?") { + let entry = Path::new(&full_path); + if entry.is_dir() { + let entries = std::fs::read_dir(&entry); + let entries = match entries { + Err(e) => { + if let Some(s) = pattern { + return Err(ShellError::labeled_error( + e.to_string(), + e.to_string(), + s.tag(), + )); + } else { + return Err(ShellError::labeled_error( + e.to_string(), + e.to_string(), + name_tag, + )); + } + } + Ok(o) => o, + }; + let stream = async_stream! { + for entry in entries { + if ctrl_c.load(Ordering::SeqCst) { + break; + } + if let Ok(entry) = entry { + let filepath = entry.path(); + if let Ok(metadata) = std::fs::symlink_metadata(&filepath) { + let filename = if let Ok(fname) = filepath.strip_prefix(&cwd) { + fname + } else { + Path::new(&filepath) + }; + + let value = dir_entry_dict(filename, &metadata, &name_tag)?; + yield ReturnSuccess::value(value); + } + } + } + }; + return Ok(stream.to_output_stream()); + } + } + + let entries = match glob::glob(&full_path.to_string_lossy()) { + Ok(files) => files, Err(_) => { - if let Some(source) = args.nth(0) { + if let Some(source) = pattern { return Err(ShellError::labeled_error( "Invalid pattern", "Invalid pattern", - source.span(), + source.tag(), )); } else { - return Err(ShellError::string("Invalid pattern.")); + return Err(ShellError::untagged_runtime_error("Invalid pattern.")); } } }; - let mut shell_entries = VecDeque::new(); - - // If this is a single entry, try to display the contents of the entry if it's a directory - if entries.len() == 1 { - if let Ok(entry) = &entries[0] { - if entry.is_dir() { - let entries = std::fs::read_dir(&full_path); - - let entries = match entries { - Err(e) => { - if let Some(s) = args.nth(0) { - return Err(ShellError::labeled_error( - e.to_string(), - e.to_string(), - s.span(), - )); - } else { - return Err(ShellError::labeled_error( - e.to_string(), - e.to_string(), - args.name_span(), - )); - } - } - Ok(o) => o, - }; - for entry in entries { - let entry = entry?; - let filepath = entry.path(); - let filename = if let Ok(fname) = filepath.strip_prefix(&cwd) { + // Enumerate the entries from the glob and add each + let stream = async_stream! 
{ + for entry in entries { + if ctrl_c.load(Ordering::SeqCst) { + break; + } + if let Ok(entry) = entry { + if let Ok(metadata) = std::fs::symlink_metadata(&entry) { + let filename = if let Ok(fname) = entry.strip_prefix(&cwd) { fname } else { - Path::new(&filepath) + Path::new(&entry) }; - let value = dir_entry_dict( - filename, - &entry.metadata()?, - Tag::unknown_origin(args.call_info.name_span), - )?; - shell_entries.push_back(ReturnSuccess::value(value)) + + if let Ok(value) = dir_entry_dict(filename, &metadata, &name_tag) { + yield ReturnSuccess::value(value); + } } - return Ok(shell_entries.to_output_stream()); } } - } - - // Enumerate the entries from the glob and add each - for entry in entries { - if let Ok(entry) = entry { - let filename = if let Ok(fname) = entry.strip_prefix(&cwd) { - fname - } else { - Path::new(&entry) - }; - let metadata = std::fs::metadata(&entry)?; - let value = dir_entry_dict( - filename, - &metadata, - Tag::unknown_origin(args.call_info.name_span), - )?; - shell_entries.push_back(ReturnSuccess::value(value)) - } - } - - Ok(shell_entries.to_output_stream()) + }; + Ok(stream.to_output_stream()) } fn cd(&self, args: EvaluatedWholeStreamCommandArgs) -> Result { @@ -175,44 +193,46 @@ impl Shell for FilesystemShell { return Err(ShellError::labeled_error( "Can not change to home directory", "can not go to home", - args.call_info.name_span, + &args.call_info.name_tag, )) } }, Some(v) => { let target = v.as_path()?; - let path = PathBuf::from(self.path()); - match dunce::canonicalize(path.join(target).as_path()) { - Ok(p) => p, - Err(_) => { + + if PathBuf::from("-") == target { + PathBuf::from(&self.last_path) + } else { + let path = PathBuf::from(self.path()); + + if target.exists() && !target.is_dir() { return Err(ShellError::labeled_error( "Can not change to directory", - "directory not found", - v.span().clone(), + "is not a directory", + v.tag().clone(), )); } + + match dunce::canonicalize(path.join(&target)) { + Ok(p) => p, + Err(_) => { + return Err(ShellError::labeled_error( + "Can not change to directory", + "directory not found", + v.tag().clone(), + )) + } + } } } }; let mut stream = VecDeque::new(); - match std::env::set_current_dir(&path) { - Ok(_) => {} - Err(_) => { - if let Some(directory) = args.nth(0) { - return Err(ShellError::labeled_error( - "Can not change to directory", - "directory not found", - directory.span(), - )); - } else { - return Err(ShellError::string("Can not change to directory")); - } - } - } + stream.push_back(ReturnSuccess::change_cwd( path.to_string_lossy().to_string(), )); + Ok(stream.into()) } @@ -223,10 +243,10 @@ impl Shell for FilesystemShell { dst, recursive, }: CopyArgs, - name: Span, + name: Tag, path: &str, ) -> Result { - let name_span = name; + let name_tag = name; let mut source = PathBuf::from(path); let mut destination = PathBuf::from(path); @@ -281,7 +301,7 @@ impl Shell for FilesystemShell { return Err(ShellError::labeled_error( e.to_string(), e.to_string(), - name_span, + name_tag, )); } Ok(o) => o, @@ -297,7 +317,7 @@ impl Shell for FilesystemShell { return Err(ShellError::labeled_error( e.to_string(), e.to_string(), - name_span, + name_tag, )); } Ok(o) => o, @@ -333,7 +353,7 @@ impl Shell for FilesystemShell { return Err(ShellError::labeled_error( e.to_string(), e.to_string(), - name_span, + name_tag, )); } Ok(o) => o, @@ -347,7 +367,7 @@ impl Shell for FilesystemShell { return Err(ShellError::labeled_error( e.to_string(), e.to_string(), - name_span, + name_tag, )); } Ok(o) => o, @@ -361,7 +381,7 @@ 
impl Shell for FilesystemShell { return Err(ShellError::labeled_error( "Copy aborted. Not a valid path", "Copy aborted. Not a valid path", - name_span, + name_tag, )) } } @@ -371,7 +391,7 @@ impl Shell for FilesystemShell { return Err(ShellError::labeled_error( e.to_string(), e.to_string(), - name_span, + name_tag, )); } Ok(o) => o, @@ -407,7 +427,7 @@ impl Shell for FilesystemShell { return Err(ShellError::labeled_error( e.to_string(), e.to_string(), - name_span, + name_tag, )); } Ok(o) => o, @@ -421,7 +441,7 @@ impl Shell for FilesystemShell { return Err(ShellError::labeled_error( e.to_string(), e.to_string(), - name_span, + name_tag, )); } Ok(o) => o, @@ -454,7 +474,7 @@ impl Shell for FilesystemShell { return Err(ShellError::labeled_error( "Copy aborted. Not a valid path", "Copy aborted. Not a valid path", - name_span, + name_tag, )) } } @@ -481,7 +501,7 @@ impl Shell for FilesystemShell { return Err(ShellError::labeled_error( "Copy aborted. Not a valid destination", "Copy aborted. Not a valid destination", - name_span, + name_tag, )) } } @@ -490,7 +510,7 @@ impl Shell for FilesystemShell { return Err(ShellError::labeled_error( format!("Copy aborted. (Does {:?} exist?)", destination_file_name), format!("Copy aborted. (Does {:?} exist?)", destination_file_name), - &dst.span(), + dst.tag(), )); } } @@ -501,7 +521,7 @@ impl Shell for FilesystemShell { fn mkdir( &self, MkdirArgs { rest: directories }: MkdirArgs, - name: Span, + name: Tag, path: &str, ) -> Result { let full_path = PathBuf::from(path); @@ -526,7 +546,7 @@ impl Shell for FilesystemShell { return Err(ShellError::labeled_error( reason.to_string(), reason.to_string(), - dir.span(), + dir.tag(), )) } Ok(_) => {} @@ -539,10 +559,10 @@ impl Shell for FilesystemShell { fn mv( &self, MoveArgs { src, dst }: MoveArgs, - name: Span, + name: Tag, path: &str, ) -> Result { - let name_span = name; + let name_tag = name; let mut source = PathBuf::from(path); let mut destination = PathBuf::from(path); @@ -568,7 +588,7 @@ impl Shell for FilesystemShell { return Err(ShellError::labeled_error( "Rename aborted. Not a valid destination", "Rename aborted. Not a valid destination", - dst.span(), + dst.tag(), )) } } @@ -582,7 +602,7 @@ impl Shell for FilesystemShell { return Err(ShellError::labeled_error( "Rename aborted. Not a valid entry name", "Rename aborted. Not a valid entry name", - name_span, + name_tag, )) } }; @@ -594,7 +614,7 @@ impl Shell for FilesystemShell { return Err(ShellError::labeled_error( format!("Rename aborted. {:}", e.to_string()), format!("Rename aborted. {:}", e.to_string()), - name_span, + name_tag, )) } }; @@ -618,7 +638,7 @@ impl Shell for FilesystemShell { destination_file_name, e.to_string(), ), - name_span, + name_tag, )); } Ok(o) => o, @@ -641,7 +661,7 @@ impl Shell for FilesystemShell { destination_file_name, e.to_string(), ), - name_span, + name_tag, )); } Ok(o) => o, @@ -663,7 +683,7 @@ impl Shell for FilesystemShell { destination_file_name, e.to_string(), ), - name_span, + name_tag, )); } Ok(o) => o, @@ -716,7 +736,7 @@ impl Shell for FilesystemShell { destination_file_name, e.to_string(), ), - name_span, + name_tag, )); } Ok(o) => o, @@ -740,7 +760,7 @@ impl Shell for FilesystemShell { destination_file_name, e.to_string(), ), - name_span, + name_tag, )); } Ok(o) => o, @@ -763,7 +783,7 @@ impl Shell for FilesystemShell { destination_file_name, e.to_string(), ), - name_span, + name_tag, )); } Ok(o) => o, @@ -795,7 +815,7 @@ impl Shell for FilesystemShell { return Err(ShellError::labeled_error( "Rename aborted. 
Not a valid entry name", "Rename aborted. Not a valid entry name", - name_span, + name_tag, )) } }; @@ -819,7 +839,7 @@ impl Shell for FilesystemShell { destination_file_name, e.to_string(), ), - name_span, + name_tag, )); } Ok(o) => o, @@ -831,7 +851,7 @@ impl Shell for FilesystemShell { return Err(ShellError::labeled_error( format!("Rename aborted. (Does {:?} exist?)", destination_file_name), format!("Rename aborted. (Does {:?} exist?)", destination_file_name), - dst.span(), + dst.tag(), )); } } @@ -841,17 +861,21 @@ impl Shell for FilesystemShell { fn rm( &self, - RemoveArgs { target, recursive }: RemoveArgs, - name: Span, + RemoveArgs { + target, + recursive, + trash, + }: RemoveArgs, + name: Tag, path: &str, ) -> Result { - let name_span = name; + let name_tag = name; if target.item.to_str() == Some(".") || target.item.to_str() == Some("..") { return Err(ShellError::labeled_error( "Remove aborted. \".\" or \"..\" may not be removed.", "Remove aborted. \".\" or \"..\" may not be removed.", - target.span(), + target.tag(), )); } @@ -883,7 +907,7 @@ impl Shell for FilesystemShell { return Err(ShellError::labeled_error( format!("{:?} is a directory. Try using \"--recursive\".", file), format!("{:?} is a directory. Try using \"--recursive\".", file), - target.span(), + target.tag(), )); } } @@ -900,7 +924,7 @@ impl Shell for FilesystemShell { return Err(ShellError::labeled_error( "Remove aborted. Not a valid path", "Remove aborted. Not a valid path", - name_span, + name_tag, )) } } @@ -920,11 +944,13 @@ impl Shell for FilesystemShell { "Directory {:?} found somewhere inside. Try using \"--recursive\".", path_file_name ), - target.span(), + target.tag(), )); } - if path.is_dir() { + if trash.item { + SendToTrash::remove(path).unwrap(); + } else if path.is_dir() { std::fs::remove_dir_all(&path)?; } else if path.is_file() { std::fs::remove_file(&path)?; @@ -934,7 +960,7 @@ impl Shell for FilesystemShell { return Err(ShellError::labeled_error( format!("Remove aborted. {:}", e.to_string()), format!("Remove aborted. 
{:}", e.to_string()), - name_span, + name_tag, )) } } @@ -947,6 +973,28 @@ impl Shell for FilesystemShell { self.path.clone() } + fn pwd(&self, args: EvaluatedWholeStreamCommandArgs) -> Result { + let path = PathBuf::from(self.path()); + let p = match dunce::canonicalize(path.as_path()) { + Ok(p) => p, + Err(_) => { + return Err(ShellError::labeled_error( + "unable to show current directory", + "pwd command failed", + &args.call_info.name_tag, + )); + } + }; + + let mut stream = VecDeque::new(); + stream.push_back(ReturnSuccess::value( + Value::Primitive(Primitive::String(p.to_string_lossy().to_string())) + .tagged(&args.call_info.name_tag), + )); + + Ok(stream.into()) + } + fn set_path(&mut self, path: String) { let pathbuf = PathBuf::from(&path); let path = match dunce::canonicalize(pathbuf.as_path()) { @@ -959,6 +1007,7 @@ impl Shell for FilesystemShell { pathbuf } }; + self.last_path = self.path.clone(); self.path = path.to_string_lossy().to_string(); } diff --git a/src/shell/help_shell.rs b/src/shell/help_shell.rs index 59f141b8c0..7c0e74bde4 100644 --- a/src/shell/help_shell.rs +++ b/src/shell/help_shell.rs @@ -3,8 +3,7 @@ use crate::commands::cp::CopyArgs; use crate::commands::mkdir::MkdirArgs; use crate::commands::mv::MoveArgs; use crate::commands::rm::RemoveArgs; -use crate::context::SourceMap; -use crate::object::{TaggedDictBuilder, command_dict}; +use crate::data::{command_dict, TaggedDictBuilder}; use crate::prelude::*; use crate::shell::shell::Shell; use std::ffi::OsStr; @@ -26,13 +25,16 @@ impl HelpShell { let value = command_dict(registry.get_command(&cmd).unwrap(), Tag::unknown()); spec.insert("name", cmd); - spec.insert("description", value.get_data_by_key("usage").unwrap().as_string().unwrap()); + spec.insert( + "description", + value.get_data_by_key("usage").unwrap().as_string().unwrap(), + ); spec.insert_tagged("details", value); specs.push(spec.into_tagged_value()); } - cmds.insert("help", Value::List(specs)); + cmds.insert("help", Value::Table(specs)); Ok(HelpShell { path: "/help".to_string(), @@ -47,8 +49,10 @@ impl HelpShell { let mut sh = HelpShell::index(®istry)?; if let Tagged { - item: Value::Primitive(Primitive::String(name)), .. - } = cmd { + item: Value::Primitive(Primitive::String(name)), + .. + } = cmd + { sh.set_path(format!("/help/{:}/details", name)); } @@ -76,7 +80,7 @@ impl HelpShell { } match viewed { Tagged { - item: Value::List(l), + item: Value::Table(l), .. 
} => { for item in l { @@ -93,11 +97,11 @@ impl HelpShell { } impl Shell for HelpShell { - fn name(&self, source_map: &SourceMap) -> String { - let origin_name = self.value.origin_name(source_map); + fn name(&self) -> String { + let anchor_name = self.value.anchor_name(); format!( "{}", - match origin_name { + match anchor_name { Some(x) => format!("{{{}}}", x), None => format!("<{}>", self.value.item.type_name(),), } @@ -112,12 +116,20 @@ impl Shell for HelpShell { self.path.clone() } + fn pwd(&self, _: EvaluatedWholeStreamCommandArgs) -> Result { + Ok(OutputStream::empty()) + } + fn set_path(&mut self, path: String) { let _ = std::env::set_current_dir(&path); self.path = path.clone(); } - fn ls(&self, _args: EvaluatedWholeStreamCommandArgs) -> Result { + fn ls( + &self, + _pattern: Option>, + _context: &RunnableContext, + ) -> Result { Ok(self .commands() .map(|x| ReturnSuccess::value(x)) @@ -152,24 +164,19 @@ impl Shell for HelpShell { Ok(stream.into()) } - fn cp(&self, _args: CopyArgs, _name: Span, _path: &str) -> Result { + fn cp(&self, _args: CopyArgs, _name: Tag, _path: &str) -> Result { Ok(OutputStream::empty()) } - fn mv(&self, _args: MoveArgs, _name: Span, _path: &str) -> Result { + fn mv(&self, _args: MoveArgs, _name: Tag, _path: &str) -> Result { Ok(OutputStream::empty()) } - fn mkdir( - &self, - _args: MkdirArgs, - _name: Span, - _path: &str, - ) -> Result { + fn mkdir(&self, _args: MkdirArgs, _name: Tag, _path: &str) -> Result { Ok(OutputStream::empty()) } - fn rm(&self, _args: RemoveArgs, _name: Span, _path: &str) -> Result { + fn rm(&self, _args: RemoveArgs, _name: Tag, _path: &str) -> Result { Ok(OutputStream::empty()) } diff --git a/src/shell/helper.rs b/src/shell/helper.rs index 9feffcb4ce..8f38a10002 100644 --- a/src/shell/helper.rs +++ b/src/shell/helper.rs @@ -1,10 +1,11 @@ +use crate::context::Context; +use crate::parser::hir::syntax_shape::{color_fallible_syntax, FlatShape, PipelineShape}; +use crate::parser::hir::TokensIterator; use crate::parser::nom_input; use crate::parser::parse::token_tree::TokenNode; -use crate::parser::parse::tokens::RawToken; -use crate::parser::{Pipeline, PipelineElement}; -use crate::shell::shell_manager::ShellManager; -use crate::Tagged; +use crate::{Span, Spanned, SpannedItem, Tag, Tagged, Text}; use ansi_term::Color; +use log::{log_enabled, trace}; use rustyline::completion::Completer; use rustyline::error::ReadlineError; use rustyline::highlight::Highlighter; @@ -12,12 +13,12 @@ use rustyline::hint::Hinter; use std::borrow::Cow::{self, Owned}; pub(crate) struct Helper { - helper: ShellManager, + context: Context, } impl Helper { - pub(crate) fn new(helper: ShellManager) -> Helper { - Helper { helper } + pub(crate) fn new(context: Context) -> Helper { + Helper { context } } } @@ -29,30 +30,13 @@ impl Completer for Helper { pos: usize, ctx: &rustyline::Context<'_>, ) -> Result<(usize, Vec), ReadlineError> { - self.helper.complete(line, pos, ctx) + self.context.shell_manager.complete(line, pos, ctx) } } -/* -impl Completer for Helper { - type Candidate = rustyline::completion::Pair; - - fn complete( - &self, - line: &str, - pos: usize, - ctx: &rustyline::Context<'_>, - ) -> Result<(usize, Vec), ReadlineError> { - let result = self.helper.complete(line, pos, ctx); - - result.map(|(x, y)| (x, y.iter().map(|z| z.into()).collect())) - } -} -*/ - impl Hinter for Helper { fn hint(&self, line: &str, pos: usize, ctx: &rustyline::Context<'_>) -> Option { - self.helper.hint(line, pos, ctx) + self.context.shell_manager.hint(line, pos, ctx) } } @@ 
-77,24 +61,50 @@ impl Highlighter for Helper { Ok(v) => v, }; - let Pipeline { parts, post_ws } = pipeline; - let mut iter = parts.into_iter(); + let tokens = vec![TokenNode::Pipeline(pipeline.clone().spanned(v.span()))]; + let mut tokens = TokensIterator::all(&tokens[..], v.span()); - loop { - match iter.next() { - None => { - if let Some(ws) = post_ws { - out.push_str(ws.slice(line)); - } + let text = Text::from(line); + let expand_context = self + .context + .expand_context(&text, Span::new(0, line.len() - 1)); - return Cow::Owned(out); - } - Some(token) => { - let styled = paint_pipeline_element(&token, line); - out.push_str(&styled.to_string()); - } - } + #[cfg(not(coloring_in_tokens))] + let shapes = { + let mut shapes = vec![]; + color_fallible_syntax( + &PipelineShape, + &mut tokens, + &expand_context, + &mut shapes, + ) + .unwrap(); + shapes + }; + + #[cfg(coloring_in_tokens)] + let shapes = { + // We just constructed a token list that only contains a pipeline, so it can't fail + color_fallible_syntax(&PipelineShape, &mut tokens, &expand_context).unwrap(); + tokens.with_tracer(|_, tracer| tracer.finish()); + + tokens.state().shapes() + }; + + trace!(target: "nu::color_syntax", "{:#?}", tokens.tracer()); + + if log_enabled!(target: "nu::color_syntax", log::Level::Trace) { + println!(""); + ptree::print_tree(&tokens.tracer().clone().print(Text::from(line))).unwrap(); + println!(""); } + + for shape in shapes { + let styled = paint_flat_shape(&shape, line); + out.push_str(&styled); + } + + Cow::Owned(out) } } } @@ -104,75 +114,55 @@ impl Highlighter for Helper { } } -fn paint_token_node(token_node: &TokenNode, line: &str) -> String { - let styled = match token_node { - TokenNode::Call(..) => Color::Cyan.bold().paint(token_node.span().slice(line)), - TokenNode::Whitespace(..) => Color::White.normal().paint(token_node.span().slice(line)), - TokenNode::Flag(..) => Color::Black.bold().paint(token_node.span().slice(line)), - TokenNode::Member(..) => Color::Yellow.bold().paint(token_node.span().slice(line)), - TokenNode::Path(..) => Color::Green.bold().paint(token_node.span().slice(line)), - TokenNode::Error(..) => Color::Red.bold().paint(token_node.span().slice(line)), - TokenNode::Delimited(..) => Color::White.paint(token_node.span().slice(line)), - TokenNode::Operator(..) => Color::White.normal().paint(token_node.span().slice(line)), - TokenNode::Pipeline(..) => Color::Blue.normal().paint(token_node.span().slice(line)), - TokenNode::Token(Tagged { - item: RawToken::Number(..), - .. - }) => Color::Purple.bold().paint(token_node.span().slice(line)), - TokenNode::Token(Tagged { - item: RawToken::Size(..), - .. - }) => Color::Purple.bold().paint(token_node.span().slice(line)), - TokenNode::Token(Tagged { - item: RawToken::String(..), - .. - }) => Color::Green.normal().paint(token_node.span().slice(line)), - TokenNode::Token(Tagged { - item: RawToken::Variable(..), - .. - }) => Color::Yellow.bold().paint(token_node.span().slice(line)), - TokenNode::Token(Tagged { - item: RawToken::Bare, - .. - }) => Color::Green.normal().paint(token_node.span().slice(line)), - TokenNode::Token(Tagged { - item: RawToken::External(..), - .. 
- }) => Color::Cyan.bold().paint(token_node.span().slice(line)), - }; +#[allow(unused)] +fn vec_tag(input: Vec>) -> Option { + let mut iter = input.iter(); + let first = iter.next()?.tag.clone(); + let last = iter.last(); - styled.to_string() + Some(match last { + None => first, + Some(last) => first.until(&last.tag), + }) } -fn paint_pipeline_element(pipeline_element: &PipelineElement, line: &str) -> String { - let mut styled = String::new(); - - if let Some(ws) = pipeline_element.pre_ws { - styled.push_str(&Color::White.normal().paint(ws.slice(line))); - } - - styled.push_str( - &Color::Cyan - .bold() - .paint(pipeline_element.call().head().span().slice(line)) - .to_string(), - ); - - if let Some(children) = pipeline_element.call().children() { - for child in children { - styled.push_str(&paint_token_node(child, line)); +fn paint_flat_shape(flat_shape: &Spanned, line: &str) -> String { + let style = match &flat_shape.item { + FlatShape::OpenDelimiter(_) => Color::White.normal(), + FlatShape::CloseDelimiter(_) => Color::White.normal(), + FlatShape::ItVariable => Color::Purple.bold(), + FlatShape::Variable => Color::Purple.normal(), + FlatShape::Operator => Color::Yellow.normal(), + FlatShape::Dot => Color::White.normal(), + FlatShape::InternalCommand => Color::Cyan.bold(), + FlatShape::ExternalCommand => Color::Cyan.normal(), + FlatShape::ExternalWord => Color::Black.bold(), + FlatShape::BareMember => Color::Yellow.bold(), + FlatShape::StringMember => Color::Yellow.bold(), + FlatShape::String => Color::Green.normal(), + FlatShape::Path => Color::Cyan.normal(), + FlatShape::GlobPattern => Color::Cyan.bold(), + FlatShape::Word => Color::Green.normal(), + FlatShape::Pipe => Color::Purple.bold(), + FlatShape::Flag => Color::Black.bold(), + FlatShape::ShorthandFlag => Color::Black.bold(), + FlatShape::Int => Color::Purple.bold(), + FlatShape::Decimal => Color::Purple.bold(), + FlatShape::Whitespace => Color::White.normal(), + FlatShape::Error => Color::Red.bold(), + FlatShape::Size { number, unit } => { + let number = number.slice(line); + let unit = unit.slice(line); + return format!( + "{}{}", + Color::Purple.bold().paint(number), + Color::Cyan.bold().paint(unit) + ); } - } + }; - if let Some(ws) = pipeline_element.post_ws { - styled.push_str(&Color::White.normal().paint(ws.slice(line))); - } - - if let Some(_) = pipeline_element.post_pipe { - styled.push_str(&Color::Purple.paint("|")); - } - - styled.to_string() + let body = flat_shape.span.slice(line); + style.paint(body).to_string() } impl rustyline::Helper for Helper {} diff --git a/src/shell/shell.rs b/src/shell/shell.rs index dc1f104b6f..507fc0517b 100644 --- a/src/shell/shell.rs +++ b/src/shell/shell.rs @@ -3,23 +3,27 @@ use crate::commands::cp::CopyArgs; use crate::commands::mkdir::MkdirArgs; use crate::commands::mv::MoveArgs; use crate::commands::rm::RemoveArgs; -use crate::context::SourceMap; use crate::errors::ShellError; use crate::prelude::*; use crate::stream::OutputStream; use std::path::PathBuf; pub trait Shell: std::fmt::Debug { - fn name(&self, source_map: &SourceMap) -> String; + fn name(&self) -> String; fn homedir(&self) -> Option; - fn ls(&self, args: EvaluatedWholeStreamCommandArgs) -> Result; + fn ls( + &self, + pattern: Option>, + context: &RunnableContext, + ) -> Result; fn cd(&self, args: EvaluatedWholeStreamCommandArgs) -> Result; - fn cp(&self, args: CopyArgs, name: Span, path: &str) -> Result; - fn mkdir(&self, args: MkdirArgs, name: Span, path: &str) -> Result; - fn mv(&self, args: MoveArgs, name: Span, path: 
&str) -> Result; - fn rm(&self, args: RemoveArgs, name: Span, path: &str) -> Result; + fn cp(&self, args: CopyArgs, name: Tag, path: &str) -> Result; + fn mkdir(&self, args: MkdirArgs, name: Tag, path: &str) -> Result; + fn mv(&self, args: MoveArgs, name: Tag, path: &str) -> Result; + fn rm(&self, args: RemoveArgs, name: Tag, path: &str) -> Result; fn path(&self) -> String; + fn pwd(&self, args: EvaluatedWholeStreamCommandArgs) -> Result; fn set_path(&mut self, path: String); fn complete( diff --git a/src/shell/shell_manager.rs b/src/shell/shell_manager.rs index 136d9b0173..149fdd58d1 100644 --- a/src/shell/shell_manager.rs +++ b/src/shell/shell_manager.rs @@ -10,18 +10,19 @@ use crate::shell::shell::Shell; use crate::stream::OutputStream; use std::error::Error; use std::path::PathBuf; +use std::sync::atomic::{AtomicUsize, Ordering}; use std::sync::{Arc, Mutex}; #[derive(Clone, Debug)] pub struct ShellManager { - pub(crate) current_shell: usize, + pub(crate) current_shell: Arc, pub(crate) shells: Arc>>>, } impl ShellManager { pub fn basic(commands: CommandRegistry) -> Result> { Ok(ShellManager { - current_shell: 0, + current_shell: Arc::new(AtomicUsize::new(0)), shells: Arc::new(Mutex::new(vec![Box::new(FilesystemShell::basic( commands, )?)])), @@ -30,24 +31,29 @@ impl ShellManager { pub fn insert_at_current(&mut self, shell: Box) { self.shells.lock().unwrap().push(shell); - self.current_shell = self.shells.lock().unwrap().len() - 1; + self.current_shell + .store(self.shells.lock().unwrap().len() - 1, Ordering::SeqCst); self.set_path(self.path()); } + pub fn current_shell(&self) -> usize { + self.current_shell.load(Ordering::SeqCst) + } + pub fn remove_at_current(&mut self) { { let mut shells = self.shells.lock().unwrap(); if shells.len() > 0 { - if self.current_shell == shells.len() - 1 { + if self.current_shell() == shells.len() - 1 { shells.pop(); let new_len = shells.len(); if new_len > 0 { - self.current_shell = new_len - 1; + self.current_shell.store(new_len - 1, Ordering::SeqCst); } else { return; } } else { - shells.remove(self.current_shell); + shells.remove(self.current_shell()); } } } @@ -59,11 +65,17 @@ impl ShellManager { } pub fn path(&self) -> String { - self.shells.lock().unwrap()[self.current_shell].path() + self.shells.lock().unwrap()[self.current_shell()].path() + } + + pub fn pwd(&self, args: EvaluatedWholeStreamCommandArgs) -> Result { + let env = self.shells.lock().unwrap(); + + env[self.current_shell()].pwd(args) } pub fn set_path(&mut self, path: String) { - self.shells.lock().unwrap()[self.current_shell].set_path(path) + self.shells.lock().unwrap()[self.current_shell()].set_path(path) } pub fn complete( @@ -72,20 +84,21 @@ impl ShellManager { pos: usize, ctx: &rustyline::Context<'_>, ) -> Result<(usize, Vec), rustyline::error::ReadlineError> { - self.shells.lock().unwrap()[self.current_shell].complete(line, pos, ctx) + self.shells.lock().unwrap()[self.current_shell()].complete(line, pos, ctx) } pub fn hint(&self, line: &str, pos: usize, ctx: &rustyline::Context<'_>) -> Option { - self.shells.lock().unwrap()[self.current_shell].hint(line, pos, ctx) + self.shells.lock().unwrap()[self.current_shell()].hint(line, pos, ctx) } pub fn next(&mut self) { { let shell_len = self.shells.lock().unwrap().len(); - if self.current_shell == (shell_len - 1) { - self.current_shell = 0; + if self.current_shell() == (shell_len - 1) { + self.current_shell.store(0, Ordering::SeqCst); } else { - self.current_shell += 1; + self.current_shell + .store(self.current_shell() + 1, 
Ordering::SeqCst); } } self.set_path(self.path()); @@ -94,10 +107,11 @@ impl ShellManager { pub fn prev(&mut self) { { let shell_len = self.shells.lock().unwrap().len(); - if self.current_shell == 0 { - self.current_shell = shell_len - 1; + if self.current_shell() == 0 { + self.current_shell.store(shell_len - 1, Ordering::SeqCst); } else { - self.current_shell -= 1; + self.current_shell + .store(self.current_shell() - 1, Ordering::SeqCst); } } self.set_path(self.path()); @@ -106,19 +120,23 @@ impl ShellManager { pub fn homedir(&self) -> Option { let env = self.shells.lock().unwrap(); - env[self.current_shell].homedir() + env[self.current_shell()].homedir() } - pub fn ls(&self, args: EvaluatedWholeStreamCommandArgs) -> Result { + pub fn ls( + &self, + path: Option>, + context: &RunnableContext, + ) -> Result { let env = self.shells.lock().unwrap(); - env[self.current_shell].ls(args) + env[self.current_shell()].ls(path, context) } pub fn cd(&self, args: EvaluatedWholeStreamCommandArgs) -> Result { let env = self.shells.lock().unwrap(); - env[self.current_shell].cd(args) + env[self.current_shell()].cd(args) } pub fn cp( @@ -130,13 +148,13 @@ impl ShellManager { match env { Ok(x) => { - let path = x[self.current_shell].path(); - x[self.current_shell].cp(args, context.name, &path) + let path = x[self.current_shell()].path(); + x[self.current_shell()].cp(args, context.name.clone(), &path) } Err(e) => Err(ShellError::labeled_error( format!("Internal error: could not lock {}", e), "Internal error: could not lock", - context.name, + &context.name, )), } } @@ -150,13 +168,13 @@ impl ShellManager { match env { Ok(x) => { - let path = x[self.current_shell].path(); - x[self.current_shell].rm(args, context.name, &path) + let path = x[self.current_shell()].path(); + x[self.current_shell()].rm(args, context.name.clone(), &path) } Err(e) => Err(ShellError::labeled_error( format!("Internal error: could not lock {}", e), "Internal error: could not lock", - context.name, + &context.name, )), } } @@ -170,13 +188,13 @@ impl ShellManager { match env { Ok(x) => { - let path = x[self.current_shell].path(); - x[self.current_shell].mkdir(args, context.name, &path) + let path = x[self.current_shell()].path(); + x[self.current_shell()].mkdir(args, context.name.clone(), &path) } Err(e) => Err(ShellError::labeled_error( format!("Internal error: could not lock {}", e), "Internal error: could not lock", - context.name, + &context.name, )), } } @@ -190,13 +208,13 @@ impl ShellManager { match env { Ok(x) => { - let path = x[self.current_shell].path(); - x[self.current_shell].mv(args, context.name, &path) + let path = x[self.current_shell()].path(); + x[self.current_shell()].mv(args, context.name.clone(), &path) } Err(e) => Err(ShellError::labeled_error( format!("Internal error: could not lock {}", e), "Internal error: could not lock", - context.name, + &context.name, )), } } diff --git a/src/shell/value_shell.rs b/src/shell/value_shell.rs index 4c9cab5a93..0aa9e341bb 100644 --- a/src/shell/value_shell.rs +++ b/src/shell/value_shell.rs @@ -3,28 +3,37 @@ use crate::commands::cp::CopyArgs; use crate::commands::mkdir::MkdirArgs; use crate::commands::mv::MoveArgs; use crate::commands::rm::RemoveArgs; -use crate::context::SourceMap; use crate::prelude::*; use crate::shell::shell::Shell; +use crate::utils::ValueStructure; use std::ffi::OsStr; -use std::path::PathBuf; +use std::path::{Path, PathBuf}; -#[derive(Clone, Debug)] +#[derive(Clone)] pub struct ValueShell { pub(crate) path: String, + pub(crate) last_path: String, 
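+    // last_path remembers the previously active path inside the value, so that
+    // `cd -` can switch back to it (mirroring the new FilesystemShell::last_path field).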
pub(crate) value: Tagged, } +impl std::fmt::Debug for ValueShell { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, "ValueShell @ {}", self.path) + } +} + impl ValueShell { pub fn new(value: Tagged) -> ValueShell { ValueShell { path: "/".to_string(), + last_path: "/".to_string(), value, } } - fn members(&self) -> VecDeque> { + + fn members_under(&self, path: &Path) -> VecDeque> { let mut shell_entries = VecDeque::new(); - let full_path = PathBuf::from(&self.path); + let full_path = path.to_path_buf(); let mut viewed = self.value.clone(); let sep_string = std::path::MAIN_SEPARATOR.to_string(); let sep = OsStr::new(&sep_string); @@ -41,7 +50,7 @@ impl ValueShell { } match viewed { Tagged { - item: Value::List(l), + item: Value::Table(l), .. } => { for item in l { @@ -55,14 +64,18 @@ impl ValueShell { shell_entries } + + fn members(&self) -> VecDeque> { + self.members_under(Path::new(".")) + } } impl Shell for ValueShell { - fn name(&self, source_map: &SourceMap) -> String { - let origin_name = self.value.origin_name(source_map); + fn name(&self) -> String { + let anchor_name = self.value.anchor_name(); format!( "{}", - match origin_name { + match anchor_name { Some(x) => format!("{{{}}}", x), None => format!("<{}>", self.value.item.type_name(),), } @@ -70,18 +83,51 @@ impl Shell for ValueShell { } fn homedir(&self) -> Option { - dirs::home_dir() + Some(PathBuf::from("/")) } - fn ls(&self, _args: EvaluatedWholeStreamCommandArgs) -> Result { + fn ls( + &self, + target: Option>, + context: &RunnableContext, + ) -> Result { + let mut full_path = PathBuf::from(self.path()); + let name_tag = context.name.clone(); + + match &target { + Some(value) => full_path.push(value.as_ref()), + _ => {} + } + + let mut value_system = ValueStructure::new(); + value_system.walk_decorate(&self.value)?; + + if !value_system.exists(&full_path) { + if let Some(target) = &target { + return Err(ShellError::labeled_error( + "Can not list entries inside", + "No such path exists", + target.tag(), + )); + } + + return Err(ShellError::labeled_error( + "Can not list entries inside", + "No such path exists", + name_tag, + )); + } + Ok(self - .members() + .members_under(full_path.as_path()) .map(|x| ReturnSuccess::value(x)) .to_output_stream()) } fn cd(&self, args: EvaluatedWholeStreamCommandArgs) -> Result { - let path = match args.nth(0) { + let destination = args.nth(0); + + let path = match destination { None => "/".to_string(), Some(v) => { let target = v.as_path()?; @@ -90,6 +136,8 @@ impl Shell for ValueShell { if target == PathBuf::from("..") { cwd.pop(); + } else if target == PathBuf::from("-") { + cwd = PathBuf::from(&self.last_path); } else { match target.to_str() { Some(target) => match target.chars().nth(0) { @@ -103,12 +151,31 @@ impl Shell for ValueShell { } }; + let mut value_system = ValueStructure::new(); + value_system.walk_decorate(&self.value)?; + + if !value_system.exists(&PathBuf::from(&path)) { + if let Some(destination) = destination { + return Err(ShellError::labeled_error( + "Can not change to path inside", + "No such path exists", + destination.tag(), + )); + } + + return Err(ShellError::labeled_error( + "Can not change to path inside", + "No such path exists", + &args.call_info.name_tag, + )); + } + let mut stream = VecDeque::new(); stream.push_back(ReturnSuccess::change_cwd(path)); Ok(stream.into()) } - fn cp(&self, _args: CopyArgs, name: Span, _path: &str) -> Result { + fn cp(&self, _args: CopyArgs, name: Tag, _path: &str) -> Result { 
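+        // A ValueShell is a read-only view over structured data, so filesystem-style
+        // operations such as cp just return this labeled "not currently supported" error.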
Err(ShellError::labeled_error( "cp not currently supported on values", "not currently supported", @@ -116,7 +183,7 @@ impl Shell for ValueShell { )) } - fn mv(&self, _args: MoveArgs, name: Span, _path: &str) -> Result { + fn mv(&self, _args: MoveArgs, name: Tag, _path: &str) -> Result { Err(ShellError::labeled_error( "mv not currently supported on values", "not currently supported", @@ -124,7 +191,7 @@ impl Shell for ValueShell { )) } - fn mkdir(&self, _args: MkdirArgs, name: Span, _path: &str) -> Result { + fn mkdir(&self, _args: MkdirArgs, name: Tag, _path: &str) -> Result { Err(ShellError::labeled_error( "mkdir not currently supported on values", "not currently supported", @@ -132,7 +199,7 @@ impl Shell for ValueShell { )) } - fn rm(&self, _args: RemoveArgs, name: Span, _path: &str) -> Result { + fn rm(&self, _args: RemoveArgs, name: Tag, _path: &str) -> Result { Err(ShellError::labeled_error( "rm not currently supported on values", "not currently supported", @@ -144,8 +211,16 @@ impl Shell for ValueShell { self.path.clone() } + fn pwd(&self, args: EvaluatedWholeStreamCommandArgs) -> Result { + let mut stream = VecDeque::new(); + stream.push_back(ReturnSuccess::value( + Value::string(self.path()).tagged(&args.call_info.name_tag), + )); + Ok(stream.into()) + } + fn set_path(&mut self, path: String) { - let _ = std::env::set_current_dir(&path); + self.last_path = self.path.clone(); self.path = path.clone(); } diff --git a/src/stream.rs b/src/stream.rs index 066acb74a1..f6f2d5e2e1 100644 --- a/src/stream.rs +++ b/src/stream.rs @@ -23,6 +23,17 @@ impl InputStream { } } +impl Stream for InputStream { + type Item = Tagged; + + fn poll_next( + mut self: std::pin::Pin<&mut Self>, + cx: &mut std::task::Context<'_>, + ) -> core::task::Poll> { + Stream::poll_next(std::pin::Pin::new(&mut self.values), cx) + } +} + impl From>> for InputStream { fn from(input: BoxStream<'static, Tagged>) -> InputStream { InputStream { values: input } diff --git a/src/traits.rs b/src/traits.rs index 5b022c444f..677d019ad8 100644 --- a/src/traits.rs +++ b/src/traits.rs @@ -12,8 +12,8 @@ impl fmt::Display for Debuggable<'_, T> { } } -pub trait HasSpan { - fn span(&self) -> Span; +pub trait HasTag { + fn tag(&self) -> Tag; } pub trait ToDebug: Sized { diff --git a/src/utils.rs b/src/utils.rs index 4ed9be2540..56fee491b6 100644 --- a/src/utils.rs +++ b/src/utils.rs @@ -1,7 +1,9 @@ +use crate::data::meta::Tagged; +use crate::data::Value; use crate::errors::ShellError; use std::fmt; use std::ops::Div; -use std::path::{Path, PathBuf}; +use std::path::{Component, Path, PathBuf}; pub struct AbsoluteFile { inner: PathBuf, @@ -129,23 +131,131 @@ impl fmt::Display for RelativePath { } } +pub enum TaggedValueIter<'a> { + Empty, + List(indexmap::map::Iter<'a, String, Tagged>), +} + +impl<'a> Iterator for TaggedValueIter<'a> { + type Item = (&'a String, &'a Tagged); + + fn next(&mut self) -> Option { + match self { + TaggedValueIter::Empty => None, + TaggedValueIter::List(iter) => iter.next(), + } + } +} + +impl Tagged { + fn is_dir(&self) -> bool { + match self.item() { + Value::Row(_) | Value::Table(_) => true, + _ => false, + } + } + + fn entries(&self) -> TaggedValueIter<'_> { + match self.item() { + Value::Row(o) => { + let iter = o.entries.iter(); + TaggedValueIter::List(iter) + } + _ => TaggedValueIter::Empty, + } + } +} + +#[derive(Debug, Eq, Ord, PartialEq, PartialOrd)] +pub struct ValueResource { + pub at: usize, + pub loc: PathBuf, +} + +impl ValueResource {} + +pub struct ValueStructure { + pub resources: Vec, +} + +impl 
ValueStructure { + pub fn new() -> ValueStructure { + ValueStructure { + resources: Vec::::new(), + } + } + + pub fn exists(&self, path: &Path) -> bool { + if path == Path::new("/") { + return true; + } + + let path = if path.starts_with("/") { + match path.strip_prefix("/") { + Ok(p) => p, + Err(_) => path, + } + } else { + path + }; + + let comps: Vec<_> = path.components().map(Component::as_os_str).collect(); + + let mut is_there = true; + + for (at, fragment) in comps.iter().enumerate() { + is_there = is_there + && self + .resources + .iter() + .any(|resource| at == resource.at && *fragment == resource.loc.as_os_str()); + } + + is_there + } + + pub fn walk_decorate(&mut self, start: &Tagged) -> Result<(), ShellError> { + self.resources = Vec::::new(); + self.build(start, 0)?; + self.resources.sort(); + + Ok(()) + } + + fn build(&mut self, src: &Tagged, lvl: usize) -> Result<(), ShellError> { + for entry in src.entries() { + let value = entry.1; + let path = entry.0; + + self.resources.push(ValueResource { + at: lvl, + loc: PathBuf::from(path), + }); + + if value.is_dir() { + self.build(value, lvl + 1)?; + } + } + + Ok(()) + } +} + #[derive(Debug, Eq, Ord, PartialEq, PartialOrd)] pub struct Res { - pub loc: PathBuf, pub at: usize, + pub loc: PathBuf, } impl Res {} pub struct FileStructure { - root: PathBuf, pub resources: Vec, } impl FileStructure { pub fn new() -> FileStructure { FileStructure { - root: PathBuf::new(), resources: Vec::::new(), } } @@ -158,10 +268,6 @@ impl FileStructure { self.resources.len() > 0 } - pub fn set_root(&mut self, path: &Path) { - self.root = path.to_path_buf(); - } - pub fn paths_applying_with( &mut self, to: F, @@ -177,7 +283,6 @@ impl FileStructure { } pub fn walk_decorate(&mut self, start_path: &Path) -> Result<(), ShellError> { - self.set_root(&dunce::canonicalize(start_path)?); self.resources = Vec::::new(); self.build(start_path, 0)?; self.resources.sort(); @@ -189,7 +294,7 @@ impl FileStructure { let source = dunce::canonicalize(src)?; if source.is_dir() { - for entry in std::fs::read_dir(&source)? { + for entry in std::fs::read_dir(src)? 
{ let entry = entry?; let path = entry.path(); @@ -215,9 +320,10 @@ impl FileStructure { #[cfg(test)] mod tests { + use super::{FileStructure, Res, ValueResource, ValueStructure}; + use crate::data::meta::{Tag, Tagged}; + use crate::data::{TaggedDictBuilder, Value}; use pretty_assertions::assert_eq; - - use super::{FileStructure, Res}; use std::path::PathBuf; fn fixtures() -> PathBuf { @@ -232,11 +338,95 @@ mod tests { } } + fn structured_sample_record(key: &str, value: &str) -> Tagged { + let mut record = TaggedDictBuilder::new(Tag::unknown()); + record.insert(key.clone(), Value::string(value)); + record.into_tagged_value() + } + + fn sample_nushell_source_code() -> Tagged { + /* + src + commands + plugins => "sys.rs" + tests + helpers => "mod.rs" + */ + + let mut src = TaggedDictBuilder::new(Tag::unknown()); + let mut record = TaggedDictBuilder::new(Tag::unknown()); + + record.insert_tagged("commands", structured_sample_record("plugins", "sys.rs")); + record.insert_tagged("tests", structured_sample_record("helpers", "mod.rs")); + src.insert_tagged("src", record.into_tagged_value()); + + src.into_tagged_value() + } + #[test] - fn prepares_and_decorates_source_files_for_copying() { + fn prepares_and_decorates_value_filesystemlike_sources() { + let mut res = ValueStructure::new(); + + res.walk_decorate(&sample_nushell_source_code()) + .expect("Can not decorate values traversal."); + + assert_eq!( + res.resources, + vec![ + ValueResource { + loc: PathBuf::from("src"), + at: 0, + }, + ValueResource { + loc: PathBuf::from("commands"), + at: 1, + }, + ValueResource { + loc: PathBuf::from("tests"), + at: 1, + }, + ValueResource { + loc: PathBuf::from("helpers"), + at: 2, + }, + ValueResource { + loc: PathBuf::from("plugins"), + at: 2, + }, + ] + ); + } + + #[test] + fn recognizes_if_path_exists_in_value_filesystemlike_sources() { + let mut res = ValueStructure::new(); + + res.walk_decorate(&sample_nushell_source_code()) + .expect("Can not decorate values traversal."); + + assert!(res.exists(&PathBuf::from("/"))); + + assert!(res.exists(&PathBuf::from("src/commands/plugins"))); + assert!(res.exists(&PathBuf::from("src/commands"))); + assert!(res.exists(&PathBuf::from("src/tests"))); + assert!(res.exists(&PathBuf::from("src/tests/helpers"))); + assert!(res.exists(&PathBuf::from("src"))); + + assert!(res.exists(&PathBuf::from("/src/commands/plugins"))); + assert!(res.exists(&PathBuf::from("/src/commands"))); + assert!(res.exists(&PathBuf::from("/src/tests"))); + assert!(res.exists(&PathBuf::from("/src/tests/helpers"))); + assert!(res.exists(&PathBuf::from("/src"))); + + assert!(!res.exists(&PathBuf::from("/not_valid"))); + assert!(!res.exists(&PathBuf::from("/src/not_valid"))); + } + + #[test] + fn prepares_and_decorates_filesystem_source_files() { let mut res = FileStructure::new(); - res.walk_decorate(fixtures().as_path()) + res.walk_decorate(&fixtures()) .expect("Can not decorate files traversal."); assert_eq!( @@ -258,6 +448,10 @@ mod tests { loc: fixtures().join("cargo_sample.toml"), at: 0 }, + Res { + loc: fixtures().join("fileA.txt"), + at: 0 + }, Res { loc: fixtures().join("jonathan.xml"), at: 0 @@ -274,6 +468,10 @@ mod tests { loc: fixtures().join("sample.ini"), at: 0 }, + Res { + loc: fixtures().join("sample.url"), + at: 0 + }, Res { loc: fixtures().join("sgml_description.json"), at: 0 diff --git a/tests/command_cd_tests.rs b/tests/command_cd_tests.rs index 216bcc8c80..8b6592c940 100644 --- a/tests/command_cd_tests.rs +++ b/tests/command_cd_tests.rs @@ -1,12 +1,375 @@ mod helpers; +use 
helpers::{Playground, Stub::*}; +use std::path::PathBuf; + #[test] -fn cd_directory_not_found() { +fn filesystem_change_from_current_directory_using_relative_path() { + Playground::setup("cd_test_1", |dirs, _| { + let actual = nu!( + cwd: dirs.root(), + r#" + cd cd_test_1 + pwd | echo $it + "# + ); + + assert_eq!(PathBuf::from(actual), *dirs.test()); + }) +} + +#[test] +fn filesystem_change_from_current_directory_using_absolute_path() { + Playground::setup("cd_test_2", |dirs, _| { + let actual = nu!( + cwd: dirs.test(), + r#" + cd {} + pwd | echo $it + "#, + dirs.formats() + ); + + assert_eq!(PathBuf::from(actual), dirs.formats()); + }) +} + +#[test] +fn filesystem_switch_back_to_previous_working_directory() { + Playground::setup("cd_test_3", |dirs, sandbox| { + sandbox.mkdir("odin"); + + let actual = nu!( + cwd: dirs.test().join("odin"), + r#" + cd {} + cd - + pwd | echo $it + "#, + dirs.test() + ); + + assert_eq!(PathBuf::from(actual), dirs.test().join("odin")); + }) +} + +#[test] +fn filesytem_change_from_current_directory_using_relative_path_and_dash() { + Playground::setup("cd_test_4", |dirs, sandbox| { + sandbox.within("odin").mkdir("-"); + + let actual = nu!( + cwd: dirs.test(), + r#" + cd odin/- + pwd | echo $it + "# + ); + + assert_eq!(PathBuf::from(actual), dirs.test().join("odin").join("-")); + }) +} + +#[test] +fn filesystem_change_current_directory_to_parent_directory() { + Playground::setup("cd_test_5", |dirs, _| { + let actual = nu!( + cwd: dirs.test(), + r#" + cd .. + pwd | echo $it + "# + ); + + assert_eq!(PathBuf::from(actual), *dirs.root()); + }) +} + +#[test] +fn filesystem_change_to_home_directory() { + Playground::setup("cd_test_6", |dirs, _| { + let actual = nu!( + cwd: dirs.test(), + r#" + cd ~ + pwd | echo $it + "# + ); + + assert_eq!(PathBuf::from(actual), dirs::home_dir().unwrap()); + }) +} + +#[test] +fn filesystem_change_to_a_directory_containing_spaces() { + Playground::setup("cd_test_7", |dirs, sandbox| { + sandbox.mkdir("robalino turner katz"); + + let actual = nu!( + cwd: dirs.test(), + r#" + cd "robalino turner katz" + pwd | echo $it + "# + ); + + assert_eq!( + PathBuf::from(actual), + dirs.test().join("robalino turner katz") + ); + }) +} + +#[test] +fn filesystem_not_a_directory() { + Playground::setup("cd_test_8", |dirs, sandbox| { + sandbox.with_files(vec![EmptyFile("ferris_did_it.txt")]); + + let actual = nu_error!( + cwd: dirs.test(), + "cd ferris_did_it.txt" + ); + + assert!(actual.contains("ferris_did_it.txt")); + assert!(actual.contains("is not a directory")); + }) +} + +#[test] +fn filesystem_directory_not_found() { let actual = nu_error!( - cwd: "tests/fixtures", - "cd dir_that_does_not_exist" + cwd: "tests/fixtures", + "cd dir_that_does_not_exist" ); assert!(actual.contains("dir_that_does_not_exist")); assert!(actual.contains("directory not found")); } + +#[test] +fn valuesystem_change_from_current_path_using_relative_path() { + Playground::setup("cd_test_9", |dirs, sandbox| { + sandbox.with_files(vec![FileWithContent( + "sample.toml", + r#" + [[bin]] + path = "src/plugins/turner.rs" + + [[bin]] + path = "src/plugins/robalino.rs" + + [[bin]] + path = "src/plugins/katz.rs" + "#, + )]); + + let actual = nu!( + cwd: dirs.test(), + r#" + enter sample.toml + cd bin + pwd | echo $it + exit + "# + ); + + assert_eq!(PathBuf::from(actual), PathBuf::from("/bin")); + }) +} + +#[test] +fn valuesystem_change_from_current_path_using_absolute_path() { + Playground::setup("cd_test_10", |dirs, sandbox| { + sandbox.with_files(vec![FileWithContent( + 
"sample.toml", + r#" + [dependencies] + turner-ts = "0.1.1" + robalino-tkd = "0.0.1" + katz-ember = "0.2.3" + + [[bin]] + path = "src/plugins/arepa.rs" + + [[bin]] + path = "src/plugins/bbq.rs" + "#, + )]); + + let actual = nu!( + cwd: dirs.test(), + r#" + enter sample.toml + cd bin + cd /dependencies + pwd | echo $it + exit + "# + ); + + assert_eq!(PathBuf::from(actual), PathBuf::from("/dependencies")); + }) +} + +#[test] +fn valuesystem_switch_back_to_previous_working_path() { + Playground::setup("cd_test_11", |dirs, sandbox| { + sandbox.with_files(vec![FileWithContent( + "sample.toml", + r#" + [dependencies] + turner-ts = "0.1.1" + robalino-tkd = "0.0.1" + katz-ember = "0.2.3" + odin-gf = "0.2.1" + + [[bin]] + path = "src/plugins/arepa.rs" + + [[bin]] + path = "src/plugins/bbq.rs" + "#, + )]); + + let actual = nu!( + cwd: dirs.test(), + r#" + enter sample.toml + cd dependencies + cd /bin + cd - + pwd | echo $it + exit + "# + ); + + assert_eq!(PathBuf::from(actual), PathBuf::from("/dependencies")); + }) +} + +#[test] +fn valuesystem_change_from_current_path_using_relative_path_and_dash() { + Playground::setup("cd_test_12", |dirs, sandbox| { + sandbox + .with_files(vec![FileWithContent( + "sample.toml", + r#" + [package] + - = ["Yehuda Katz ", "Jonathan Turner ", "Andrés N. Robalino "] + + [[bin]] + path = "src/plugins/arepa.rs" + + [[bin]] + path = "src/plugins/bbq.rs" + "# + )]); + + let actual = nu!( + cwd: dirs.test(), + r#" + enter sample.toml + cd package/- + cd /bin + cd - + pwd | echo $it + exit + "# + ); + + assert_eq!(PathBuf::from(actual), PathBuf::from("/package/-")); + }) +} + +#[test] +fn valuesystem_change_current_path_to_parent_path() { + Playground::setup("cd_test_13", |dirs, sandbox| { + sandbox + .with_files(vec![FileWithContent( + "sample.toml", + r#" + [package] + emberenios = ["Yehuda Katz ", "Jonathan Turner ", "Andrés N. Robalino "] + "# + )]); + + let actual = nu!( + cwd: dirs.test(), + r#" + enter sample.toml + cd package/emberenios + cd .. 
+ pwd | echo $it + exit + "# + ); + + assert_eq!(PathBuf::from(actual), PathBuf::from("/package")); + }) +} + +#[test] +fn valuesystem_change_to_home_directory() { + Playground::setup("cd_test_14", |dirs, sandbox| { + sandbox.with_files(vec![FileWithContent( + "sample.toml", + r#" + [paquete] + el = "pollo loco" + "#, + )]); + + let actual = nu!( + cwd: dirs.test(), + r#" + enter sample.toml + cd paquete + cd ~ + pwd | echo $it + exit + "# + ); + + assert_eq!(PathBuf::from(actual), PathBuf::from("/")); + }) +} + +#[test] +fn valuesystem_change_to_a_path_containing_spaces() { + Playground::setup("cd_test_15", |dirs, sandbox| { + sandbox.with_files(vec![FileWithContent( + "sample.toml", + r#" + ["pa que te"] + el = "pollo loco" + "#, + )]); + + let actual = nu!( + cwd: dirs.test(), + r#" + enter sample.toml + cd "pa que te" + pwd | echo $it + exit + "# + ); + + assert_eq!(PathBuf::from(actual), PathBuf::from("/").join("pa que te")); + }) +} + +#[test] +fn valuesystem_path_not_found() { + let actual = nu_error!( + cwd: "tests/fixtures/formats", + r#" + enter cargo_sample.toml + cd im_a_path_that_does_not_exist + exit + "# + ); + + assert!(actual.contains("Can not change to path inside")); + assert!(actual.contains("No such path exists")); +} diff --git a/tests/command_config_test.rs b/tests/command_config_test.rs new file mode 100644 index 0000000000..8a45be47c5 --- /dev/null +++ b/tests/command_config_test.rs @@ -0,0 +1,115 @@ +mod helpers; + +use helpers as h; +use helpers::{Playground, Stub::*}; + +use std::path::PathBuf; + +#[test] +fn has_default_configuration_file() { + let expected = "config.toml"; + + Playground::setup("config_test_1", |dirs, _| { + nu!(cwd: dirs.root(), "config"); + + assert_eq!( + dirs.config_path().join(expected), + nu::config_path().unwrap().join(expected) + ); + }) +} + +#[test] +fn shows_path_of_configuration_file() { + let expected = "config.toml"; + + Playground::setup("config_test_2", |dirs, _| { + let actual = nu!( + cwd: dirs.test(), + "config --path | echo $it" + ); + + assert_eq!(PathBuf::from(actual), dirs.config_path().join(expected)); + }); +} + +#[test] +fn use_different_configuration() { + Playground::setup("config_test_3", |dirs, sandbox| { + sandbox.with_files(vec![FileWithContent( + "test_3.toml", + r#" + caballero_1 = "Andrés N. Robalino" + caballero_2 = "Jonathan Turner" + caballero_3 = "Yehuda katz" + "#, + )]); + + let actual = nu!( + cwd: dirs.root(), + "config --get caballero_1 --load {}/test_3.toml | echo $it", + dirs.test() + ); + + assert_eq!(actual, "Andrés N. Robalino"); + }); + + h::delete_file_at(nu::config_path().unwrap().join("test_3.toml")); +} + +#[test] +fn sets_configuration_value() { + Playground::setup("config_test_4", |dirs, sandbox| { + sandbox.with_files(vec![FileWithContent( + "test_4.toml", + r#" + caballero_1 = "Andrés N. 
Robalino" + caballero_2 = "Jonathan Turner" + caballero_3 = "Yehuda katz" + "#, + )]); + + nu!( + cwd: dirs.test(), + "config --load test_4.toml --set [caballero_4 jonas]" + ); + + let actual = nu!( + cwd: dirs.root(), + r#"open "{}/test_4.toml" | get caballero_4 | echo $it"#, + dirs.config_path() + ); + + assert_eq!(actual, "jonas"); + }); + + h::delete_file_at(nu::config_path().unwrap().join("test_4.toml")); +} + +// #[test] +// fn removes_configuration_value() { +// Playground::setup("config_test_5", |dirs, sandbox| { +// sandbox.with_files(vec![FileWithContent( +// "test_5.toml", +// r#" +// caballeros = [1, 1, 1] +// podershell = [1, 1, 1] +// "#, +// )]); + +// nu!( +// cwd: dirs.test(), +// "config --load test_5.toml --remove podershell" +// ); + +// let actual = nu_error!( +// cwd: dirs.root(), +// r#"open "{}/test_5.toml" | get podershell | echo $it"#, +// dirs.config_path() +// ); + +// assert!(actual.contains("Unknown column")); +// }); + +// h::delete_file_at(nu::config_path().unwrap().join("test_5.toml")); +// } diff --git a/tests/command_enter_test.rs b/tests/command_enter_test.rs index fe22b56dbe..fc4a437a23 100644 --- a/tests/command_enter_test.rs +++ b/tests/command_enter_test.rs @@ -73,3 +73,16 @@ fn knows_the_filesystems_entered() { )); }) } + +#[test] +fn errors_if_file_not_found() { + Playground::setup("enter_test_2", |dirs, _| { + let actual = nu_error!( + cwd: dirs.test(), + "enter i_dont_exist.csv" + ); + + assert!(actual.contains("File could not be opened")); + assert!(actual.contains("file not found")); + }) +} diff --git a/tests/command_ls_tests.rs b/tests/command_ls_tests.rs index 8ec1d035dc..a0ae959e12 100644 --- a/tests/command_ls_tests.rs +++ b/tests/command_ls_tests.rs @@ -6,52 +6,40 @@ use helpers::{Playground, Stub::*}; #[test] fn ls_lists_regular_files() { Playground::setup("ls_test_1", |dirs, sandbox| { - sandbox - .with_files(vec![ - EmptyFile("yehuda.10.txt"), - EmptyFile("jonathan.10.txt"), - EmptyFile("andres.10.txt"), + sandbox.with_files(vec![ + EmptyFile("yehuda.txt"), + EmptyFile("jonathan.txt"), + EmptyFile("andres.txt"), ]); let actual = nu!( cwd: dirs.test(), h::pipeline( r#" ls - | get name - | lines - | split-column "." - | get Column2 - | str --to-int - | sum + | count | echo $it "# )); - assert_eq!(actual, "30"); + assert_eq!(actual, "3"); }) } #[test] fn ls_lists_regular_files_using_asterisk_wildcard() { Playground::setup("ls_test_2", |dirs, sandbox| { - sandbox - .with_files(vec![ - EmptyFile("los.1.txt"), - EmptyFile("tres.1.txt"), - EmptyFile("amigos.1.txt"), - EmptyFile("arepas.1.clu"), + sandbox.with_files(vec![ + EmptyFile("los.txt"), + EmptyFile("tres.txt"), + EmptyFile("amigos.txt"), + EmptyFile("arepas.clu"), ]); let actual = nu!( cwd: dirs.test(), h::pipeline( r#" ls *.txt - | get name - | lines - | split-column "." - | get Column2 - | str --to-int - | sum + | count | echo $it "# )); @@ -63,28 +51,22 @@ fn ls_lists_regular_files_using_asterisk_wildcard() { #[test] fn ls_lists_regular_files_using_question_mark_wildcard() { Playground::setup("ls_test_3", |dirs, sandbox| { - sandbox - .with_files(vec![ - EmptyFile("yehuda.10.txt"), - EmptyFile("jonathan.10.txt"), - EmptyFile("andres.10.txt"), - EmptyFile("chicken_not_to_be_picked_up.100.txt"), + sandbox.with_files(vec![ + EmptyFile("yehuda.10.txt"), + EmptyFile("jonathan.10.txt"), + EmptyFile("andres.10.txt"), + EmptyFile("chicken_not_to_be_picked_up.100.txt"), ]); let actual = nu!( cwd: dirs.test(), h::pipeline( r#" ls *.??.txt - | get name - | lines - | split-column "." 
- | get Column2 - | str --to-int - | sum + | count | echo $it "# )); - assert_eq!(actual, "30"); + assert_eq!(actual, "3"); }) } diff --git a/tests/command_mv_tests.rs b/tests/command_mv_tests.rs index 61ce842759..1d01e23329 100644 --- a/tests/command_mv_tests.rs +++ b/tests/command_mv_tests.rs @@ -26,11 +26,7 @@ fn moves_a_file() { #[test] fn overwrites_if_moving_to_existing_file() { Playground::setup("mv_test_2", |dirs, sandbox| { - sandbox - .with_files(vec![ - EmptyFile("andres.txt"), - EmptyFile("jonathan.txt") - ]); + sandbox.with_files(vec![EmptyFile("andres.txt"), EmptyFile("jonathan.txt")]); let original = dirs.test().join("andres.txt"); let expected = dirs.test().join("jonathan.txt"); @@ -142,7 +138,7 @@ fn moves_using_path_with_wildcard() { EmptyFile("sgml_description.json"), EmptyFile("sample.ini"), EmptyFile("utf16.ini"), - EmptyFile("yehuda.ini") + EmptyFile("yehuda.ini"), ]) .mkdir("work_dir") .mkdir("expected"); @@ -150,10 +146,7 @@ fn moves_using_path_with_wildcard() { let work_dir = dirs.test().join("work_dir"); let expected = dirs.test().join("expected"); - nu!( - cwd: work_dir, - "mv ../originals/*.ini ../expected" - ); + nu!(cwd: work_dir, "mv ../originals/*.ini ../expected"); assert!(h::files_exist_at( vec!["yehuda.ini", "jonathan.ini", "sample.ini", "andres.ini",], @@ -170,7 +163,7 @@ fn moves_using_a_glob() { .with_files(vec![ EmptyFile("arepa.txt"), EmptyFile("empanada.txt"), - EmptyFile("taquiza.txt") + EmptyFile("taquiza.txt"), ]) .mkdir("work_dir") .mkdir("expected"); @@ -179,10 +172,7 @@ fn moves_using_a_glob() { let work_dir = dirs.test().join("work_dir"); let expected = dirs.test().join("expected"); - nu!( - cwd: work_dir, - "mv ../meals/* ../expected" - ); + nu!(cwd: work_dir, "mv ../meals/* ../expected"); assert!(meal_dir.exists()); assert!(h::files_exist_at( diff --git a/tests/command_open_tests.rs b/tests/command_open_tests.rs index 968bc7531b..48f438f3d6 100644 --- a/tests/command_open_tests.rs +++ b/tests/command_open_tests.rs @@ -30,6 +30,31 @@ fn recognizes_csv() { }) } +// sample.bson has the following format: +// ━━━━━━━━━━┯━━━━━━━━━━━ +// _id │ root +// ──────────┼─────────── +// [object] │ [9 items] +// ━━━━━━━━━━┷━━━━━━━━━━━ +// +// the root value is: +// ━━━┯━━━━━━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━━━━━━━━━━┯━━━━━━━━━━┯━━━━━━━━━━ +// # │ _id │ a │ b │ c +// ───┼───────────────────┼─────────────────────────┼──────────┼────────── +// 0 │ [object] │ 1.000000000000000 │ hello │ [2 items] +// 1 │ [object] │ 42.00000000000000 │ whel │ hello +// 2 │ [object] │ [object] │ │ +// 3 │ [object] │ │ [object] │ +// 4 │ [object] │ │ │ [object] +// 5 │ [object] │ │ │ [object] +// 6 │ [object] │ [object] │ [object] │ +// 7 │ [object] │ │ [object] │ +// 8 │ 1.000000 │ │ [object] │ +// +// The decimal value is supposed to be π, but is currently wrong due to +// what appears to be an issue in the bson library that is under investigation. +// + #[test] fn open_can_parse_bson_1() { let actual = nu!( @@ -57,6 +82,55 @@ fn open_can_parse_bson_2() { assert_eq!(actual, "function"); } +// sample.db has the following format: +// +// ━━━┯━━━━━━━━━━━━┯━━━━━━━━━━━━━━ +// # │ table_name │ table_values +// ───┼────────────┼────────────── +// 0 │ strings │ [6 items] +// 1 │ ints │ [5 items] +// 2 │ floats │ [4 items] +// ━━━┷━━━━━━━━━━━━┷━━━━━━━━━━━━━━ +// +// In this case, this represents a sqlite database +// with three tables named `strings`, `ints`, and `floats`. 
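+// (The sample lives at tests/fixtures/formats/sample.db; `open` reads it through
+// nu's sqlite support, which open_can_parse_sqlite below exercises.)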
+// The table_values represent the values for the tables: +// +// ━━━━┯━━━━━━━┯━━━━━━━━━━┯━━━━━━┯━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ +// # │ x │ y │ z │ f +// ────┼───────┼──────────┼──────┼────────────────────────────────────────────────────────────────────── +// 0 │ hello │ │ │ +// 1 │ hello │ │ │ +// 2 │ hello │ │ │ +// 3 │ hello │ │ │ +// 4 │ world │ │ │ +// 5 │ world │ │ │ +// 6 │ │ │ 1 │ +// 7 │ │ │ 42 │ +// 8 │ │ │ 425 │ +// 9 │ │ │ 4253 │ +// 10 │ │ │ │ +// 11 │ │ │ │ 3.400000000000000 +// 12 │ │ │ │ 3.141592650000000 +// 13 │ │ │ │ 23.00000000000000 +// 14 │ │ │ │ this string that doesn't really belong here but sqlite is what it is +// ━━━━┷━━━━━━━┷━━━━━━━━━━┷━━━━━━┷━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ +// +// We can see here that each table has different columns. `strings` has `x` and `y`, while +// `ints` has just `z`, and `floats` has only the column `f`. This means, in general, when working +// with sqlite, one will want to select a single table, e.g.: +// +// open sample.db | nth 1 | get table_values +// ━━━┯━━━━━━ +// # │ z +// ───┼────── +// 0 │ 1 +// 1 │ 42 +// 2 │ 425 +// 3 │ 4253 +// 4 │ +// ━━━┷━━━━━━ + #[test] fn open_can_parse_sqlite() { let actual = nu!( @@ -138,7 +212,7 @@ fn open_can_parse_ini() { fn open_can_parse_utf16_ini() { let actual = nu!( cwd: "tests/fixtures/formats", - "open utf16.ini | get .ShellClassInfo | get IconIndex | echo $it" + "open utf16.ini | get '.ShellClassInfo' | get IconIndex | echo $it" ); assert_eq!(actual, "-236") @@ -148,8 +222,9 @@ fn open_can_parse_utf16_ini() { fn errors_if_file_not_found() { let actual = nu_error!( cwd: "tests/fixtures/formats", - "open i_dont_exist.txt | echo $it" + "open i_dont_exist.txt" ); assert!(actual.contains("File could not be opened")); + assert!(actual.contains("file not found")); } diff --git a/tests/command_rm_tests.rs b/tests/command_rm_tests.rs index 568219e170..9317b586a3 100644 --- a/tests/command_rm_tests.rs +++ b/tests/command_rm_tests.rs @@ -6,9 +6,7 @@ use helpers::{Playground, Stub::*}; #[test] fn rm_removes_a_file() { Playground::setup("rm_test_1", |dirs, sandbox| { - sandbox - .with_files(vec![EmptyFile("i_will_be_deleted.txt") - ]); + sandbox.with_files(vec![EmptyFile("i_will_be_deleted.txt")]); nu!( cwd: dirs.root(), @@ -29,7 +27,7 @@ fn rm_removes_files_with_wildcard() { .with_files(vec![ EmptyFile("cli.rs"), EmptyFile("lib.rs"), - EmptyFile("prelude.rs") + EmptyFile("prelude.rs"), ]) .within("src/parser") .with_files(vec![EmptyFile("parse.rs"), EmptyFile("parser.rs")]) @@ -38,8 +36,8 @@ fn rm_removes_files_with_wildcard() { .within("src/parser/hir") .with_files(vec![ EmptyFile("baseline_parse.rs"), - EmptyFile("baseline_parse_tokens.rs") - ]); + EmptyFile("baseline_parse_tokens.rs"), + ]); nu!( cwd: dirs.test(), @@ -70,7 +68,7 @@ fn rm_removes_deeply_nested_directories_with_wildcard_and_recursive_flag() { .with_files(vec![ EmptyFile("cli.rs"), EmptyFile("lib.rs"), - EmptyFile("prelude.rs") + EmptyFile("prelude.rs"), ]) .within("src/parser") .with_files(vec![EmptyFile("parse.rs"), EmptyFile("parser.rs")]) @@ -79,8 +77,8 @@ fn rm_removes_deeply_nested_directories_with_wildcard_and_recursive_flag() { .within("src/parser/hir") .with_files(vec![ EmptyFile("baseline_parse.rs"), - EmptyFile("baseline_parse_tokens.rs") - ]); + EmptyFile("baseline_parse_tokens.rs"), + ]); nu!( cwd: dirs.test(), @@ -109,11 +107,10 @@ fn rm_removes_directory_contents_without_recursive_flag_if_empty() { #[test] fn 
rm_removes_directory_contents_with_recursive_flag() { Playground::setup("rm_test_5", |dirs, sandbox| { - sandbox - .with_files(vec![ - EmptyFile("yehuda.txt"), - EmptyFile("jonathan.txt"), - EmptyFile("andres.txt") + sandbox.with_files(vec![ + EmptyFile("yehuda.txt"), + EmptyFile("jonathan.txt"), + EmptyFile("andres.txt"), ]); nu!( @@ -128,9 +125,7 @@ fn rm_removes_directory_contents_with_recursive_flag() { #[test] fn rm_errors_if_attempting_to_delete_a_directory_with_content_without_recursive_flag() { Playground::setup("rm_test_6", |dirs, sandbox| { - sandbox - .with_files(vec![EmptyFile("some_empty_file.txt") - ]); + sandbox.with_files(vec![EmptyFile("some_empty_file.txt")]); let actual = nu_error!( cwd: dirs.root(), diff --git a/tests/commands_test.rs b/tests/commands_test.rs index 4d3c9d0086..87e1182b10 100644 --- a/tests/commands_test.rs +++ b/tests/commands_test.rs @@ -3,6 +3,243 @@ mod helpers; use helpers as h; use helpers::{Playground, Stub::*}; +#[test] +fn group_by() { + Playground::setup("group_by_test_1", |dirs, sandbox| { + sandbox.with_files(vec![FileWithContentToBeTrimmed( + "los_tres_caballeros.csv", + r#" + first_name,last_name,rusty_luck,type + Andrés,Robalino,1,A + Jonathan,Turner,1,B + Yehuda,Katz,1,A + "#, + )]); + + let actual = nu!( + cwd: dirs.test(), h::pipeline( + r#" + open los_tres_caballeros.csv + | group-by type + | get A + | count + | echo $it + "# + )); + + assert_eq!(actual, "2"); + }) +} + +#[test] +fn group_by_errors_if_unknown_column_name() { + Playground::setup("group_by_test_2", |dirs, sandbox| { + sandbox.with_files(vec![FileWithContentToBeTrimmed( + "los_tres_caballeros.csv", + r#" + first_name,last_name,rusty_luck,type + Andrés,Robalino,1,A + Jonathan,Turner,1,B + Yehuda,Katz,1,A + "#, + )]); + + let actual = nu_error!( + cwd: dirs.test(), h::pipeline( + r#" + open los_tres_caballeros.csv + | group-by ttype + "# + )); + + assert!(actual.contains("Unknown column")); + }) +} + +#[test] +fn first_gets_first_rows_by_amount() { + Playground::setup("first_test_1", |dirs, sandbox| { + sandbox.with_files(vec![ + EmptyFile("los.txt"), + EmptyFile("tres.txt"), + EmptyFile("amigos.txt"), + EmptyFile("arepas.clu"), + ]); + + let actual = nu!( + cwd: dirs.test(), h::pipeline( + r#" + ls + | first 3 + | count + | echo $it + "# + )); + + assert_eq!(actual, "3"); + }) +} + +#[test] +fn first_gets_all_rows_if_amount_higher_than_all_rows() { + Playground::setup("first_test_2", |dirs, sandbox| { + sandbox.with_files(vec![ + EmptyFile("los.txt"), + EmptyFile("tres.txt"), + EmptyFile("amigos.txt"), + EmptyFile("arepas.clu"), + ]); + + let actual = nu!( + cwd: dirs.test(), h::pipeline( + r#" + ls + | first 99 + | count + | echo $it + "# + )); + + assert_eq!(actual, "4"); + }) +} + +#[test] +fn first_gets_first_row_when_no_amount_given() { + Playground::setup("first_test_3", |dirs, sandbox| { + sandbox.with_files(vec![EmptyFile("caballeros.txt"), EmptyFile("arepas.clu")]); + + let actual = nu!( + cwd: dirs.test(), h::pipeline( + r#" + ls + | first + | count + | echo $it + "# + )); + + assert_eq!(actual, "1"); + }) +} + +#[test] +fn last_gets_last_rows_by_amount() { + Playground::setup("last_test_1", |dirs, sandbox| { + sandbox.with_files(vec![ + EmptyFile("los.txt"), + EmptyFile("tres.txt"), + EmptyFile("amigos.txt"), + EmptyFile("arepas.clu"), + ]); + + let actual = nu!( + cwd: dirs.test(), h::pipeline( + r#" + ls + | last 3 + | count + | echo $it + "# + )); + + assert_eq!(actual, "3"); + }) +} + +#[test] +fn last_gets_last_row_when_no_amount_given() { + 
Playground::setup("last_test_2", |dirs, sandbox| { + sandbox.with_files(vec![EmptyFile("caballeros.txt"), EmptyFile("arepas.clu")]); + + let actual = nu!( + cwd: dirs.test(), h::pipeline( + r#" + ls + | last + | count + | echo $it + "# + )); + + assert_eq!(actual, "1"); + }) +} + +#[test] +fn get() { + Playground::setup("get_test_1", |dirs, sandbox| { + sandbox.with_files(vec![FileWithContent( + "sample.toml", + r#" + nu_party_venue = "zion" + "#, + )]); + + let actual = nu!( + cwd: dirs.test(), h::pipeline( + r#" + open sample.toml + | get nu_party_venue + | echo $it + "# + )); + + assert_eq!(actual, "zion"); + }) +} + +#[test] +fn get_more_than_one_member() { + Playground::setup("get_test_2", |dirs, sandbox| { + sandbox.with_files(vec![FileWithContent( + "sample.toml", + r#" + [[fortune_tellers]] + name = "Andrés N. Robalino" + arepas = 1 + broken_builds = 0 + + [[fortune_tellers]] + name = "Jonathan Turner" + arepas = 1 + broken_builds = 1 + + [[fortune_tellers]] + name = "Yehuda Katz" + arepas = 1 + broken_builds = 1 + "#, + )]); + + let actual = nu!( + cwd: dirs.test(), h::pipeline( + r#" + open sample.toml + | get fortune_tellers + | get arepas broken_builds + | sum + | echo $it + "# + )); + + assert_eq!(actual, "5"); + }) +} + +#[test] +fn get_requires_at_least_one_member() { + Playground::setup("first_test_3", |dirs, sandbox| { + sandbox.with_files(vec![EmptyFile("andres.txt")]); + + let actual = nu_error!( + cwd: dirs.test(), "ls | get" + ); + + assert!(actual.contains("requires member parameter")); + }) +} + #[test] fn lines() { let actual = nu!( @@ -36,8 +273,8 @@ fn save_figures_out_intelligently_where_to_write_out_with_metadata() { description = "A shell for the GitHub era" license = "ISC" edition = "2018" - "#) - ]); + "#, + )]); let subject_file = dirs.test().join("cargo_sample.toml"); @@ -51,6 +288,33 @@ fn save_figures_out_intelligently_where_to_write_out_with_metadata() { }) } +#[test] +fn it_arg_works_with_many_inputs_to_external_command() { + Playground::setup("it_arg_works_with_many_inputs", |dirs, sandbox| { + sandbox.with_files(vec![ + FileWithContent("file1", "text"), + FileWithContent("file2", " and more text"), + ]); + + let (stdout, stderr) = nu_combined!( + cwd: dirs.test(), h::pipeline( + r#" + echo hello world + | split-row " " + | ^echo $it + "# + )); + + #[cfg(windows)] + assert_eq!("hello world", stdout); + + #[cfg(not(windows))] + assert_eq!("helloworld", stdout); + + assert!(!stderr.contains("No such file or directory")); + }) +} + #[test] fn save_can_write_out_csv() { Playground::setup("save_test_2", |dirs, _| { @@ -63,6 +327,38 @@ fn save_can_write_out_csv() { ); let actual = h::file_contents(expected_file); - assert!(actual.contains("[list list],A shell for the GitHub era,2018,ISC,nu,0.2.0")); + assert!(actual.contains("[Table],A shell for the GitHub era,2018,ISC,nu,0.2.0")); + }) +} + +// This test is more tricky since we are checking for binary output. The output rendered in ASCII is (roughly): +// �authors+0Yehuda Katz descriptionA shell for the GitHub eraedition2018licenseISCnamenuversion0.2.0 +// It is not valid utf-8, so this is just an approximation. 
+#[test] +fn save_can_write_out_bson() { + Playground::setup("save_test_3", |dirs, _| { + let expected_file = dirs.test().join("cargo_sample.bson"); + + nu!( + cwd: dirs.root(), + "open {}/cargo_sample.toml | inc package.version --minor | get package | save save_test_3/cargo_sample.bson", + dirs.formats() + ); + + let actual = h::file_contents_binary(expected_file); + assert!( + actual + == vec![ + 168, 0, 0, 0, 4, 97, 117, 116, 104, 111, 114, 115, 0, 43, 0, 0, 0, 2, 48, 0, + 31, 0, 0, 0, 89, 101, 104, 117, 100, 97, 32, 75, 97, 116, 122, 32, 60, 119, + 121, 99, 97, 116, 115, 64, 103, 109, 97, 105, 108, 46, 99, 111, 109, 62, 0, 0, + 2, 100, 101, 115, 99, 114, 105, 112, 116, 105, 111, 110, 0, 27, 0, 0, 0, 65, + 32, 115, 104, 101, 108, 108, 32, 102, 111, 114, 32, 116, 104, 101, 32, 71, 105, + 116, 72, 117, 98, 32, 101, 114, 97, 0, 2, 101, 100, 105, 116, 105, 111, 110, 0, + 5, 0, 0, 0, 50, 48, 49, 56, 0, 2, 108, 105, 99, 101, 110, 115, 101, 0, 4, 0, 0, + 0, 73, 83, 67, 0, 2, 110, 97, 109, 101, 0, 3, 0, 0, 0, 110, 117, 0, 2, 118, + 101, 114, 115, 105, 111, 110, 0, 6, 0, 0, 0, 48, 46, 50, 46, 48, 0, 0 + ] + ); }) } diff --git a/tests/external_tests.rs b/tests/external_tests.rs index 7aabd592db..0d810acac1 100644 --- a/tests/external_tests.rs +++ b/tests/external_tests.rs @@ -3,8 +3,8 @@ mod helpers; #[test] fn external_command() { let actual = nu!( - cwd: "tests/fixtures", - "echo 1" + cwd: "tests/fixtures", + "echo 1" ); assert!(actual.contains("1")); diff --git a/tests/filter_inc_tests.rs b/tests/filter_inc_tests.rs index 9ef0b311e6..658e24308b 100644 --- a/tests/filter_inc_tests.rs +++ b/tests/filter_inc_tests.rs @@ -15,13 +15,12 @@ fn can_only_apply_one() { #[test] fn by_one_with_field_passed() { Playground::setup("plugin_inc_test_1", |dirs, sandbox| { - sandbox - .with_files(vec![FileWithContent( - "sample.toml", - r#" + sandbox.with_files(vec![FileWithContent( + "sample.toml", + r#" [package] edition = "2018" - "# + "#, )]); let actual = nu!( @@ -36,13 +35,12 @@ fn by_one_with_field_passed() { #[test] fn by_one_with_no_field_passed() { Playground::setup("plugin_inc_test_2", |dirs, sandbox| { - sandbox - .with_files(vec![FileWithContent( - "sample.toml", - r#" + sandbox.with_files(vec![FileWithContent( + "sample.toml", + r#" [package] contributors = "2" - "# + "#, )]); let actual = nu!( @@ -57,13 +55,12 @@ fn by_one_with_no_field_passed() { #[test] fn semversion_major_inc() { Playground::setup("plugin_inc_test_3", |dirs, sandbox| { - sandbox - .with_files(vec![FileWithContent( - "sample.toml", - r#" + sandbox.with_files(vec![FileWithContent( + "sample.toml", + r#" [package] version = "0.1.3" - "# + "#, )]); let actual = nu!( @@ -78,13 +75,12 @@ fn semversion_major_inc() { #[test] fn semversion_minor_inc() { Playground::setup("plugin_inc_test_4", |dirs, sandbox| { - sandbox - .with_files(vec![FileWithContent( - "sample.toml", - r#" + sandbox.with_files(vec![FileWithContent( + "sample.toml", + r#" [package] version = "0.1.3" - "# + "#, )]); let actual = nu!( @@ -99,13 +95,12 @@ fn semversion_minor_inc() { #[test] fn semversion_patch_inc() { Playground::setup("plugin_inc_test_5", |dirs, sandbox| { - sandbox - .with_files(vec![FileWithContent( - "sample.toml", - r#" + sandbox.with_files(vec![FileWithContent( + "sample.toml", + r#" [package] version = "0.1.3" - "# + "#, )]); let actual = nu!( @@ -120,13 +115,12 @@ fn semversion_patch_inc() { #[test] fn semversion_without_passing_field() { Playground::setup("plugin_inc_test_6", |dirs, sandbox| { - sandbox - .with_files(vec![FileWithContent( - 
"sample.toml", - r#" + sandbox.with_files(vec![FileWithContent( + "sample.toml", + r#" [package] version = "0.1.3" - "# + "#, )]); let actual = nu!( diff --git a/tests/filter_str_tests.rs b/tests/filter_str_tests.rs index 03c9d63532..9f92186fa6 100644 --- a/tests/filter_str_tests.rs +++ b/tests/filter_str_tests.rs @@ -1,7 +1,7 @@ mod helpers; -use helpers as h; use h::{Playground, Stub::*}; +use helpers as h; #[test] fn can_only_apply_one() { @@ -10,9 +10,7 @@ fn can_only_apply_one() { "open caco3_plastics.csv | first 1 | str origin --downcase --upcase" ); - assert!( - actual.contains("Usage: str field [--downcase|--upcase|--to-int|--replace|--find-replace]") - ); + assert!(actual.contains("Usage: str field [--downcase|--upcase|--to-int")); } #[test] @@ -39,10 +37,9 @@ fn acts_without_passing_field() { #[test] fn downcases() { Playground::setup("plugin_str_test_2", |dirs, sandbox| { - sandbox - .with_files(vec![FileWithContent( - "sample.toml", - r#" + sandbox.with_files(vec![FileWithContent( + "sample.toml", + r#" [dependency] name = "LIGHT" "#, @@ -60,10 +57,9 @@ fn downcases() { #[test] fn upcases() { Playground::setup("plugin_str_test_3", |dirs, sandbox| { - sandbox - .with_files(vec![FileWithContent( - "sample.toml", - r#" + sandbox.with_files(vec![FileWithContent( + "sample.toml", + r#" [package] name = "nushell" "#, @@ -94,81 +90,3 @@ fn converts_to_int() { assert_eq!(actual, "2509000000"); } - -#[test] -fn replaces() { - Playground::setup("plugin_str_test_4", |dirs, sandbox| { - sandbox - .with_files(vec![FileWithContent( - "sample.toml", - r#" - [package] - name = "nushell" - "#, - )]); - - let actual = nu!( - cwd: dirs.test(), h::pipeline( - r#" - open sample.toml - | str package.name --replace wykittenshell - | get package.name - | echo $it - "# - )); - - assert_eq!(actual, "wykittenshell"); - }) -} - -#[test] -fn find_and_replaces() { - Playground::setup("plugin_str_test_5", |dirs, sandbox| { - sandbox - .with_files(vec![FileWithContent( - "sample.toml", - r#" - [fortune.teller] - phone = "1-800-KATZ" - "#, - )]); - - let actual = nu!( - cwd: dirs.test(), h::pipeline( - r#" - open sample.toml - | str fortune.teller.phone --find-replace KATZ "5289" - | get fortune.teller.phone - | echo $it - "# - )); - - assert_eq!(actual, "1-800-5289"); - }) -} - -#[test] -fn find_and_replaces_without_passing_field() { - Playground::setup("plugin_str_test_6", |dirs, sandbox| { - sandbox - .with_files(vec![FileWithContent( - "sample.toml", - r#" - [fortune.teller] - phone = "1-800-KATZ" - "#, - )]); - - let actual = nu!( - cwd: dirs.test(), h::pipeline( - r#" - open sample.toml - | get fortune.teller.phone - | str --find-replace KATZ "5289" - | echo $it - "# - )); - - assert_eq!(actual, "1-800-5289"); - }) -} diff --git a/tests/filters_test.rs b/tests/filters_test.rs index e3ebcd1a6d..1eb55448b7 100644 --- a/tests/filters_test.rs +++ b/tests/filters_test.rs @@ -35,7 +35,7 @@ fn converts_structured_table_to_csv_text() { | to-csv | lines | nth 1 - | echo "$it" + | echo $it "# )); @@ -63,7 +63,7 @@ fn converts_structured_table_to_csv_text_skipping_headers_after_conversion() { | split-column "," a b c d origin | last 1 | to-csv --headerless - | echo "$it" + | echo $it "# )); @@ -218,8 +218,8 @@ fn converts_structured_table_to_json_text() { | split-column "," name luck | pick name | to-json - | nth 0 | from-json + | nth 0 | get name | echo $it "# @@ -261,7 +261,7 @@ fn converts_structured_table_to_tsv_text() { | to-tsv | lines | nth 1 - | echo "$it" + | echo $it "# )); @@ -289,7 +289,7 @@ fn 
converts_structured_table_to_tsv_text_skipping_headers_after_conversion() { | split-column "\t" a b c d origin | last 1 | to-tsv --headerless - | echo "$it" + | echo $it "# )); @@ -355,6 +355,90 @@ fn converts_from_tsv_text_skipping_headers_to_structured_table() { }) } +#[test] +fn converts_from_ssv_text_to_structured_table() { + Playground::setup("filter_from_ssv_test_1", |dirs, sandbox| { + sandbox.with_files(vec![FileWithContentToBeTrimmed( + "oc_get_svc.txt", + r#" + NAME LABELS SELECTOR IP PORT(S) + docker-registry docker-registry=default docker-registry=default 172.30.78.158 5000/TCP + kubernetes component=apiserver,provider=kubernetes 172.30.0.2 443/TCP + kubernetes-ro component=apiserver,provider=kubernetes 172.30.0.1 80/TCP + "#, + )]); + + let actual = nu!( + cwd: dirs.test(), h::pipeline( + r#" + open oc_get_svc.txt + | from-ssv + | nth 0 + | get IP + | echo $it + "# + )); + + assert_eq!(actual, "172.30.78.158"); + }) +} + +#[test] +fn converts_from_ssv_text_to_structured_table_with_separator_specified() { + Playground::setup("filter_from_ssv_test_1", |dirs, sandbox| { + sandbox.with_files(vec![FileWithContentToBeTrimmed( + "oc_get_svc.txt", + r#" + NAME LABELS SELECTOR IP PORT(S) + docker-registry docker-registry=default docker-registry=default 172.30.78.158 5000/TCP + kubernetes component=apiserver,provider=kubernetes 172.30.0.2 443/TCP + kubernetes-ro component=apiserver,provider=kubernetes 172.30.0.1 80/TCP + "#, + )]); + + let actual = nu!( + cwd: dirs.test(), h::pipeline( + r#" + open oc_get_svc.txt + | from-ssv --minimum-spaces 3 + | nth 0 + | get IP + | echo $it + "# + )); + + assert_eq!(actual, "172.30.78.158"); + }) +} + +#[test] +fn converts_from_ssv_text_skipping_headers_to_structured_table() { + Playground::setup("filter_from_ssv_test_2", |dirs, sandbox| { + sandbox.with_files(vec![FileWithContentToBeTrimmed( + "oc_get_svc.txt", + r#" + NAME LABELS SELECTOR IP PORT(S) + docker-registry docker-registry=default docker-registry=default 172.30.78.158 5000/TCP + kubernetes component=apiserver,provider=kubernetes 172.30.0.2 443/TCP + kubernetes-ro component=apiserver,provider=kubernetes 172.30.0.1 80/TCP + "#, + )]); + + let actual = nu!( + cwd: dirs.test(), h::pipeline( + r#" + open oc_get_svc.txt + | from-ssv --headerless + | nth 2 + | get Column2 + | echo $it + "# + )); + + assert_eq!(actual, "component=apiserver,provider=kubernetes"); + }) +} + #[test] fn can_convert_table_to_bson_and_back_into_table() { let actual = nu!( @@ -423,6 +507,22 @@ fn can_convert_table_to_yaml_text_and_from_yaml_text_back_into_table() { assert_eq!(actual, "nushell"); } +#[test] +fn can_encode_and_decode_urlencoding() { + let actual = nu!( + cwd: "tests/fixtures/formats", h::pipeline( + r#" + open sample.url + | to-url + | from-url + | get cheese + | echo $it + "# + )); + + assert_eq!(actual, "comté"); +} + #[test] fn can_sort_by_column() { let actual = nu!( @@ -479,6 +579,31 @@ fn can_sum() { assert_eq!(actual, "203") } +#[test] +fn can_average_numbers() { + let actual = nu!( + cwd: "tests/fixtures/formats", h::pipeline( + r#" + open sgml_description.json + | get glossary.GlossDiv.GlossList.GlossEntry.Sections + | average + | echo $it + "# + )); + + assert_eq!(actual, "101.5000000000000") +} + +#[test] +fn can_average_bytes() { + let actual = nu!( + cwd: "tests/fixtures/formats", + "ls | sort-by name | skip 1 | first 2 | get size | average | echo $it" + ); + + assert_eq!(actual, "1600.000000000000"); +} + #[test] fn can_filter_by_unit_size_comparison() { let actual = nu!( diff --git 
a/tests/fixtures/formats/fileA.txt b/tests/fixtures/formats/fileA.txt new file mode 100644 index 0000000000..0ce9fb3fa2 --- /dev/null +++ b/tests/fixtures/formats/fileA.txt @@ -0,0 +1,3 @@ +VAR1=Chill +VAR2=StupidLongName +VAR3=AlsoChill diff --git a/tests/fixtures/formats/sample.url b/tests/fixtures/formats/sample.url new file mode 100644 index 0000000000..361d70dbb6 --- /dev/null +++ b/tests/fixtures/formats/sample.url @@ -0,0 +1 @@ +bread=baguette&cheese=comt%C3%A9&meat=ham&fat=butter \ No newline at end of file diff --git a/tests/helpers/mod.rs b/tests/helpers/mod.rs index 538959e9ea..86c8a10e7f 100644 --- a/tests/helpers/mod.rs +++ b/tests/helpers/mod.rs @@ -4,6 +4,7 @@ use glob::glob; pub use std::path::Path; pub use std::path::PathBuf; +use app_dirs::{get_app_root, AppDataType}; use getset::Getters; use std::io::Read; use tempfile::{tempdir, TempDir}; @@ -92,6 +93,7 @@ macro_rules! nu { .write_all(commands.as_bytes()) .expect("couldn't write to stdin"); + let output = process .wait_with_output() .expect("couldn't read from stdout"); @@ -153,6 +155,60 @@ macro_rules! nu_error { }}; } +#[macro_export] +macro_rules! nu_combined { + (cwd: $cwd:expr, $path:expr, $($part:expr),*) => {{ + use $crate::helpers::DisplayPath; + + let path = format!($path, $( + $part.display_path() + ),*); + + nu_combined!($cwd, &path) + }}; + + (cwd: $cwd:expr, $path:expr) => {{ + nu_combined!($cwd, $path) + }}; + + ($cwd:expr, $path:expr) => {{ + pub use std::error::Error; + pub use std::io::prelude::*; + pub use std::process::{Command, Stdio}; + + let commands = &*format!( + " + cd {} + {} + exit", + $crate::helpers::in_directory($cwd), + $crate::helpers::DisplayPath::display_path(&$path) + ); + + let mut process = Command::new(helpers::executable_path()) + .stdin(Stdio::piped()) + .stdout(Stdio::piped()) + .stderr(Stdio::piped()) + .spawn() + .expect("couldn't run test"); + + let stdin = process.stdin.as_mut().expect("couldn't open stdin"); + stdin + .write_all(commands.as_bytes()) + .expect("couldn't write to stdin"); + + let output = process + .wait_with_output() + .expect("couldn't read from stdout/stderr"); + + let err = String::from_utf8_lossy(&output.stderr).into_owned(); + let out = String::from_utf8_lossy(&output.stdout).into_owned(); + let out = out.replace("\r\n", ""); + let out = out.replace("\n", ""); + (out, err) + }}; +} + pub enum Stub<'a> { FileWithContent(&'a str, &'a str), FileWithContentToBeTrimmed(&'a str, &'a str), @@ -177,6 +233,10 @@ impl Dirs { pub fn formats(&self) -> PathBuf { PathBuf::from(self.fixtures.join("formats")) } + + pub fn config_path(&self) -> PathBuf { + get_app_root(AppDataType::UserConfig, &nu::APP_INFO).unwrap() + } } impl Playground { @@ -227,8 +287,13 @@ impl Playground { playground_root.join(topic).display() )); + let root = dunce::canonicalize(playground_root).expect(&format!( + "Couldn't canonicalize tests root path {}", + playground_root.display() + )); + let dirs = Dirs { - root: PathBuf::from(playground_root), + root, test, fixtures, }; @@ -307,6 +372,13 @@ pub fn file_contents(full_path: impl AsRef) -> String { contents } +pub fn file_contents_binary(full_path: impl AsRef) -> Vec { + let mut file = std::fs::File::open(full_path.as_ref()).expect("can not open file"); + let mut contents = Vec::new(); + file.read_to_end(&mut contents).expect("can not read file"); + contents +} + pub fn line_ending() -> String { #[cfg(windows)] { @@ -319,15 +391,11 @@ pub fn line_ending() -> String { } } -pub fn normalize_string(input: &str) -> String { - #[cfg(windows)] - { - 
input.to_string() - } +pub fn delete_file_at(full_path: impl AsRef) { + let full_path = full_path.as_ref(); - #[cfg(not(windows))] - { - format!("\"{}\"", input) + if full_path.exists() { + std::fs::remove_file(full_path).expect("can not delete file"); } } @@ -369,13 +437,13 @@ pub fn in_directory(str: impl AsRef) -> String { str.as_ref().display().to_string() } - pub fn pipeline(commands: &str) -> String { - commands.lines() - .skip(1) - .map(|line| line.trim()) - .collect::>() - .join(" ") - .trim_end() - .to_string() + commands + .lines() + .skip(1) + .map(|line| line.trim()) + .collect::>() + .join(" ") + .trim_end() + .to_string() } diff --git a/tests/tests.rs b/tests/tests.rs index 4affb44223..14552a41ee 100644 --- a/tests/tests.rs +++ b/tests/tests.rs @@ -12,9 +12,13 @@ fn pipeline_helper() { | str --to-int | sum | echo "$it" - "#); + "#, + ); - assert_eq!(actual, r#"open los_tres_amigos.txt | from-csv | get rusty_luck | str --to-int | sum | echo "$it""#); + assert_eq!( + actual, + r#"open los_tres_amigos.txt | from-csv | get rusty_luck | str --to-int | sum | echo "$it""# + ); } #[test] @@ -34,9 +38,7 @@ fn external_has_correct_quotes() { r#"echo "hello world""# ); - let actual = h::normalize_string(&actual); - - assert_eq!(actual, r#""hello world""#); + assert_eq!(actual, r#"hello world"#); } #[test] @@ -54,6 +56,54 @@ fn add_plugin() { assert_eq!(actual, "1"); } +#[test] +fn read_plugin() { + let actual = nu!( + cwd: "tests/fixtures/formats", h::pipeline( + r#" + open fileA.txt + | read "{Name}={Value}" + | nth 1 + | get Value + | echo $it + "# + )); + + assert_eq!(actual, "StupidLongName"); +} + +#[test] +fn prepend_plugin() { + let actual = nu!( + cwd: "tests/fixtures/formats", h::pipeline( + r#" + open fileA.txt + | lines + | prepend "testme" + | nth 0 + | echo $it + "# + )); + + assert_eq!(actual, "testme"); +} + +#[test] +fn append_plugin() { + let actual = nu!( + cwd: "tests/fixtures/formats", h::pipeline( + r#" + open fileA.txt + | lines + | append "testme" + | nth 3 + | echo $it + "# + )); + + assert_eq!(actual, "testme"); +} + #[test] fn edit_plugin() { let actual = nu!(