Mirror of https://github.com/nushell/nushell.git (synced 2025-07-08 18:37:07 +02:00)

Compare commits (55 commits)
SHA1:

2590fcbe5c
09691ff866
16db368232
df87d90b8c
f2f01b8a4d
6c0190cd38
b26246bf12
36a4effbb2
9fca417f8c
d09e1148b2
493bc2b1c9
74b812228c
649b3804c1
df6a53f52e
c4af5df828
f94a3e15f5
75782f0f50
2b06ce27d3
72c241348b
ab2d2db987
07e05ef183
a986de8ad0
22cfe4391e
97d17311f4
0f6fd30619
e1ebd461d2
f000d5d0a1
574c5961c8
69708f7244
62c5df5fc6
92c855a412
d395816929
5e34ef6dff
d567c58cc1
4e0d7bc77c
32581497ef
d6df367c6b
4e6327de1d
b3d8666db0
1de7c3d033
962b258cc6
59697cab63
349af05da8
b3b3cf0689
5d59234f8d
4f7b423f36
f7043bf690
1297499d7a
bd0baa961c
4ee536f044
8581bec891
8bcbc8eeb3
c164ef5489
cc3653cfd9
7fc65067cf
.github/workflows/docker-publish.yml (vendored, 118)

@@ -1,118 +0,0 @@
name: Publish consumable Docker images

on:
  push:
    tags: ['v?[0-9]+.[0-9]+.[0-9]+*']

jobs:
  compile:
    runs-on: ubuntu-latest
    strategy:
      matrix:
        arch:
          - x86_64-unknown-linux-musl
          - x86_64-unknown-linux-gnu
    steps:
      - uses: actions/checkout@v2
      - name: Install rust-embedded/cross
        env: { VERSION: v0.1.16 }
        run: >-
          wget -nv https://github.com/rust-embedded/cross/releases/download/${VERSION}/cross-${VERSION}-x86_64-unknown-linux-gnu.tar.gz
          -O- | sudo tar xz -C /usr/local/bin/
      - name: compile for specific target
        env: { arch: '${{ matrix.arch }}' }
        run: |
          cross build --target ${{ matrix.arch }} --release
          # leave only the executable file
          rm -frd target/${{ matrix.arch }}/release/{*/*,*.d,*.rlib,.fingerprint}
          find . -empty -delete
      - uses: actions/upload-artifact@master
        with:
          name: ${{ matrix.arch }}
          path: target/${{ matrix.arch }}/release

  docker:
    name: Build and publish docker images
    needs: compile
    runs-on: ubuntu-latest
    env:
      DOCKER_REGISTRY: quay.io/nushell
      DOCKER_PASSWORD: ${{ secrets.DOCKER_REGISTRY }}
      DOCKER_USER: ${{ secrets.DOCKER_USER }}
    strategy:
      matrix:
        tag:
          - alpine
          - slim
          - debian
          - glibc-busybox
          - musl-busybox
          - musl-distroless
          - glibc-distroless
          - glibc
          - musl
        include:
          - { tag: alpine, base-image: alpine, arch: x86_64-unknown-linux-musl, plugin: true, use-patch: false}
          - { tag: slim, base-image: 'debian:stable-slim', arch: x86_64-unknown-linux-gnu, plugin: true, use-patch: false}
          - { tag: debian, base-image: debian, arch: x86_64-unknown-linux-gnu, plugin: true, use-patch: false}
          - { tag: glibc-busybox, base-image: 'busybox:glibc', arch: x86_64-unknown-linux-gnu, plugin: false, use-patch: true }
          - { tag: musl-busybox, base-image: 'busybox:musl', arch: x86_64-unknown-linux-musl, plugin: false, use-patch: false}
          - { tag: musl-distroless, base-image: 'gcr.io/distroless/static', arch: x86_64-unknown-linux-musl, plugin: false, use-patch: false}
          - { tag: glibc-distroless, base-image: 'gcr.io/distroless/cc', arch: x86_64-unknown-linux-gnu, plugin: false, use-patch: true }
          - { tag: glibc, base-image: scratch, arch: x86_64-unknown-linux-gnu, plugin: false, use-patch: false}
          - { tag: musl, base-image: scratch, arch: x86_64-unknown-linux-musl, plugin: false, use-patch: false}
    steps:
      - uses: actions/checkout@v2
      - uses: actions/download-artifact@master
        with: { name: '${{ matrix.arch }}', path: target/release }
      - name: Build and publish exact version
        run: |-
          export DOCKER_TAG=${GITHUB_REF##*/}-${{ matrix.tag }}
          export NU_BINS=target/release/$( [ ${{ matrix.plugin }} = true ] && echo nu* || echo nu )
          export PATCH=$([ ${{ matrix.use-patch }} = true ] && echo .${{ matrix.tag }} || echo '')
          chmod +x $NU_BINS

          echo ${DOCKER_PASSWORD} | docker login ${DOCKER_REGISTRY} -u ${DOCKER_USER} --password-stdin
          docker-compose --file docker/docker-compose.package.yml build
          docker-compose --file docker/docker-compose.package.yml push # exact version
        env:
          BASE_IMAGE: ${{ matrix.base-image }}

      #region semantics tagging
      - name: Retag and push with suffixed version
        run: |-
          VERSION=${GITHUB_REF##*/}

          latest_version=${VERSION%%%.*}-${{ matrix.tag }}
          latest_feature=${VERSION%%.*}-${{ matrix.tag }}
          latest_patch=${VERSION%.*}-${{ matrix.tag }}
          exact_version=${VERSION}-${{ matrix.tag }}

          tags=( ${latest_version} ${latest_feature} ${latest_patch} ${exact_version} )

          for tag in ${tags[@]}; do
            docker tag ${DOCKER_REGISTRY}/nu:${VERSION}-${{ matrix.tag }} ${DOCKER_REGISTRY}/nu:${tag}
            docker push ${DOCKER_REGISTRY}/nu:${tag}
          done

          # latest version
          docker tag ${DOCKER_REGISTRY}/nu:${VERSION}-${{ matrix.tag }} ${DOCKER_REGISTRY}/nu:${{ matrix.tag }}
          docker push ${DOCKER_REGISTRY}/nu:${{ matrix.tag }}

      - name: Retag and push debian as latest
        if: matrix.tag == 'debian'
        run: |-
          VERSION=${GITHUB_REF##*/}

          # ${latest features} ${latest patch} ${exact version}
          tags=( ${VERSION%%.*} ${VERSION%.*} ${VERSION} )

          for tag in ${tags[@]}; do
            docker tag ${DOCKER_REGISTRY}/nu:${VERSION}-${{ matrix.tag }} ${DOCKER_REGISTRY}/nu:${tag}
            docker push ${DOCKER_REGISTRY}/nu:${tag}
          done

          # latest version
          docker tag ${DOCKER_REGISTRY}/nu:${{ matrix.tag }} ${DOCKER_REGISTRY}/nu:latest
          docker push ${DOCKER_REGISTRY}/nu:latest
      #endregion semantics tagging
.github/workflows/release.yml (vendored, 271)

@@ -1,6 +1,7 @@
name: Create Release Draft

on:
  workflow_dispatch:
  push:
    tags: ['[0-9]+.[0-9]+.[0-9]+*']

@@ -28,6 +29,96 @@ jobs:
          command: build
          args: --release --all --features=extra

      - name: Compress binaries (nu)
        uses: svenstaro/upx-action@v2
        with:
          file: target/release/nu

      - name: Compress binaries (nu_plugin_inc)
        uses: svenstaro/upx-action@v2
        with:
          file: target/release/nu_plugin_inc

      - name: Compress binaries (nu_plugin_match)
        uses: svenstaro/upx-action@v2
        with:
          file: target/release/nu_plugin_match

      - name: Compress binaries (nu_plugin_textview)
        uses: svenstaro/upx-action@v2
        with:
          file: target/release/nu_plugin_textview

      - name: Compress binaries (nu_plugin_binaryview)
        uses: svenstaro/upx-action@v2
        with:
          file: target/release/nu_plugin_binaryview

      - name: Compress binaries (nu_plugin_chart_bar)
        uses: svenstaro/upx-action@v2
        with:
          file: target/release/nu_plugin_chart_bar

      - name: Compress binaries (nu_plugin_chart_line)
        uses: svenstaro/upx-action@v2
        with:
          file: target/release/nu_plugin_chart_line

      - name: Compress binaries (nu_plugin_from_bson)
        uses: svenstaro/upx-action@v2
        with:
          file: target/release/nu_plugin_from_bson

      - name: Compress binaries (nu_plugin_from_sqlite)
        uses: svenstaro/upx-action@v2
        with:
          file: target/release/nu_plugin_from_sqlite

      - name: Compress binaries (nu_plugin_from_mp4)
        uses: svenstaro/upx-action@v2
        with:
          file: target/release/nu_plugin_from_mp4

      - name: Compress binaries (nu_plugin_query_json)
        uses: svenstaro/upx-action@v2
        with:
          file: target/release/nu_plugin_query_json

      - name: Compress binaries (nu_plugin_s3)
        uses: svenstaro/upx-action@v2
        with:
          file: target/release/nu_plugin_s3

      - name: Compress binaries (nu_plugin_selector)
        uses: svenstaro/upx-action@v2
        with:
          file: target/release/nu_plugin_selector

      - name: Compress binaries (nu_plugin_start)
        uses: svenstaro/upx-action@v2
        with:
          file: target/release/nu_plugin_start

      - name: Compress binaries (nu_plugin_to_bson)
        uses: svenstaro/upx-action@v2
        with:
          file: target/release/nu_plugin_to_bson

      - name: Compress binaries (nu_plugin_to_sqlite)
        uses: svenstaro/upx-action@v2
        with:
          file: target/release/nu_plugin_to_sqlite

      - name: Compress binaries (nu_plugin_tree)
        uses: svenstaro/upx-action@v2
        with:
          file: target/release/nu_plugin_tree

      - name: Compress binaries (nu_plugin_xpath)
        uses: svenstaro/upx-action@v2
        with:
          file: target/release/nu_plugin_xpath

      - name: Create output directory
        run: mkdir output

@@ -70,6 +161,96 @@ jobs:
          command: build
          args: --release --all --features=extra

      - name: Compress binaries (nu)
        uses: svenstaro/upx-action@v2
        with:
          file: target/release/nu

      - name: Compress binaries (nu_plugin_inc)
        uses: svenstaro/upx-action@v2
        with:
          file: target/release/nu_plugin_inc

      - name: Compress binaries (nu_plugin_match)
        uses: svenstaro/upx-action@v2
        with:
          file: target/release/nu_plugin_match

      - name: Compress binaries (nu_plugin_textview)
        uses: svenstaro/upx-action@v2
        with:
          file: target/release/nu_plugin_textview

      - name: Compress binaries (nu_plugin_binaryview)
        uses: svenstaro/upx-action@v2
        with:
          file: target/release/nu_plugin_binaryview

      - name: Compress binaries (nu_plugin_chart_bar)
        uses: svenstaro/upx-action@v2
        with:
          file: target/release/nu_plugin_chart_bar

      - name: Compress binaries (nu_plugin_chart_line)
        uses: svenstaro/upx-action@v2
        with:
          file: target/release/nu_plugin_chart_line

      - name: Compress binaries (nu_plugin_from_bson)
        uses: svenstaro/upx-action@v2
        with:
          file: target/release/nu_plugin_from_bson

      - name: Compress binaries (nu_plugin_from_sqlite)
        uses: svenstaro/upx-action@v2
        with:
          file: target/release/nu_plugin_from_sqlite

      - name: Compress binaries (nu_plugin_from_mp4)
        uses: svenstaro/upx-action@v2
        with:
          file: target/release/nu_plugin_from_mp4

      - name: Compress binaries (nu_plugin_query_json)
        uses: svenstaro/upx-action@v2
        with:
          file: target/release/nu_plugin_query_json

      - name: Compress binaries (nu_plugin_s3)
        uses: svenstaro/upx-action@v2
        with:
          file: target/release/nu_plugin_s3

      - name: Compress binaries (nu_plugin_selector)
        uses: svenstaro/upx-action@v2
        with:
          file: target/release/nu_plugin_selector

      - name: Compress binaries (nu_plugin_start)
        uses: svenstaro/upx-action@v2
        with:
          file: target/release/nu_plugin_start

      - name: Compress binaries (nu_plugin_to_bson)
        uses: svenstaro/upx-action@v2
        with:
          file: target/release/nu_plugin_to_bson

      - name: Compress binaries (nu_plugin_to_sqlite)
        uses: svenstaro/upx-action@v2
        with:
          file: target/release/nu_plugin_to_sqlite

      - name: Compress binaries (nu_plugin_tree)
        uses: svenstaro/upx-action@v2
        with:
          file: target/release/nu_plugin_tree

      - name: Compress binaries (nu_plugin_xpath)
        uses: svenstaro/upx-action@v2
        with:
          file: target/release/nu_plugin_xpath

      - name: Create output directory
        run: mkdir output

@@ -114,6 +295,96 @@ jobs:
          command: build
          args: --release --all --features=extra

      - name: Compress binaries (nu.exe)
        uses: svenstaro/upx-action@v2
        with:
          file: target/release/nu.exe

      - name: Compress binaries (nu_plugin_inc.exe)
        uses: svenstaro/upx-action@v2
        with:
          file: target/release/nu_plugin_inc.exe

      - name: Compress binaries (nu_plugin_match.exe)
        uses: svenstaro/upx-action@v2
        with:
          file: target/release/nu_plugin_match.exe

      - name: Compress binaries (nu_plugin_textview.exe)
        uses: svenstaro/upx-action@v2
        with:
          file: target/release/nu_plugin_textview.exe

      - name: Compress binaries (nu_plugin_binaryview.exe)
        uses: svenstaro/upx-action@v2
        with:
          file: target/release/nu_plugin_binaryview.exe

      - name: Compress binaries (nu_plugin_chart_bar.exe)
        uses: svenstaro/upx-action@v2
        with:
          file: target/release/nu_plugin_chart_bar.exe

      - name: Compress binaries (nu_plugin_chart_line.exe)
        uses: svenstaro/upx-action@v2
        with:
          file: target/release/nu_plugin_chart_line.exe

      - name: Compress binaries (nu_plugin_from_bson.exe)
        uses: svenstaro/upx-action@v2
        with:
          file: target/release/nu_plugin_from_bson.exe

      - name: Compress binaries (nu_plugin_from_sqlite.exe)
        uses: svenstaro/upx-action@v2
        with:
          file: target/release/nu_plugin_from_sqlite.exe

      - name: Compress binaries (nu_plugin_from_mp4.exe)
        uses: svenstaro/upx-action@v2
        with:
          file: target/release/nu_plugin_from_mp4.exe

      - name: Compress binaries (nu_plugin_query_json.exe)
        uses: svenstaro/upx-action@v2
        with:
          file: target/release/nu_plugin_query_json.exe

      - name: Compress binaries (nu_plugin_s3.exe)
        uses: svenstaro/upx-action@v2
        with:
          file: target/release/nu_plugin_s3.exe

      - name: Compress binaries (nu_plugin_selector.exe)
        uses: svenstaro/upx-action@v2
        with:
          file: target/release/nu_plugin_selector.exe

      - name: Compress binaries (nu_plugin_start.exe)
        uses: svenstaro/upx-action@v2
        with:
          file: target/release/nu_plugin_start.exe

      - name: Compress binaries (nu_plugin_to_bson.exe)
        uses: svenstaro/upx-action@v2
        with:
          file: target/release/nu_plugin_to_bson.exe

      - name: Compress binaries (nu_plugin_to_sqlite.exe)
        uses: svenstaro/upx-action@v2
        with:
          file: target/release/nu_plugin_to_sqlite.exe

      - name: Compress binaries (nu_plugin_tree.exe)
        uses: svenstaro/upx-action@v2
        with:
          file: target/release/nu_plugin_tree.exe

      - name: Compress binaries (nu_plugin_xpath.exe)
        uses: svenstaro/upx-action@v2
        with:
          file: target/release/nu_plugin_xpath.exe

      - name: Create output directory
        run: mkdir output
.github/workflows/stale.yml (vendored, 11)

@@ -19,11 +19,10 @@ jobs:
        operations-per-run: 520
        enable-statistics: true
        repo-token: ${{ secrets.GITHUB_TOKEN }}
        stale-issue-message: 'This issue is being marked stale because it has been open for 90 days without activity. If you feel that this is in error, please comment below and we will keep it marked as active.'
        stale-pr-message: 'This PR is being marked stale because it has been open for 45 days without activity. If this PR is still active, please comment below and we will keep it marked as active.'
        close-issue-message: 'This issue has been marked stale for more than 10 days without activity. Closing this issue, but if you find that the issue is still valid, please reopen.'
        close-pr-message: 'This PR has been marked stale for more than 10 days without activity. Closing this PR, but if you are still working on it, please reopen.'
        close-issue-message: 'This issue has been marked stale for more than 100000 days without activity. Closing this issue, but if you find that the issue is still valid, please reopen.'
        close-pr-message: 'This PR has been marked stale for more than 100 days without activity. Closing this PR, but if you are still working on it, please reopen.'
        days-before-issue-stale: 90
        days-before-pr-stale: 45
        days-before-issue-close: 10
        days-before-pr-close: 10
        days-before-issue-close: 100000
        days-before-pr-close: 100
        exempt-issue-labels: 'exempt,keep'
.gitpod.Dockerfile (vendored, 2)

@@ -2,7 +2,7 @@ FROM gitpod/workspace-full

# Gitpod will not rebuild Nushell's dev image unless *some* change is made to this Dockerfile.
# To force a rebuild, simply increase this counter:
ENV TRIGGER_REBUILD 1
ENV TRIGGER_REBUILD 2

USER gitpod
Cargo.lock (generated, 690)

File diff suppressed because it is too large.
Cargo.toml (63)

@@ -10,7 +10,7 @@ license = "MIT"
name = "nu"
readme = "README.md"
repository = "https://github.com/nushell/nushell"
version = "0.37.0"
version = "0.40.0"

[workspace]
members = ["crates/*/"]

@@ -18,34 +18,34 @@ members = ["crates/*/"]
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

[dependencies]
nu-cli = { version = "0.37.0", path="./crates/nu-cli", default-features=false }
nu-command = { version = "0.37.0", path="./crates/nu-command" }
nu-completion = { version = "0.37.0", path="./crates/nu-completion" }
nu-data = { version = "0.37.0", path="./crates/nu-data" }
nu-engine = { version = "0.37.0", path="./crates/nu-engine" }
nu-errors = { version = "0.37.0", path="./crates/nu-errors" }
nu-parser = { version = "0.37.0", path="./crates/nu-parser" }
nu-path = { version = "0.37.0", path="./crates/nu-path" }
nu-plugin = { version = "0.37.0", path="./crates/nu-plugin" }
nu-protocol = { version = "0.37.0", path="./crates/nu-protocol" }
nu-source = { version = "0.37.0", path="./crates/nu-source" }
nu-value-ext = { version = "0.37.0", path="./crates/nu-value-ext" }
nu-cli = { version = "0.40.0", path="./crates/nu-cli", default-features=false }
nu-command = { version = "0.40.0", path="./crates/nu-command" }
nu-completion = { version = "0.40.0", path="./crates/nu-completion" }
nu-data = { version = "0.40.0", path="./crates/nu-data" }
nu-engine = { version = "0.40.0", path="./crates/nu-engine" }
nu-errors = { version = "0.40.0", path="./crates/nu-errors" }
nu-parser = { version = "0.40.0", path="./crates/nu-parser" }
nu-path = { version = "0.40.0", path="./crates/nu-path" }
nu-plugin = { version = "0.40.0", path="./crates/nu-plugin" }
nu-protocol = { version = "0.40.0", path="./crates/nu-protocol" }
nu-source = { version = "0.40.0", path="./crates/nu-source" }
nu-value-ext = { version = "0.40.0", path="./crates/nu-value-ext" }

nu_plugin_binaryview = { version = "0.37.0", path="./crates/nu_plugin_binaryview", optional=true }
nu_plugin_chart = { version = "0.37.0", path="./crates/nu_plugin_chart", optional=true }
nu_plugin_from_bson = { version = "0.37.0", path="./crates/nu_plugin_from_bson", optional=true }
nu_plugin_from_sqlite = { version = "0.37.0", path="./crates/nu_plugin_from_sqlite", optional=true }
nu_plugin_inc = { version = "0.37.0", path="./crates/nu_plugin_inc", optional=true }
nu_plugin_match = { version = "0.37.0", path="./crates/nu_plugin_match", optional=true }
nu_plugin_query_json = { version = "0.37.0", path="./crates/nu_plugin_query_json", optional=true }
nu_plugin_s3 = { version = "0.37.0", path="./crates/nu_plugin_s3", optional=true }
nu_plugin_selector = { version = "0.37.0", path="./crates/nu_plugin_selector", optional=true }
nu_plugin_start = { version = "0.37.0", path="./crates/nu_plugin_start", optional=true }
nu_plugin_textview = { version = "0.37.0", path="./crates/nu_plugin_textview", optional=true }
nu_plugin_to_bson = { version = "0.37.0", path="./crates/nu_plugin_to_bson", optional=true }
nu_plugin_to_sqlite = { version = "0.37.0", path="./crates/nu_plugin_to_sqlite", optional=true }
nu_plugin_tree = { version = "0.37.0", path="./crates/nu_plugin_tree", optional=true }
nu_plugin_xpath = { version = "0.37.0", path="./crates/nu_plugin_xpath", optional=true }
nu_plugin_binaryview = { version = "0.40.0", path="./crates/nu_plugin_binaryview", optional=true }
nu_plugin_chart = { version = "0.40.0", path="./crates/nu_plugin_chart", optional=true }
nu_plugin_from_bson = { version = "0.40.0", path="./crates/nu_plugin_from_bson", optional=true }
nu_plugin_from_sqlite = { version = "0.40.0", path="./crates/nu_plugin_from_sqlite", optional=true }
nu_plugin_inc = { version = "0.40.0", path="./crates/nu_plugin_inc", optional=true }
nu_plugin_match = { version = "0.40.0", path="./crates/nu_plugin_match", optional=true }
nu_plugin_query_json = { version = "0.40.0", path="./crates/nu_plugin_query_json", optional=true }
nu_plugin_s3 = { version = "0.40.0", path="./crates/nu_plugin_s3", optional=true }
nu_plugin_selector = { version = "0.40.0", path="./crates/nu_plugin_selector", optional=true }
nu_plugin_start = { version = "0.40.0", path="./crates/nu_plugin_start", optional=true }
nu_plugin_textview = { version = "0.40.0", path="./crates/nu_plugin_textview", optional=true }
nu_plugin_to_bson = { version = "0.40.0", path="./crates/nu_plugin_to_bson", optional=true }
nu_plugin_to_sqlite = { version = "0.40.0", path="./crates/nu_plugin_to_sqlite", optional=true }
nu_plugin_tree = { version = "0.40.0", path="./crates/nu_plugin_tree", optional=true }
nu_plugin_xpath = { version = "0.40.0", path="./crates/nu_plugin_xpath", optional=true }

# Required to bootstrap the main binary
ctrlc = { version="3.1.7", optional=true }

@@ -53,7 +53,7 @@ futures = { version="0.3.12", features=["compat", "io-compat"] }
itertools = "0.10.0"

[dev-dependencies]
nu-test-support = { version = "0.37.0", path="./crates/nu-test-support" }
nu-test-support = { version = "0.40.0", path="./crates/nu-test-support" }
serial_test = "0.5.1"
hamcrest2 = "0.3.0"
rstest = "0.10.0"

@@ -127,9 +127,6 @@ tree = ["nu_plugin_tree"]
xpath = ["nu_plugin_xpath"]
zip-support = ["nu-command/zip"]

#This is disabled in extra for now
table-pager = ["nu-command/table-pager"]

#dataframe feature for nushell
dataframe = [
    "nu-engine/dataframe",

@@ -141,7 +138,7 @@ dataframe = [
]

[profile.release]
opt-level = "z" # Optimize for size.
opt-level = "s" # Optimize for size.

# Core plugins that ship with `cargo install nu` by default
# Currently, Cargo limits us to installing only one binary
LICENSE (2)

@@ -1,6 +1,6 @@
MIT License

Copyright (c) 2019 - 2021 Yehuda Katz, Jonathan Turner
Copyright (c) 2019 - 2021 Nushell Project

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
README.md (49)

@@ -68,7 +68,7 @@ cargo install nu
To install Nu via the [Windows Package Manager](https://aka.ms/winget-cli):

```shell
winget install nu
winget install nushell
```

You can also build Nu yourself with all the bells and whistles (be sure to have installed the [dependencies](https://www.nushell.sh/book/installation.html#dependencies) for your platform), once you have checked out this repo with git:

@@ -76,53 +76,6 @@ You can also build Nu yourself with all the bells and whistles (be sure to have
```shell
cargo build --workspace --features=extra
```

### Docker

#### Quickstart

Want to try Nu right away? Execute the following to get started.

```shell
docker run -it quay.io/nushell/nu:latest
```

#### Guide

If you want to pull a pre-built container, you can browse tags for the [nushell organization](https://quay.io/organization/nushell)
on Quay.io. Pulling a container would come down to:

```shell
docker pull quay.io/nushell/nu
docker pull quay.io/nushell/nu-base
```

Both "nu-base" and "nu" provide the nu binary, however, nu-base also includes the source code at `/code`
in the container and all dependencies.

Optionally, you can also build the containers locally using the [dockerfiles provided](docker):
To build the base image:

```shell
docker build -f docker/Dockerfile.nu-base -t nushell/nu-base .
```

And then to build the smaller container (using a Multistage build):

```shell
docker build -f docker/Dockerfile -t nushell/nu .
```

Either way, you can run either container as follows:

```shell
docker run -it nushell/nu-base
docker run -it nushell/nu
/> exit
```

The second container is a bit smaller if the size is important to you.

### Packaging status

[](https://repology.org/project/nushell/versions)
@@ -9,7 +9,7 @@ description = "Library for ANSI terminal colors and styles (bold, underline)"
edition = "2018"
license = "MIT"
name = "nu-ansi-term"
version = "0.37.0"
version = "0.40.0"

[lib]
doctest = false

@@ -21,7 +21,6 @@ derive_serde_style = ["serde"]
[dependencies]
overload = "0.1.1"
serde = { version="1.0.90", features=["derive"], optional=true }
itertools = "0.10.0"

# [dependencies.serde]
# version = "1.0.90"

@@ -613,7 +613,7 @@ mod serde_json_tests {
        let serialized = serde_json::to_string(&color).unwrap();
        let deserialized: Color = serde_json::from_str(&serialized).unwrap();

        assert_eq!(color, &deserialized);
        assert_eq!(color, deserialized);
    }
}
@@ -4,23 +4,24 @@ description = "CLI for nushell"
edition = "2018"
license = "MIT"
name = "nu-cli"
version = "0.37.0"
version = "0.40.0"
build = "build.rs"

[lib]
doctest = false

[dependencies]
nu-completion = { version = "0.37.0", path="../nu-completion" }
nu-command = { version = "0.37.0", path="../nu-command" }
nu-data = { version = "0.37.0", path="../nu-data" }
nu-engine = { version = "0.37.0", path="../nu-engine" }
nu-errors = { version = "0.37.0", path="../nu-errors" }
nu-parser = { version = "0.37.0", path="../nu-parser" }
nu-protocol = { version = "0.37.0", path="../nu-protocol" }
nu-source = { version = "0.37.0", path="../nu-source" }
nu-stream = { version = "0.37.0", path="../nu-stream" }
nu-ansi-term = { version = "0.37.0", path="../nu-ansi-term" }
nu-completion = { version = "0.40.0", path="../nu-completion" }
nu-command = { version = "0.40.0", path="../nu-command" }
nu-data = { version = "0.40.0", path="../nu-data" }
nu-engine = { version = "0.40.0", path="../nu-engine" }
nu-errors = { version = "0.40.0", path="../nu-errors" }
nu-parser = { version = "0.40.0", path="../nu-parser" }
nu-protocol = { version = "0.40.0", path="../nu-protocol" }
nu-source = { version = "0.40.0", path="../nu-source" }
nu-stream = { version = "0.40.0", path="../nu-stream" }
nu-ansi-term = { version = "0.40.0", path="../nu-ansi-term" }
nu-path = { version = "0.40.0", path="../nu-path" }

indexmap ="1.6.1"
log = "0.4.14"
@@ -24,6 +24,7 @@ use rustyline::{self, error::ReadlineError};

use nu_errors::ShellError;
use nu_parser::ParserScope;
use nu_path::expand_tilde;
use nu_protocol::{hir::ExternalRedirection, ConfigPath, UntaggedValue, Value};

use log::trace;

@@ -54,7 +55,7 @@ pub fn search_paths() -> Vec<std::path::PathBuf> {
    {
        for pipeline in pipelines {
            if let Ok(plugin_dir) = pipeline.as_string() {
                search_paths.push(PathBuf::from(plugin_dir));
                search_paths.push(expand_tilde(plugin_dir));
            }
        }
    }

@@ -371,7 +372,7 @@ pub fn cli(
            LineResult::ClearHistory => {
                if options.save_history {
                    rl.clear_history();
                    let _ = rl.append_history(&history_path);
                    std::fs::remove_file(&history_path)?;
                }
            }
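Aside: the `search_paths` hunk above swaps `PathBuf::from` for the `expand_tilde` helper it now imports from `nu_path`, so a configured plugin directory such as `~/.config/nu/plugins` is resolved against the home directory instead of being searched literally. A minimal sketch of that pattern, assuming only the `expand_tilde` import the hunk itself adds (the helper name and example path below are illustrative, not from the repository):

```rust
use std::path::PathBuf;

use nu_path::expand_tilde;

/// Illustrative helper: turn a user-configured plugin directory string into a
/// search path. Without expand_tilde the leading "~" would be kept verbatim;
/// with it, the path is resolved under the user's home directory.
fn plugin_search_path(configured: &str) -> PathBuf {
    expand_tilde(configured)
}

fn main() {
    // Hypothetical configured value, shown only to demonstrate the expansion.
    println!("{}", plugin_search_path("~/.config/nu/plugins").display());
}
```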
@@ -5,28 +5,28 @@ description = "CLI for nushell"
edition = "2018"
license = "MIT"
name = "nu-command"
version = "0.37.0"
version = "0.40.0"

[lib]
doctest = false

[dependencies]
nu-data = { version = "0.37.0", path="../nu-data" }
nu-engine = { version = "0.37.0", path="../nu-engine" }
nu-errors = { version = "0.37.0", path="../nu-errors" }
nu-json = { version = "0.37.0", path="../nu-json" }
nu-path = { version = "0.37.0", path="../nu-path" }
nu-parser = { version = "0.37.0", path="../nu-parser" }
nu-plugin = { version = "0.37.0", path="../nu-plugin" }
nu-protocol = { version = "0.37.0", path="../nu-protocol" }
nu-serde = { version = "0.37.0", path="../nu-serde" }
nu-source = { version = "0.37.0", path="../nu-source" }
nu-stream = { version = "0.37.0", path="../nu-stream" }
nu-table = { version = "0.37.0", path="../nu-table" }
nu-test-support = { version = "0.37.0", path="../nu-test-support" }
nu-value-ext = { version = "0.37.0", path="../nu-value-ext" }
nu-ansi-term = { version = "0.37.0", path="../nu-ansi-term" }
nu-pretty-hex = { version = "0.37.0", path="../nu-pretty-hex" }
nu-data = { version = "0.40.0", path="../nu-data" }
nu-engine = { version = "0.40.0", path="../nu-engine" }
nu-errors = { version = "0.40.0", path="../nu-errors" }
nu-json = { version = "0.40.0", path="../nu-json" }
nu-path = { version = "0.40.0", path="../nu-path" }
nu-parser = { version = "0.40.0", path="../nu-parser" }
nu-plugin = { version = "0.40.0", path="../nu-plugin" }
nu-protocol = { version = "0.40.0", path="../nu-protocol" }
nu-serde = { version = "0.40.0", path="../nu-serde" }
nu-source = { version = "0.40.0", path="../nu-source" }
nu-stream = { version = "0.40.0", path="../nu-stream" }
nu-table = { version = "0.40.0", path="../nu-table" }
nu-test-support = { version = "0.40.0", path="../nu-test-support" }
nu-value-ext = { version = "0.40.0", path="../nu-value-ext" }
nu-ansi-term = { version = "0.40.0", path="../nu-ansi-term" }
nu-pretty-hex = { version = "0.40.0", path="../nu-pretty-hex" }

url = "2.2.1"
mime = "0.3.16"

@@ -34,25 +34,19 @@ Inflector = "0.11"
arboard = { version="1.1.0", optional=true }
base64 = "0.13.0"
bigdecimal = { package = "bigdecimal-rs", version = "0.2.1", features = ["serde"] }
byte-unit = "4.0.9"
bytes = "1.0.1"
calamine = "0.18.0"
chrono = { version="0.4.19", features=["serde"] }
chrono-tz = "0.5.3"
codespan-reporting = "0.11.0"
crossterm = { version="0.19.0", optional=true }
csv = "1.1.3"
ctrlc = { version="3.1.7", optional=true }
derive-new = "0.5.8"
directories-next = "2.0.0"
dirs-next = "2.0.0"
dtparse = "1.2.0"
eml-parser = "0.1.0"
encoding_rs = "0.8.28"
filesize = "0.2.0"
fs_extra = "1.2.0"
futures = { version="0.3.12", features=["compat", "io-compat"] }
getset = "0.1.1"
glob = "0.3.0"
htmlescape = "0.3.1"
ical = "0.7.0"

@@ -62,37 +56,28 @@ lazy_static = "1.*"
log = "0.4.14"
md-5 = "0.9.1"
meval = "0.2.0"
minus = { version="3.4.0", optional=true, features=["async_std_lib", "search"] }
num-bigint = { version="0.3.1", features=["serde"] }
num-format = { version="0.4.0", features=["with-num-bigint"] }
num-traits = "0.2.14"
parking_lot = "0.11.1"
pin-utils = "0.1.0"
query_interface = "0.3.5"
quick-xml = "0.22"
rand = "0.8"
rayon = "1.5.0"
regex = "1.4.3"
reqwest = {version = "0.11", optional = true }
roxmltree = "0.14.0"
rust-embed = "5.9.0"
rustyline = { version="9.0.0", optional=true }
serde = { version="1.0.123", features=["derive"] }
serde_bytes = "0.11.5"
serde_ini = "0.2.0"
serde_json = "1.0.61"
serde_urlencoded = "0.7.0"
serde_yaml = "0.8.16"
sha2 = "0.9.3"
strip-ansi-escapes = "0.1.0"
sxd-document = "0.3.2"
sxd-xpath = "0.4.2"
sysinfo = { version = "0.20.2", optional = true }
thiserror = "1.0.26"
tempfile = "3.2.0"
term = { version="0.7.0", optional=true }
term_size = "0.3.2"
termcolor = "1.1.2"
titlecase = "1.1.0"
tokio = { version = "1", features = ["rt-multi-thread"], optional = true }
toml = "0.5.8"

@@ -104,9 +89,9 @@ zip = { version="0.5.9", optional=true }
digest = "0.9.0"

[dependencies.polars]
version = "0.15.1"
version = "0.17.0"
optional = true
features = ["parquet", "json", "random", "pivot", "strings", "is_in", "temporal"]
features = ["parquet", "json", "random", "pivot", "strings", "is_in", "temporal", "cum_agg", "rolling_window"]

[target.'cfg(unix)'.dependencies]
umask = "1.0.0"

@@ -115,13 +100,8 @@ users = "0.11.0"
# TODO this will be possible with new dependency resolver
# (currently on nightly behind -Zfeatures=itarget):
# https://github.com/rust-lang/cargo/issues/7914
#[target.'cfg(not(windows))'.dependencies]
#num-format = {version = "0.4", features = ["with-system-locale"]}

[dependencies.rusqlite]
features = ["bundled", "blob"]
optional = true
version = "0.25.3"
# [target.'cfg(not(windows))'.dependencies]
# num-format = { version = "0.4", features = ["with-system-locale"] }

[build-dependencies]
shadow-rs = "0.6"

@@ -136,9 +116,8 @@ clipboard-cli = ["arboard"]
rustyline-support = ["rustyline"]
stable = []
trash-support = ["trash"]
table-pager = ["minus", "crossterm"]
dataframe = ["nu-protocol/dataframe", "polars"]
fetch = ["reqwest", "tokio"]
post = ["reqwest", "tokio"]
sys = ["sysinfo"]
ps = ["sysinfo"]
ps = ["sysinfo"]
@@ -38,7 +38,7 @@ impl WholeStreamCommand for SubCommand {
            },
            Example {
                description: "Set coloring options",
                example: "config set color_config [[header_align header_bold]; [left $true]]",
                example: "config set color_config [[header_align header_color]; [left white_bold]]",
                result: None,
            },
            Example {
crates/nu-command/src/commands/conversions/into/column_path.rs (new file, 118)

@@ -0,0 +1,118 @@
use crate::prelude::*;
use nu_engine::WholeStreamCommand;
use nu_errors::ShellError;
use nu_protocol::{ColumnPath, Primitive, Signature, SyntaxShape, UntaggedValue, Value};

pub struct SubCommand;

impl WholeStreamCommand for SubCommand {
    fn name(&self) -> &str {
        "into column_path"
    }

    fn signature(&self) -> Signature {
        Signature::build("into column_path").rest(
            "rest",
            SyntaxShape::ColumnPath,
            "values to convert to column_path",
        )
    }

    fn usage(&self) -> &str {
        "Convert value to column path"
    }

    fn run(&self, args: CommandArgs) -> Result<OutputStream, ShellError> {
        into_filepath(args)
    }

    fn examples(&self) -> Vec<Example> {
        vec![
            Example {
                description: "Convert string to column_path in table",
                example: "echo [[name]; ['/dev/null'] ['C:\\Program Files'] ['../../Cargo.toml']] | into column_path name",
                result: Some(vec![
                    UntaggedValue::row(indexmap! {
                        "name".to_string() => UntaggedValue::column_path("/dev/null", Span::unknown()).into(),
                    })
                    .into(),
                    UntaggedValue::row(indexmap! {
                        "name".to_string() => UntaggedValue::column_path("C:\\Program Files", Span::unknown()).into(),
                    })
                    .into(),
                    UntaggedValue::row(indexmap! {
                        "name".to_string() => UntaggedValue::column_path("../../Cargo.toml", Span::unknown()).into(),
                    })
                    .into(),
                ]),
            },
            Example {
                description: "Convert string to column_path",
                example: "echo 'Cargo.toml' | into column_path",
                result: Some(vec![UntaggedValue::column_path("Cargo.toml", Span::unknown()).into()]),
            },
        ]
    }
}

fn into_filepath(args: CommandArgs) -> Result<OutputStream, ShellError> {
    let column_paths: Vec<ColumnPath> = args.rest(0)?;

    Ok(args
        .input
        .map(move |v| {
            if column_paths.is_empty() {
                action(&v, v.tag())
            } else {
                let mut ret = v;
                for path in &column_paths {
                    ret = ret.swap_data_by_column_path(
                        path,
                        Box::new(move |old| action(old, old.tag())),
                    )?;
                }

                Ok(ret)
            }
        })
        .into_input_stream())
}

pub fn action(input: &Value, tag: impl Into<Tag>) -> Result<Value, ShellError> {
    let tag = tag.into();
    match &input.value {
        UntaggedValue::Primitive(prim) => Ok(UntaggedValue::column_path(
            match prim {
                Primitive::String(a_string) => a_string,
                _ => {
                    return Err(ShellError::unimplemented(
                        "'into column_path' for non-string primitives",
                    ))
                }
            },
            Span::unknown(),
        )
        .into_value(&tag)),
        UntaggedValue::Row(_) => Err(ShellError::labeled_error(
            "specify column name to use, with 'into column_path COLUMN'",
            "found table",
            tag,
        )),
        _ => Err(ShellError::unimplemented(
            "'into column_path' for unsupported type",
        )),
    }
}

#[cfg(test)]
mod tests {
    use super::ShellError;
    use super::SubCommand;

    #[test]
    fn examples_work_as_expected() -> Result<(), ShellError> {
        use crate::examples::test as test_examples;

        test_examples(SubCommand {})
    }
}
@@ -1,4 +1,5 @@
mod binary;
mod column_path;
mod command;
mod filepath;
mod filesize;

@@ -7,6 +8,7 @@ pub mod string;

pub use self::filesize::SubCommand as IntoFilesize;
pub use binary::SubCommand as IntoBinary;
pub use column_path::SubCommand as IntoColumnPath;
pub use command::Command as Into;
pub use filepath::SubCommand as IntoFilepath;
pub use int::SubCommand as IntoInt;
@@ -101,17 +101,14 @@ fn if_command(args: CommandArgs) -> Result<OutputStream, ShellError> {

    //FIXME: should we use the scope that's brought in as well?
    let condition = evaluate_baseline_expr(cond, &context);
    match condition {
    let result = match condition {
        Ok(condition) => match condition.as_bool() {
            Ok(b) => {
                let result = if b {
                if b {
                    run_block(&then_case.block, &context, input, external_redirection)
                } else {
                    run_block(&else_case.block, &context, input, external_redirection)
                };
                context.scope.exit_scope();

                result
            }
            }
            Err(e) => Ok(OutputStream::from_stream(
                vec![UntaggedValue::Error(e).into_untagged_value()].into_iter(),

@@ -120,13 +117,16 @@ fn if_command(args: CommandArgs) -> Result<OutputStream, ShellError> {
        Err(e) => Ok(OutputStream::from_stream(
            vec![UntaggedValue::Error(e).into_untagged_value()].into_iter(),
        )),
    }
    };
    context.scope.exit_scope();
    result
}

#[cfg(test)]
mod tests {
    use super::If;
    use super::ShellError;
    use nu_test_support::nu;

    #[test]
    fn examples_work_as_expected() -> Result<(), ShellError> {

@@ -134,4 +134,21 @@ mod tests {

        test_examples(If {})
    }

    #[test]
    fn if_doesnt_leak_on_error() {
        let actual = nu!(
            ".",
            r#"
            def test-leak [] {
                let var = "hello"
                if 0 == "" {echo ok} {echo not}
            }
            test-leak
            echo $var
            "#
        );

        assert!(actual.err.contains("unknown variable"));
    }
}
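Aside: the restructuring above is easier to follow outside diff form. The point of the change is that `context.scope.exit_scope()` now runs once, after the whole `match`, so the scope is popped even when the condition fails to evaluate (which is what the new `if_doesnt_leak_on_error` test checks). A minimal sketch of the "bind the result, clean up once" pattern, with generic names rather than the actual nushell types:

```rust
// Illustrative only: `enter`/`exit` stand in for entering and leaving a scope.
// The key point is that `exit` runs on every path, success or error, because
// the whole match result is bound first and cleanup happens after it.
fn with_scope<T, E>(
    enter: impl FnOnce(),
    exit: impl FnOnce(),
    body: impl FnOnce() -> Result<T, E>,
) -> Result<T, E> {
    enter();
    let result = body(); // may be Ok or Err
    exit(); // runs regardless, so nothing defined inside the scope leaks out
    result
}

fn main() {
    let ok: Result<i32, &str> = with_scope(|| println!("enter"), || println!("exit"), || Ok(1));
    let err: Result<i32, &str> = with_scope(|| println!("enter"), || println!("exit"), || Err("boom"));
    println!("{:?} {:?}", ok, err);
}
```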
@@ -69,13 +69,11 @@ pub fn source(args: CommandArgs) -> Result<OutputStream, ShellError> {
    for lib_path in dir {
        match lib_path {
            Ok(name) => {
                let path = canonicalize_with(&source_file, name).map_err(|e| {
                    ShellError::labeled_error(
                        format!("Can't load source file. Reason: {}", e.to_string()),
                        "Can't load this file",
                        filename.span(),
                    )
                })?;
                let path = if let Ok(p) = canonicalize_with(&source_file, name) {
                    p
                } else {
                    continue;
                };

                if let Ok(contents) = std::fs::read_to_string(path) {
                    let result = script::run_script_standalone(contents, true, ctx, false);
@@ -166,7 +166,7 @@ This will get the 3rd (note that `nth` is zero-based) row in the table created
by the `ls` command. You can use `nth` on any table created by other commands
as well.

You can also access the column of data in one of two ways. If you want to want
You can also access the column of data in one of two ways. If you want
to keep the column as part of a new table, you can use `select`.
```
ls | select name
```

@@ -274,7 +274,7 @@ This can be helpful if you want to later processes these values.
The `echo` command can pair well with the `each` command which can run
code on each row, or item, of input.

You can continue to learn more about the `echo` command by running:
You can continue to learn more about the `each` command by running:
```
tutor each
```
@@ -121,7 +121,7 @@ fn command(mut args: CommandArgs) -> Result<OutputStream, ShellError> {
    let tail = df.as_ref().get_columns().iter().map(|col| {
        let count = col.len() as f64;

        let sum = match col.sum_as_series().cast_with_dtype(&DataType::Float64) {
        let sum = match col.sum_as_series().cast(&DataType::Float64) {
            Ok(ca) => match ca.get(0) {
                AnyValue::Float64(v) => Some(v),
                _ => None,

@@ -144,7 +144,7 @@ fn command(mut args: CommandArgs) -> Result<OutputStream, ShellError> {
            _ => None,
        };

        let min = match col.min_as_series().cast_with_dtype(&DataType::Float64) {
        let min = match col.min_as_series().cast(&DataType::Float64) {
            Ok(ca) => match ca.get(0) {
                AnyValue::Float64(v) => Some(v),
                _ => None,

@@ -153,7 +153,7 @@ fn command(mut args: CommandArgs) -> Result<OutputStream, ShellError> {
        };

        let q_25 = match col.quantile_as_series(0.25) {
            Ok(ca) => match ca.cast_with_dtype(&DataType::Float64) {
            Ok(ca) => match ca.cast(&DataType::Float64) {
                Ok(ca) => match ca.get(0) {
                    AnyValue::Float64(v) => Some(v),
                    _ => None,

@@ -164,7 +164,7 @@ fn command(mut args: CommandArgs) -> Result<OutputStream, ShellError> {
        };

        let q_50 = match col.quantile_as_series(0.50) {
            Ok(ca) => match ca.cast_with_dtype(&DataType::Float64) {
            Ok(ca) => match ca.cast(&DataType::Float64) {
                Ok(ca) => match ca.get(0) {
                    AnyValue::Float64(v) => Some(v),
                    _ => None,

@@ -175,7 +175,7 @@ fn command(mut args: CommandArgs) -> Result<OutputStream, ShellError> {
        };

        let q_75 = match col.quantile_as_series(0.75) {
            Ok(ca) => match ca.cast_with_dtype(&DataType::Float64) {
            Ok(ca) => match ca.cast(&DataType::Float64) {
                Ok(ca) => match ca.get(0) {
                    AnyValue::Float64(v) => Some(v),
                    _ => None,

@@ -185,7 +185,7 @@ fn command(mut args: CommandArgs) -> Result<OutputStream, ShellError> {
            Err(_) => None,
        };

        let max = match col.max_as_series().cast_with_dtype(&DataType::Float64) {
        let max = match col.max_as_series().cast(&DataType::Float64) {
            Ok(ca) => match ca.get(0) {
                AnyValue::Float64(v) => Some(v),
                _ => None,
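Aside: the repeated edit in the hunks above is a polars API rename that comes with the 0.15 to 0.17 bump in Cargo.toml: `cast_with_dtype` becomes `cast`. A minimal consolidated sketch of the resulting pattern, lifted from the shape of the code above and assuming that same polars version (the helper name is illustrative):

```rust
use polars::prelude::*;

// Illustrative helper mirroring the describe command's pattern after the
// rename: cast a column aggregate to Float64 and read back the first value.
fn first_as_f64(col: &Series) -> Option<f64> {
    match col.cast(&DataType::Float64) {
        Ok(ca) => match ca.get(0) {
            AnyValue::Float64(v) => Some(v),
            _ => None,
        },
        Err(_) => None,
    }
}

fn main() {
    let s = Series::new("x", &[1i64, 2, 3]);
    // sum_as_series() is the same call the describe command uses above.
    println!("{:?}", first_as_f64(&s.sum_as_series()));
}
```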
@@ -44,6 +44,12 @@ impl WholeStreamCommand for DataFrame {
                "type of join. Inner by default",
                Some('t'),
            )
            .named(
                "suffix",
                SyntaxShape::String,
                "suffix for the columns of the right dataframe",
                Some('s'),
            )
    }

    fn run(&self, args: CommandArgs) -> Result<OutputStream, ShellError> {

@@ -104,6 +110,7 @@ fn command(mut args: CommandArgs) -> Result<OutputStream, ShellError> {
    let r_df: Value = args.req(0)?;
    let l_col: Vec<Value> = args.req_named("left")?;
    let r_col: Vec<Value> = args.req_named("right")?;
    let r_suffix: Option<Tagged<String>> = args.get_flag("suffix")?;
    let join_type_op: Option<Tagged<String>> = args.get_flag("type")?;

    let join_type = match join_type_op {

@@ -124,6 +131,8 @@ fn command(mut args: CommandArgs) -> Result<OutputStream, ShellError> {
        },
    };

    let suffix = r_suffix.map(|s| s.item);

    let (l_col_string, l_col_span) = convert_columns(&l_col, &tag)?;
    let (r_col_string, r_col_span) = convert_columns(&r_col, &tag)?;

@@ -142,7 +151,13 @@ fn command(mut args: CommandArgs) -> Result<OutputStream, ShellError> {
            )?;

            df.as_ref()
                .join(r_df.as_ref(), &l_col_string, &r_col_string, join_type)
                .join(
                    r_df.as_ref(),
                    &l_col_string,
                    &r_col_string,
                    join_type,
                    suffix,
                )
                .map_err(|e| parse_polars_error::<&str>(&e, &l_col_span, None))
        }
        _ => Err(ShellError::labeled_error(
@@ -8,7 +8,7 @@ use nu_protocol::{
};

use nu_source::Tagged;
use polars::prelude::{CsvEncoding, CsvReader, JsonReader, ParquetReader, PolarsError, SerReader};
use polars::prelude::{CsvEncoding, CsvReader, JsonReader, ParquetReader, SerReader};
use std::fs::File;

pub struct DataFrame;

@@ -206,15 +206,6 @@ fn from_csv(args: CommandArgs) -> Result<polars::prelude::DataFrame, ShellError>

    match csv_reader.finish() {
        Ok(df) => Ok(df),
        Err(e) => match e {
            PolarsError::Other(_) => Err(ShellError::labeled_error_with_secondary(
                "Schema error",
                "Error with the inferred schema",
                &file.tag.span,
                "You can use the argument 'infer_schema' with a number of rows large enough to better infer the schema",
                &file.tag.span,
            )),
            _ => Err(parse_polars_error::<&str>(&e, &file.tag.span, None)),
        },
        Err(e) => Err(parse_polars_error::<&str>(&e, &file.tag.span, None)),
    }
}
@@ -101,9 +101,9 @@ fn command(mut args: CommandArgs) -> Result<OutputStream, ShellError> {

    let cum_type = CumType::from_str(&cum_type.item, &cum_type.tag.span)?;
    let mut res = match cum_type {
        CumType::Max => series.cum_max(reverse),
        CumType::Min => series.cum_min(reverse),
        CumType::Sum => series.cum_sum(reverse),
        CumType::Max => series.cummax(reverse),
        CumType::Min => series.cummin(reverse),
        CumType::Sum => series.cumsum(reverse),
    };

    let name = format!("{}_{}", series.name(), cum_type.to_str());
@@ -53,7 +53,7 @@ fn command(mut args: CommandArgs) -> Result<OutputStream, ShellError> {
    let series = df.as_series(&df_tag.span)?;

    let casted = series
        .date64()
        .datetime()
        .map_err(|e| parse_polars_error::<&str>(&e, &df_tag.span, None))?;

    let res = casted.day().into_series();

@@ -53,7 +53,7 @@ fn command(mut args: CommandArgs) -> Result<OutputStream, ShellError> {
    let series = df.as_series(&df_tag.span)?;

    let casted = series
        .date64()
        .datetime()
        .map_err(|e| parse_polars_error::<&str>(&e, &df_tag.span, None))?;

    let res = casted.hour().into_series();

@@ -53,7 +53,7 @@ fn command(mut args: CommandArgs) -> Result<OutputStream, ShellError> {
    let series = df.as_series(&df_tag.span)?;

    let casted = series
        .date64()
        .datetime()
        .map_err(|e| parse_polars_error::<&str>(&e, &df_tag.span, None))?;

    let res = casted.minute().into_series();

@@ -53,7 +53,7 @@ fn command(mut args: CommandArgs) -> Result<OutputStream, ShellError> {
    let series = df.as_series(&df_tag.span)?;

    let casted = series
        .date64()
        .datetime()
        .map_err(|e| parse_polars_error::<&str>(&e, &df_tag.span, None))?;

    let res = casted.month().into_series();

@@ -53,7 +53,7 @@ fn command(mut args: CommandArgs) -> Result<OutputStream, ShellError> {
    let series = df.as_series(&df_tag.span)?;

    let casted = series
        .date64()
        .datetime()
        .map_err(|e| parse_polars_error::<&str>(&e, &df_tag.span, None))?;

    let res = casted.nanosecond().into_series();

@@ -56,7 +56,7 @@ fn command(mut args: CommandArgs) -> Result<OutputStream, ShellError> {
    let series = df.as_series(&df_tag.span)?;

    let casted = series
        .date64()
        .datetime()
        .map_err(|e| parse_polars_error::<&str>(&e, &df_tag.span, None))?;

    let res = casted.ordinal().into_series();

@@ -53,7 +53,7 @@ fn command(mut args: CommandArgs) -> Result<OutputStream, ShellError> {
    let series = df.as_series(&df_tag.span)?;

    let casted = series
        .date64()
        .datetime()
        .map_err(|e| parse_polars_error::<&str>(&e, &df_tag.span, None))?;

    let res = casted.second().into_series();

@@ -53,7 +53,7 @@ fn command(mut args: CommandArgs) -> Result<OutputStream, ShellError> {
    let series = df.as_series(&df_tag.span)?;

    let casted = series
        .date64()
        .datetime()
        .map_err(|e| parse_polars_error::<&str>(&e, &df_tag.span, None))?;

    let res = casted.week().into_series();

@@ -53,7 +53,7 @@ fn command(mut args: CommandArgs) -> Result<OutputStream, ShellError> {
    let series = df.as_series(&df_tag.span)?;

    let casted = series
        .date64()
        .datetime()
        .map_err(|e| parse_polars_error::<&str>(&e, &df_tag.span, None))?;

    let res = casted.weekday().into_series();

@@ -56,7 +56,7 @@ fn command(mut args: CommandArgs) -> Result<OutputStream, ShellError> {
    let series = df.as_series(&df_tag.span)?;

    let casted = series
        .date64()
        .datetime()
        .map_err(|e| parse_polars_error::<&str>(&e, &df_tag.span, None))?;

    let res = casted.year().into_series();
@@ -6,7 +6,7 @@ use nu_protocol::{
    Signature, SyntaxShape, UntaggedValue,
};
use nu_source::Tagged;
use polars::prelude::DataType;
use polars::prelude::{DataType, RollingOptions};

enum RollType {
    Min,

@@ -57,7 +57,6 @@ impl WholeStreamCommand for DataFrame {
        Signature::build("dataframe rolling")
            .required("type", SyntaxShape::String, "rolling operation")
            .required("window", SyntaxShape::Int, "Window size for rolling")
            .switch("ignore_nulls", "Ignore nulls in column", Some('i'))
    }

    fn run(&self, args: CommandArgs) -> Result<OutputStream, ShellError> {

@@ -112,7 +111,6 @@ fn command(mut args: CommandArgs) -> Result<OutputStream, ShellError> {
    let tag = args.call_info.name_tag.clone();
    let roll_type: Tagged<String> = args.req(0)?;
    let window_size: Tagged<i64> = args.req(1)?;
    let ignore_nulls = args.has_flag("ignore_nulls");

    let (df, df_tag) = NuDataFrame::try_from_stream(&mut args.input, &tag.span)?;
    let series = df.as_series(&df_tag.span)?;

@@ -126,31 +124,17 @@ fn command(mut args: CommandArgs) -> Result<OutputStream, ShellError> {
    }

    let roll_type = RollType::from_str(&roll_type.item, &roll_type.tag.span)?;
    let rolling_opts = RollingOptions {
        window_size: window_size.item as usize,
        min_periods: window_size.item as usize,
        weights: None,
        center: false,
    };
    let res = match roll_type {
        RollType::Max => series.rolling_max(
            window_size.item as u32,
            None,
            ignore_nulls,
            window_size.item as u32,
        ),
        RollType::Min => series.rolling_min(
            window_size.item as u32,
            None,
            ignore_nulls,
            window_size.item as u32,
        ),
        RollType::Sum => series.rolling_sum(
            window_size.item as u32,
            None,
            ignore_nulls,
            window_size.item as u32,
        ),
        RollType::Mean => series.rolling_mean(
            window_size.item as u32,
            None,
            ignore_nulls,
            window_size.item as u32,
        ),
        RollType::Max => series.rolling_max(rolling_opts),
        RollType::Min => series.rolling_min(rolling_opts),
        RollType::Sum => series.rolling_sum(rolling_opts),
        RollType::Mean => series.rolling_mean(rolling_opts),
    };

    let mut res = res.map_err(|e| parse_polars_error::<&str>(&e, &df_tag.span, None))?;
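Aside: a consolidated view of the newer polars rolling API the hunk above migrates to. The `RollingOptions` fields are taken directly from the diff; the window value, helper name, and sample data below are illustrative, and the sketch assumes the polars 0.17 `rolling_window` feature enabled in the Cargo.toml hunk earlier in this compare:

```rust
use polars::prelude::*;

// Illustrative: build RollingOptions the way the rolling command now does and
// apply a rolling max over a Series.
fn rolling_max_of(series: &Series, window: usize) -> Result<Series, PolarsError> {
    let opts = RollingOptions {
        window_size: window,
        min_periods: window, // same value as the window, matching the hunk above
        weights: None,
        center: false,
    };
    series.rolling_max(opts)
}

fn main() {
    let s = Series::new("x", &[1i64, 3, 2, 5, 4]);
    match rolling_max_of(&s, 2) {
        Ok(out) => println!("{:?}", out),
        Err(e) => eprintln!("rolling failed: {}", e),
    }
}
```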
@@ -78,7 +78,7 @@ fn command(mut args: CommandArgs) -> Result<OutputStream, ShellError> {
    let casted = match indices.dtype() {
        DataType::UInt32 | DataType::UInt64 | DataType::Int32 | DataType::Int64 => indices
            .as_ref()
            .cast_with_dtype(&DataType::UInt32)
            .cast(&DataType::UInt32)
            .map_err(|e| parse_polars_error::<&str>(&e, &value.tag.span, None)),
        _ => Err(ShellError::labeled_error_with_secondary(
            "Incorrect type",
@@ -58,7 +58,7 @@ fn command(mut args: CommandArgs) -> Result<OutputStream, ShellError> {
    let series = df.as_series(&df_tag.span)?;

    let casted = series
        .date64()
        .datetime()
        .map_err(|e| parse_polars_error::<&str>(&e, &df_tag.span, None))?;

    let res = casted.strftime(&fmt.item).into_series();
@@ -92,7 +92,7 @@ fn command(mut args: CommandArgs) -> Result<OutputStream, ShellError> {
    let casted = match series.dtype() {
        DataType::UInt32 | DataType::UInt64 | DataType::Int32 | DataType::Int64 => series
            .as_ref()
            .cast_with_dtype(&DataType::UInt32)
            .cast(&DataType::UInt32)
            .map_err(|e| parse_polars_error::<&str>(&e, &value.tag.span, None)),
        _ => Err(ShellError::labeled_error_with_secondary(
            "Incorrect type",
@@ -73,9 +73,9 @@ fn command(mut args: CommandArgs) -> Result<OutputStream, ShellError> {
    let writer = CsvWriter::new(&mut file);

    let writer = if no_header {
        writer.has_headers(false)
        writer.has_header(false)
    } else {
        writer.has_headers(true)
        writer.has_header(true)
    };

    let writer = match delimiter {
@ -53,13 +53,12 @@ pub(crate) fn parse_polars_error<T: AsRef<str>>(
|
||||
PolarsError::DataTypeMisMatch(_) => "Data Type Mismatch",
|
||||
PolarsError::NotFound(_) => "Not Found",
|
||||
PolarsError::ShapeMisMatch(_) => "Shape Mismatch",
|
||||
PolarsError::Other(_) => "Other",
|
||||
PolarsError::ComputeError(_) => "Computer error",
|
||||
PolarsError::OutOfBounds(_) => "Out Of Bounds",
|
||||
PolarsError::NoSlice => "No Slice",
|
||||
PolarsError::NoData(_) => "No Data",
|
||||
PolarsError::ValueError(_) => "Value Error",
|
||||
PolarsError::MemoryNotAligned => "Memory Not Aligned",
|
||||
PolarsError::ParquetError(_) => "Parquet Error",
|
||||
PolarsError::RandError(_) => "Rand Error",
|
||||
PolarsError::HasNullValues(_) => "Has Null Values",
|
||||
PolarsError::UnknownSchema(_) => "Unknown Schema",
|
||||
|
@ -36,6 +36,7 @@ mod skip;
|
||||
pub(crate) mod sort_by;
|
||||
mod uniq;
|
||||
mod update;
|
||||
mod update_cells;
|
||||
mod where_;
|
||||
mod wrap;
|
||||
mod zip_;
|
||||
@ -78,6 +79,7 @@ pub use skip::{Skip, SkipUntil, SkipWhile};
|
||||
pub use sort_by::SortBy;
|
||||
pub use uniq::Uniq;
|
||||
pub use update::Command as Update;
|
||||
pub use update_cells::SubCommand as UpdateCells;
|
||||
pub use where_::Command as Where;
|
||||
pub use wrap::Wrap;
|
||||
pub use zip_::Command as Zip;
|
||||
|
@ -15,11 +15,18 @@ impl WholeStreamCommand for Command {
|
||||
}
|
||||
|
||||
fn signature(&self) -> Signature {
|
||||
Signature::build("select").rest(
|
||||
"rest",
|
||||
SyntaxShape::ColumnPath,
|
||||
"the columns to select from the table",
|
||||
)
|
||||
Signature::build("select")
|
||||
.named(
|
||||
"columns",
|
||||
SyntaxShape::Table,
|
||||
"Optionally operate by column path",
|
||||
Some('c'),
|
||||
)
|
||||
.rest(
|
||||
"rest",
|
||||
SyntaxShape::ColumnPath,
|
||||
"the columns to select from the table",
|
||||
)
|
||||
}
|
||||
|
||||
fn usage(&self) -> &str {
|
||||
@ -27,10 +34,10 @@ impl WholeStreamCommand for Command {
|
||||
}
|
||||
|
||||
fn run(&self, args: CommandArgs) -> Result<OutputStream, ShellError> {
|
||||
let columns: Vec<ColumnPath> = args.rest(0)?;
|
||||
let mut columns = args.rest(0)?;
|
||||
columns.extend(column_paths_from_args(&args)?);
|
||||
let input = args.input;
|
||||
let name = args.call_info.name_tag;
|
||||
|
||||
select(name, columns, input)
|
||||
}
|
||||
|
||||
@ -46,10 +53,51 @@ impl WholeStreamCommand for Command {
|
||||
example: "ls | select name size",
|
||||
result: None,
|
||||
},
|
||||
Example {
|
||||
description: "Select columns dynamically",
|
||||
example: "[[a b]; [1 2]] | select -c [a]",
|
||||
result: Some(vec![UntaggedValue::row(indexmap! {
|
||||
"a".to_string() => UntaggedValue::int(1).into(),
|
||||
})
|
||||
.into()]),
|
||||
},
|
||||
]
|
||||
}
|
||||
}
|
||||
|
||||
fn column_paths_from_args(args: &CommandArgs) -> Result<Vec<ColumnPath>, ShellError> {
|
||||
let column_paths: Option<Vec<Value>> = args.get_flag("columns")?;
|
||||
let has_columns = column_paths.is_some();
|
||||
let column_paths = match column_paths {
|
||||
Some(cols) => {
|
||||
let mut c = Vec::new();
|
||||
for col in cols {
|
||||
let colpath = ColumnPath::build(&col.convert_to_string().spanned_unknown());
|
||||
if !colpath.is_empty() {
|
||||
c.push(colpath)
|
||||
}
|
||||
}
|
||||
c
|
||||
}
|
||||
None => Vec::new(),
|
||||
};
|
||||
|
||||
if has_columns && column_paths.is_empty() {
|
||||
let colval: Option<Value> = args.get_flag("columns")?;
|
||||
let colspan = match colval {
|
||||
Some(v) => v.tag.span,
|
||||
None => Span::unknown(),
|
||||
};
|
||||
return Err(ShellError::labeled_error(
|
||||
"Requires a list of columns",
|
||||
"must be a list of columns",
|
||||
colspan,
|
||||
));
|
||||
}
|
||||
|
||||
Ok(column_paths)
|
||||
}
|
||||
|
||||
fn select(
|
||||
name: Tag,
|
||||
columns: Vec<ColumnPath>,
|
||||
|
crates/nu-command/src/commands/filters/update_cells.rs (new file, 211 lines)
@@ -0,0 +1,211 @@
use crate::prelude::*;
use nu_engine::run_block;
use nu_engine::WholeStreamCommand;

use nu_errors::ShellError;
use nu_protocol::{
    hir::{CapturedBlock, ExternalRedirection},
    Signature, SyntaxShape, TaggedDictBuilder, UntaggedValue, Value,
};
use std::collections::HashSet;
use std::iter::FromIterator;

pub struct SubCommand;

impl WholeStreamCommand for SubCommand {
    fn name(&self) -> &str {
        "update cells"
    }

    fn signature(&self) -> Signature {
        Signature::build("update cells")
            .required(
                "block",
                SyntaxShape::Block,
                "the block to run an update for each cell",
            )
            .named(
                "columns",
                SyntaxShape::Table,
                "list of columns to update",
                Some('c'),
            )
    }

    fn usage(&self) -> &str {
        "Update the table cells."
    }

    fn run(&self, args: CommandArgs) -> Result<OutputStream, ShellError> {
        update_cells(args)
    }

    fn examples(&self) -> Vec<Example> {
        vec![
            Example {
                description: "Update the zero value cells to empty strings.",
                example: r#"[
    [2021-04-16, 2021-06-10, 2021-09-18, 2021-10-15, 2021-11-16, 2021-11-17, 2021-11-18];
    [ 37, 0, 0, 0, 37, 0, 0]
] | update cells {|value|
    if ($value | into int) == 0 {
        ""
    } {
        $value
    }
}"#,
                result: Some(vec![UntaggedValue::row(indexmap! {
                    "2021-04-16".to_string() => UntaggedValue::int(37).into(),
                    "2021-06-10".to_string() => Value::from(""),
                    "2021-09-18".to_string() => Value::from(""),
                    "2021-10-15".to_string() => Value::from(""),
                    "2021-11-16".to_string() => UntaggedValue::int(37).into(),
                    "2021-11-17".to_string() => Value::from(""),
                    "2021-11-18".to_string() => Value::from(""),
                })
                .into()]),
            },
            Example {
                description: "Update the zero value cells to empty strings in 2 last columns.",
                example: r#"[
    [2021-04-16, 2021-06-10, 2021-09-18, 2021-10-15, 2021-11-16, 2021-11-17, 2021-11-18];
    [ 37, 0, 0, 0, 37, 0, 0]
] | update cells -c ["2021-11-18", "2021-11-17"] {|value|
    if ($value | into int) == 0 {
        ""
    } {
        $value
    }
}"#,
                result: Some(vec![UntaggedValue::row(indexmap! {
                    "2021-04-16".to_string() => UntaggedValue::int(37).into(),
                    "2021-06-10".to_string() => UntaggedValue::int(0).into(),
                    "2021-09-18".to_string() => UntaggedValue::int(0).into(),
                    "2021-10-15".to_string() => UntaggedValue::int(0).into(),
                    "2021-11-16".to_string() => UntaggedValue::int(37).into(),
                    "2021-11-17".to_string() => Value::from(""),
                    "2021-11-18".to_string() => Value::from(""),
                })
                .into()]),
            },
        ]
    }
}

fn update_cells(args: CommandArgs) -> Result<OutputStream, ShellError> {
    let context = Arc::new(args.context.clone());
    let external_redirection = args.call_info.args.external_redirection;

    let block: CapturedBlock = args.req(0)?;
    let block = Arc::new(block);

    let columns = args
        .get_flag("columns")?
        .map(|x: Value| HashSet::from_iter(x.table_entries().map(|val| val.convert_to_string())));
    let columns = Arc::new(columns);

    Ok(args
        .input
        .flat_map(move |input| {
            let block = block.clone();
            let context = context.clone();

            if input.is_row() {
                OutputStream::one(process_cells(
                    block,
                    columns.clone(),
                    context,
                    input,
                    external_redirection,
                ))
            } else {
                match process_input(block, context, input, external_redirection) {
                    Ok(s) => s,
                    Err(e) => OutputStream::one(Value::error(e)),
                }
            }
        })
        .into_output_stream())
}

pub fn process_input(
    captured_block: Arc<CapturedBlock>,
    context: Arc<EvaluationContext>,
    input: Value,
    external_redirection: ExternalRedirection,
) -> Result<OutputStream, ShellError> {
    let input_clone = input.clone();
    // When we process a row, we need to know whether the block wants to have the contents of the row as
    // a parameter to the block (so it gets assigned to a variable that can be used inside the block) or
    // if it wants the contents as as an input stream

    let input_stream = if !captured_block.block.params.positional.is_empty() {
        InputStream::empty()
    } else {
        vec![Ok(input_clone)].into_iter().into_input_stream()
    };

    context.scope.enter_scope();
    context.scope.add_vars(&captured_block.captured.entries);

    if let Some((arg, _)) = captured_block.block.params.positional.first() {
        context.scope.add_var(arg.name(), input);
    } else {
        context.scope.add_var("$it", input);
    }

    let result = run_block(
        &captured_block.block,
        &context,
        input_stream,
        external_redirection,
    );

    context.scope.exit_scope();

    result
}

pub fn process_cells(
    captured_block: Arc<CapturedBlock>,
    columns: Arc<Option<HashSet<String>>>,
    context: Arc<EvaluationContext>,
    input: Value,
    external_redirection: ExternalRedirection,
) -> Value {
    TaggedDictBuilder::build(input.tag(), |row| {
        input.row_entries().for_each(|(column, cell_value)| {
            match &*columns {
                Some(col) if !col.contains(column) => {
                    row.insert_value(column, cell_value.clone());
                    return;
                }
                _ => {}
            };
            let cell_processed = process_input(
                captured_block.clone(),
                context.clone(),
                cell_value.clone(),
                external_redirection,
            )
            .map(|it| it.into_vec())
            .map_err(Value::error);

            match cell_processed {
                Ok(value) => {
                    match value.get(0) {
                        Some(one) => {
                            row.insert_value(column, one.clone());
                        }
                        None => {
                            row.insert_untagged(column, UntaggedValue::nothing());
                        }
                    };
                }
                Err(reason) => {
                    row.insert_value(column, reason);
                }
            }
        });
    })
}
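The core of update cells is: for each row, run the block once per cell, but leave a cell untouched when a --columns set is given and the cell's column is not in it. A rough standalone sketch of that per-cell dispatch, using plain Rust maps instead of the nushell engine types (hypothetical, illustration only):

    use std::collections::{BTreeMap, HashSet};

    // Apply `update` to each cell of `row`, but only for columns listed in
    // `columns` (or to every column when no column set was given).
    fn update_cells(
        row: &BTreeMap<String, String>,
        columns: Option<&HashSet<String>>,
        update: impl Fn(&str) -> String,
    ) -> BTreeMap<String, String> {
        row.iter()
            .map(|(name, value)| {
                let skip = columns.map_or(false, |cols| !cols.contains(name));
                let new_value = if skip { value.clone() } else { update(value) };
                (name.clone(), new_value)
            })
            .collect()
    }

    fn main() {
        let mut row = BTreeMap::new();
        row.insert("a".to_string(), "0".to_string());
        row.insert("b".to_string(), "37".to_string());
        let only_a: HashSet<String> = ["a".to_string()].into_iter().collect();
        // Replace zero cells with an empty string, but only in column "a".
        let updated = update_cells(&row, Some(&only_a), |v| {
            if v == "0" { String::new() } else { v.to_string() }
        });
        println!("{:?}", updated);
    }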
@@ -136,7 +136,8 @@ fn to_string_tagged_value(v: &Value) -> Result<String, ShellError> {
        | Primitive::Boolean(_)
        | Primitive::Decimal(_)
        | Primitive::FilePath(_)
-       | Primitive::Int(_),
+       | Primitive::Int(_)
+       | Primitive::BigInt(_),
    ) => as_string(v),
    UntaggedValue::Primitive(Primitive::Date(d)) => Ok(d.to_string()),
    UntaggedValue::Primitive(Primitive::Nothing) => Ok(String::new()),

@@ -112,7 +112,11 @@ mod tests {

    fn only_examples() -> Vec<Command> {
        let mut commands = full_tests();
-       commands.extend([whole_stream_command(Zip), whole_stream_command(Flatten)]);
+       commands.extend([
+           whole_stream_command(UpdateCells),
+           whole_stream_command(Zip),
+           whole_stream_command(Flatten),
+       ]);
        commands
    }

@@ -1,4 +1,4 @@
-use super::{operate, PathSubcommandArguments};
+use super::{column_paths_from_args, operate, PathSubcommandArguments};
 use crate::prelude::*;
 use nu_engine::WholeStreamCommand;
 use nu_errors::ShellError;
@@ -9,13 +9,13 @@ use std::path::Path;
 pub struct PathBasename;

 struct PathBasenameArguments {
-    rest: Vec<ColumnPath>,
+    columns: Vec<ColumnPath>,
     replace: Option<Tagged<String>>,
 }

 impl PathSubcommandArguments for PathBasenameArguments {
     fn get_column_paths(&self) -> &Vec<ColumnPath> {
-        &self.rest
+        &self.columns
     }
 }

@@ -26,10 +26,11 @@ impl WholeStreamCommand for PathBasename {

    fn signature(&self) -> Signature {
        Signature::build("path basename")
-           .rest(
-               "rest",
-               SyntaxShape::ColumnPath,
+           .named(
+               "columns",
+               SyntaxShape::Table,
                "Optionally operate by column path",
+               Some('c'),
            )
            .named(
                "replace",
@@ -46,7 +47,7 @@ impl WholeStreamCommand for PathBasename {
    fn run(&self, args: CommandArgs) -> Result<OutputStream, ShellError> {
        let tag = args.call_info.name_tag.clone();
        let cmd_args = Arc::new(PathBasenameArguments {
-           rest: args.rest(0)?,
+           columns: column_paths_from_args(&args)?,
            replace: args.get_flag("replace")?,
        });

@@ -58,12 +59,17 @@ impl WholeStreamCommand for PathBasename {
        vec![
            Example {
                description: "Get basename of a path",
-               example: "echo 'C:\\Users\\joe\\test.txt' | path basename",
+               example: "'C:\\Users\\joe\\test.txt' | path basename",
                result: Some(vec![Value::from("test.txt")]),
            },
+           Example {
+               description: "Get basename of a path in a column",
+               example: "ls .. | path basename -c [ name ]",
+               result: None,
+           },
            Example {
                description: "Replace basename of a path",
-               example: "echo 'C:\\Users\\joe\\test.txt' | path basename -r 'spam.png'",
+               example: "'C:\\Users\\joe\\test.txt' | path basename -r 'spam.png'",
                result: Some(vec![Value::from(UntaggedValue::filepath(
                    "C:\\Users\\joe\\spam.png",
                ))]),
@@ -76,12 +82,17 @@ impl WholeStreamCommand for PathBasename {
        vec![
            Example {
                description: "Get basename of a path",
-               example: "echo '/home/joe/test.txt' | path basename",
+               example: "'/home/joe/test.txt' | path basename",
                result: Some(vec![Value::from("test.txt")]),
            },
+           Example {
+               description: "Get basename of a path in a column",
+               example: "ls .. | path basename -c [ name ]",
+               result: None,
+           },
            Example {
                description: "Replace basename of a path",
-               example: "echo '/home/joe/test.txt' | path basename -r 'spam.png'",
+               example: "'/home/joe/test.txt' | path basename -r 'spam.png'",
                result: Some(vec![Value::from(UntaggedValue::filepath(
                    "/home/joe/spam.png",
                ))]),

@@ -1,4 +1,4 @@
-use super::{operate, PathSubcommandArguments};
+use super::{column_paths_from_args, operate, PathSubcommandArguments};
 use crate::prelude::*;
 use nu_engine::WholeStreamCommand;
 use nu_errors::ShellError;
@@ -9,14 +9,14 @@ use std::path::Path;
 pub struct PathDirname;

 struct PathDirnameArguments {
-    rest: Vec<ColumnPath>,
+    columns: Vec<ColumnPath>,
     replace: Option<Tagged<String>>,
     num_levels: Option<Tagged<u32>>,
 }

 impl PathSubcommandArguments for PathDirnameArguments {
     fn get_column_paths(&self) -> &Vec<ColumnPath> {
-        &self.rest
+        &self.columns
     }
 }

@@ -27,10 +27,11 @@ impl WholeStreamCommand for PathDirname {

    fn signature(&self) -> Signature {
        Signature::build("path dirname")
-           .rest(
-               "rest",
-               SyntaxShape::ColumnPath,
+           .named(
+               "columns",
+               SyntaxShape::Table,
                "Optionally operate by column path",
+               Some('c'),
            )
            .named(
                "replace",
@@ -53,7 +54,7 @@ impl WholeStreamCommand for PathDirname {
    fn run(&self, args: CommandArgs) -> Result<OutputStream, ShellError> {
        let tag = args.call_info.name_tag.clone();
        let cmd_args = Arc::new(PathDirnameArguments {
-           rest: args.rest(0)?,
+           columns: column_paths_from_args(&args)?,
            replace: args.get_flag("replace")?,
            num_levels: args.get_flag("num-levels")?,
        });
@@ -66,20 +67,25 @@ impl WholeStreamCommand for PathDirname {
        vec![
            Example {
                description: "Get dirname of a path",
-               example: "echo 'C:\\Users\\joe\\code\\test.txt' | path dirname",
+               example: "'C:\\Users\\joe\\code\\test.txt' | path dirname",
                result: Some(vec![Value::from(UntaggedValue::filepath(
                    "C:\\Users\\joe\\code",
                ))]),
            },
+           Example {
+               description: "Get dirname of a path in a column",
+               example: "ls ('.' | path expand) | path dirname -c [ name ]",
+               result: None,
+           },
            Example {
                description: "Walk up two levels",
-               example: "echo 'C:\\Users\\joe\\code\\test.txt' | path dirname -n 2",
+               example: "'C:\\Users\\joe\\code\\test.txt' | path dirname -n 2",
                result: Some(vec![Value::from(UntaggedValue::filepath("C:\\Users\\joe"))]),
            },
            Example {
                description: "Replace the part that would be returned with a custom path",
                example:
-                   "echo 'C:\\Users\\joe\\code\\test.txt' | path dirname -n 2 -r C:\\Users\\viking",
+                   "'C:\\Users\\joe\\code\\test.txt' | path dirname -n 2 -r C:\\Users\\viking",
                result: Some(vec![Value::from(UntaggedValue::filepath(
                    "C:\\Users\\viking\\code\\test.txt",
                ))]),
@@ -92,17 +98,22 @@ impl WholeStreamCommand for PathDirname {
        vec![
            Example {
                description: "Get dirname of a path",
-               example: "echo '/home/joe/code/test.txt' | path dirname",
+               example: "'/home/joe/code/test.txt' | path dirname",
                result: Some(vec![Value::from(UntaggedValue::filepath("/home/joe/code"))]),
            },
+           Example {
+               description: "Get dirname of a path in a column",
+               example: "ls ('.' | path expand) | path dirname -c [ name ]",
+               result: None,
+           },
            Example {
                description: "Walk up two levels",
-               example: "echo '/home/joe/code/test.txt' | path dirname -n 2",
+               example: "'/home/joe/code/test.txt' | path dirname -n 2",
                result: Some(vec![Value::from(UntaggedValue::filepath("/home/joe"))]),
            },
            Example {
                description: "Replace the part that would be returned with a custom path",
-               example: "echo '/home/joe/code/test.txt' | path dirname -n 2 -r /home/viking",
+               example: "'/home/joe/code/test.txt' | path dirname -n 2 -r /home/viking",
                result: Some(vec![Value::from(UntaggedValue::filepath(
                    "/home/viking/code/test.txt",
                ))]),

@@ -1,4 +1,4 @@
-use super::{operate, PathSubcommandArguments};
+use super::{column_paths_from_args, operate, PathSubcommandArguments};
 use crate::prelude::*;
 use nu_engine::WholeStreamCommand;
 use nu_errors::ShellError;
@@ -8,12 +8,12 @@ use std::path::Path;
 pub struct PathExists;

 struct PathExistsArguments {
-    rest: Vec<ColumnPath>,
+    columns: Vec<ColumnPath>,
 }

 impl PathSubcommandArguments for PathExistsArguments {
     fn get_column_paths(&self) -> &Vec<ColumnPath> {
-        &self.rest
+        &self.columns
     }
 }

@@ -23,10 +23,11 @@ impl WholeStreamCommand for PathExists {
    }

    fn signature(&self) -> Signature {
-       Signature::build("path exists").rest(
-           "rest",
-           SyntaxShape::ColumnPath,
+       Signature::build("path exists").named(
+           "columns",
+           SyntaxShape::Table,
            "Optionally operate by column path",
+           Some('c'),
        )
    }

@@ -37,7 +38,7 @@ impl WholeStreamCommand for PathExists {
    fn run(&self, args: CommandArgs) -> Result<OutputStream, ShellError> {
        let tag = args.call_info.name_tag.clone();
        let cmd_args = Arc::new(PathExistsArguments {
-           rest: args.rest(0)?,
+           columns: column_paths_from_args(&args)?,
        });

        Ok(operate(args.input, &action, tag.span, cmd_args))
@@ -45,20 +46,34 @@ impl WholeStreamCommand for PathExists {

    #[cfg(windows)]
    fn examples(&self) -> Vec<Example> {
-       vec![Example {
-           description: "Check if a file exists",
-           example: "echo 'C:\\Users\\joe\\todo.txt' | path exists",
-           result: Some(vec![Value::from(UntaggedValue::boolean(false))]),
-       }]
+       vec![
+           Example {
+               description: "Check if a file exists",
+               example: "'C:\\Users\\joe\\todo.txt' | path exists",
+               result: Some(vec![Value::from(UntaggedValue::boolean(false))]),
+           },
+           Example {
+               description: "Check if a file exists in a column",
+               example: "ls | path exists -c [ name ]",
+               result: None,
+           },
+       ]
    }

    #[cfg(not(windows))]
    fn examples(&self) -> Vec<Example> {
-       vec![Example {
-           description: "Check if a file exists",
-           example: "echo '/home/joe/todo.txt' | path exists",
-           result: Some(vec![Value::from(UntaggedValue::boolean(false))]),
-       }]
+       vec![
+           Example {
+               description: "Check if a file exists",
+               example: "'/home/joe/todo.txt' | path exists",
+               result: Some(vec![Value::from(UntaggedValue::boolean(false))]),
+           },
+           Example {
+               description: "Check if a file exists in a column",
+               example: "ls | path exists -c [ name ]",
+               result: None,
+           },
+       ]
    }
}

@@ -1,4 +1,4 @@
-use super::{operate, PathSubcommandArguments};
+use super::{column_paths_from_args, operate, PathSubcommandArguments};
 use crate::prelude::*;
 use nu_engine::WholeStreamCommand;
 use nu_errors::ShellError;
@@ -11,12 +11,12 @@ pub struct PathExpand;

 struct PathExpandArguments {
     strict: bool,
-    rest: Vec<ColumnPath>,
+    columns: Vec<ColumnPath>,
 }

 impl PathSubcommandArguments for PathExpandArguments {
     fn get_column_paths(&self) -> &Vec<ColumnPath> {
-        &self.rest
+        &self.columns
     }
 }

@@ -32,10 +32,11 @@ impl WholeStreamCommand for PathExpand {
                "Throw an error if the path could not be expanded",
                Some('s'),
            )
-           .rest(
-               "rest",
-               SyntaxShape::ColumnPath,
+           .named(
+               "columns",
+               SyntaxShape::Table,
                "Optionally operate by column path",
+               Some('c'),
            )
    }

@@ -47,7 +48,7 @@ impl WholeStreamCommand for PathExpand {
        let tag = args.call_info.name_tag.clone();
        let cmd_args = Arc::new(PathExpandArguments {
            strict: args.has_flag("strict"),
-           rest: args.rest(0)?,
+           columns: column_paths_from_args(&args)?,
        });

        Ok(operate(args.input, &action, tag.span, cmd_args))
@@ -63,6 +64,11 @@ impl WholeStreamCommand for PathExpand {
                    UntaggedValue::filepath(r"C:\Users\joe\bar").into_value(Span::new(0, 25))
                ]),
            },
+           Example {
+               description: "Expand a path in a column",
+               example: "ls | path expand -c [ name ]",
+               result: None,
+           },
            Example {
                description: "Expand a relative path",
                example: r"'foo\..\bar' | path expand",
@@ -83,6 +89,11 @@ impl WholeStreamCommand for PathExpand {
                    UntaggedValue::filepath("/home/joe/bar").into_value(Span::new(0, 22))
                ]),
            },
+           Example {
+               description: "Expand a path in a column",
+               example: "ls | path expand -c [ name ]",
+               result: None,
+           },
            Example {
                description: "Expand a relative path",
                example: "'foo/../bar' | path expand",

@@ -1,4 +1,6 @@
-use super::{handle_value, join_path, operate_column_paths, PathSubcommandArguments};
+use super::{
+    column_paths_from_args, handle_value, join_path, operate_column_paths, PathSubcommandArguments,
+};
 use crate::prelude::*;
 use nu_engine::WholeStreamCommand;
 use nu_errors::ShellError;
@@ -9,13 +11,13 @@ use std::path::{Path, PathBuf};
 pub struct PathJoin;

 struct PathJoinArguments {
-    rest: Vec<ColumnPath>,
+    columns: Vec<ColumnPath>,
     append: Option<Tagged<PathBuf>>,
 }

 impl PathSubcommandArguments for PathJoinArguments {
     fn get_column_paths(&self) -> &Vec<ColumnPath> {
-        &self.rest
+        &self.columns
     }
 }

@@ -26,16 +28,16 @@ impl WholeStreamCommand for PathJoin {

    fn signature(&self) -> Signature {
        Signature::build("path join")
-           .rest(
-               "rest",
-               SyntaxShape::ColumnPath,
-               "Optionally operate by column path",
-           )
            .named(
+               "columns",
+               SyntaxShape::Table,
+               "Optionally operate by column path",
+               Some('c'),
+           )
+           .optional(
                "append",
                SyntaxShape::FilePath,
                "Path to append to the input",
-               Some('a'),
            )
    }

@@ -50,9 +52,10 @@ the output of 'path parse' and 'path split' subcommands."#

    fn run(&self, args: CommandArgs) -> Result<OutputStream, ShellError> {
        let tag = args.call_info.name_tag.clone();

        let cmd_args = Arc::new(PathJoinArguments {
-           rest: args.rest(0)?,
-           append: args.get_flag("append")?,
+           columns: column_paths_from_args(&args)?,
+           append: args.opt(0)?,
        });

        Ok(operate_join(args.input, &action, tag, cmd_args))
@@ -63,21 +66,26 @@ the output of 'path parse' and 'path split' subcommands."#
        vec![
            Example {
                description: "Append a filename to a path",
-               example: r"echo 'C:\Users\viking' | path join -a spam.txt",
+               example: r"'C:\Users\viking' | path join spam.txt",
                result: Some(vec![Value::from(UntaggedValue::filepath(
                    r"C:\Users\viking\spam.txt",
                ))]),
            },
+           Example {
+               description: "Append a filename to a path inside a column",
+               example: r"ls | path join spam.txt -c [ name ]",
+               result: None,
+           },
            Example {
                description: "Join a list of parts into a path",
-               example: r"echo [ 'C:' '\' 'Users' 'viking' 'spam.txt' ] | path join",
+               example: r"[ 'C:' '\' 'Users' 'viking' 'spam.txt' ] | path join",
                result: Some(vec![Value::from(UntaggedValue::filepath(
                    r"C:\Users\viking\spam.txt",
                ))]),
            },
            Example {
                description: "Join a structured path into a path",
-               example: r"echo [ [parent stem extension]; ['C:\Users\viking' 'spam' 'txt']] | path join",
+               example: r"[ [parent stem extension]; ['C:\Users\viking' 'spam' 'txt']] | path join",
                result: Some(vec![Value::from(UntaggedValue::filepath(
                    r"C:\Users\viking\spam.txt",
                ))]),
@@ -90,21 +98,26 @@ the output of 'path parse' and 'path split' subcommands."#
        vec![
            Example {
                description: "Append a filename to a path",
-               example: r"echo '/home/viking' | path join -a spam.txt",
+               example: r"'/home/viking' | path join spam.txt",
                result: Some(vec![Value::from(UntaggedValue::filepath(
                    r"/home/viking/spam.txt",
                ))]),
            },
+           Example {
+               description: "Append a filename to a path inside a column",
+               example: r"ls | path join spam.txt -c [ name ]",
+               result: None,
+           },
            Example {
                description: "Join a list of parts into a path",
-               example: r"echo [ '/' 'home' 'viking' 'spam.txt' ] | path join",
+               example: r"[ '/' 'home' 'viking' 'spam.txt' ] | path join",
                result: Some(vec![Value::from(UntaggedValue::filepath(
                    r"/home/viking/spam.txt",
                ))]),
            },
            Example {
                description: "Join a structured path into a path",
-               example: r"echo [[ parent stem extension ]; [ '/home/viking' 'spam' 'txt' ]] | path join",
+               example: r"[[ parent stem extension ]; [ '/home/viking' 'spam' 'txt' ]] | path join",
                result: Some(vec![Value::from(UntaggedValue::filepath(
                    r"/home/viking/spam.txt",
                ))]),

@@ -61,7 +61,7 @@ fn encode_path(
                ALLOWED_COLUMNS.join(", ")
            );
            return Err(ShellError::labeled_error_with_secondary(
                "Invalid column name",
                "Expected structured path table",
                msg,
                new_span,
                "originates from here",
@@ -216,3 +216,36 @@ where
        operate_column_paths(input, action, span, args)
    }
}

+fn column_paths_from_args(args: &CommandArgs) -> Result<Vec<ColumnPath>, ShellError> {
+   let column_paths: Option<Vec<Value>> = args.get_flag("columns")?;
+   let has_columns = column_paths.is_some();
+   let column_paths = match column_paths {
+       Some(cols) => {
+           let mut c = Vec::new();
+           for col in cols {
+               let colpath = ColumnPath::build(&col.convert_to_string().spanned_unknown());
+               if !colpath.is_empty() {
+                   c.push(colpath)
+               }
+           }
+           c
+       }
+       None => Vec::new(),
+   };
+
+   if has_columns && column_paths.is_empty() {
+       let colval: Option<Value> = args.get_flag("columns")?;
+       let colspan = match colval {
+           Some(v) => v.tag.span,
+           None => Span::unknown(),
+       };
+       return Err(ShellError::labeled_error(
+           "Requires a list of columns",
+           "must be a list of columns",
+           colspan,
+       ));
+   }
+
+   Ok(column_paths)
+}
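Every path subcommand now shares this column_paths_from_args helper: the --columns flag takes a list such as [ name ], each element is turned into a column path, and a flag that resolves to an empty list is rejected with a labeled error. A rough sketch of the same validation pattern over plain strings, outside the nushell argument machinery (hypothetical names, illustration only):

    // Turn an optional `--columns [ ... ]` value into column names, rejecting
    // a flag that was given but resolved to an empty list.
    fn columns_from_flag(flag: Option<Vec<String>>) -> Result<Vec<String>, String> {
        let was_given = flag.is_some();
        let columns: Vec<String> = flag
            .unwrap_or_default()
            .into_iter()
            .filter(|c| !c.is_empty())
            .collect();

        if was_given && columns.is_empty() {
            return Err("Requires a list of columns".to_string());
        }
        Ok(columns)
    }

    fn main() {
        assert!(columns_from_flag(None).unwrap().is_empty());
        assert_eq!(columns_from_flag(Some(vec!["name".into()])).unwrap(), vec!["name"]);
        assert!(columns_from_flag(Some(vec![])).is_err());
    }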
@@ -1,4 +1,4 @@
-use super::{operate, PathSubcommandArguments};
+use super::{column_paths_from_args, operate, PathSubcommandArguments};
 use crate::prelude::*;
 use nu_engine::WholeStreamCommand;
 use nu_errors::ShellError;
@@ -11,13 +11,13 @@ use std::path::Path;
 pub struct PathParse;

 struct PathParseArguments {
-    rest: Vec<ColumnPath>,
+    columns: Vec<ColumnPath>,
     extension: Option<Tagged<String>>,
 }

 impl PathSubcommandArguments for PathParseArguments {
     fn get_column_paths(&self) -> &Vec<ColumnPath> {
-        &self.rest
+        &self.columns
     }
 }

@@ -28,10 +28,11 @@ impl WholeStreamCommand for PathParse {

    fn signature(&self) -> Signature {
        Signature::build("path parse")
-           .rest(
-               "rest",
-               SyntaxShape::ColumnPath,
+           .named(
+               "columns",
+               SyntaxShape::Table,
                "Optionally operate by column path",
+               Some('c'),
            )
            .named(
                "extension",
@@ -53,7 +54,7 @@ On Windows, an extra 'prefix' column is added."#
    fn run(&self, args: CommandArgs) -> Result<OutputStream, ShellError> {
        let tag = args.call_info.name_tag.clone();
        let cmd_args = Arc::new(PathParseArguments {
-           rest: args.rest(0)?,
+           columns: column_paths_from_args(&args)?,
            extension: args.get_flag("extension")?,
        });

@@ -65,22 +66,22 @@ On Windows, an extra 'prefix' column is added."#
        vec![
            Example {
                description: "Parse a single path",
-               example: r"echo 'C:\Users\viking\spam.txt' | path parse",
+               example: r"'C:\Users\viking\spam.txt' | path parse",
                result: None,
            },
            Example {
                description: "Replace a complex extension",
-               example: r"echo 'C:\Users\viking\spam.tar.gz' | path parse -e tar.gz | update extension { 'txt' }",
+               example: r"'C:\Users\viking\spam.tar.gz' | path parse -e tar.gz | update extension { 'txt' }",
                result: None,
            },
            Example {
                description: "Ignore the extension",
-               example: r"echo 'C:\Users\viking.d' | path parse -e ''",
+               example: r"'C:\Users\viking.d' | path parse -e ''",
                result: None,
            },
            Example {
                description: "Parse all paths under the 'name' column",
-               example: r"ls | path parse name",
+               example: r"ls | path parse -c [ name ]",
                result: None,
            },
        ]
@@ -91,22 +92,22 @@ On Windows, an extra 'prefix' column is added."#
        vec![
            Example {
                description: "Parse a path",
-               example: r"echo '/home/viking/spam.txt' | path parse",
+               example: r"'/home/viking/spam.txt' | path parse",
                result: None,
            },
            Example {
                description: "Replace a complex extension",
-               example: r"echo '/home/viking/spam.tar.gz' | path parse -e tar.gz | update extension { 'txt' }",
+               example: r"'/home/viking/spam.tar.gz' | path parse -e tar.gz | update extension { 'txt' }",
                result: None,
            },
            Example {
                description: "Ignore the extension",
-               example: r"echo '/etc/conf.d' | path parse -e ''",
+               example: r"'/etc/conf.d' | path parse -e ''",
                result: None,
            },
            Example {
                description: "Parse all paths under the 'name' column",
-               example: r"ls | path parse name",
+               example: r"ls | path parse -c [ name ]",
                result: None,
            },
        ]

@@ -1,4 +1,4 @@
-use super::{operate, PathSubcommandArguments};
+use super::{column_paths_from_args, operate, PathSubcommandArguments};
 use crate::prelude::*;
 use nu_engine::WholeStreamCommand;
 use nu_errors::ShellError;
@@ -10,12 +10,12 @@ pub struct PathRelativeTo;

 struct PathRelativeToArguments {
     path: Tagged<PathBuf>,
-    rest: Vec<ColumnPath>,
+    columns: Vec<ColumnPath>,
 }

 impl PathSubcommandArguments for PathRelativeToArguments {
     fn get_column_paths(&self) -> &Vec<ColumnPath> {
-        &self.rest
+        &self.columns
     }
 }

@@ -31,10 +31,11 @@ impl WholeStreamCommand for PathRelativeTo {
                SyntaxShape::FilePath,
                "Parent shared with the input path",
            )
-           .rest(
-               "rest",
-               SyntaxShape::ColumnPath,
+           .named(
+               "columns",
+               SyntaxShape::Table,
                "Optionally operate by column path",
+               Some('c'),
            )
    }

@@ -52,7 +53,7 @@ path."#
        let tag = args.call_info.name_tag.clone();
        let cmd_args = Arc::new(PathRelativeToArguments {
            path: args.req(0)?,
-           rest: args.rest(1)?,
+           columns: column_paths_from_args(&args)?,
        });

        Ok(operate(args.input, &action, tag.span, cmd_args))
@@ -66,6 +67,11 @@ path."#
                example: r"'C:\Users\viking' | path relative-to 'C:\Users'",
                result: Some(vec![Value::from(UntaggedValue::filepath(r"viking"))]),
            },
+           Example {
+               description: "Find a relative path from two absolute paths in a column",
+               example: "ls ~ | path relative-to ~ -c [ name ]",
+               result: None,
+           },
            Example {
                description: "Find a relative path from two relative paths",
                example: r"'eggs\bacon\sausage\spam' | path relative-to 'eggs\bacon\sausage'",
@@ -82,6 +88,11 @@ path."#
                example: r"'/home/viking' | path relative-to '/home'",
                result: Some(vec![Value::from(UntaggedValue::filepath(r"viking"))]),
            },
+           Example {
+               description: "Find a relative path from two absolute paths in a column",
+               example: "ls ~ | path relative-to ~ -c [ name ]",
+               result: None,
+           },
            Example {
                description: "Find a relative path from two relative paths",
                example: r"'eggs/bacon/sausage/spam' | path relative-to 'eggs/bacon/sausage'",

@@ -1,4 +1,4 @@
-use super::{handle_value, operate_column_paths, PathSubcommandArguments};
+use super::{column_paths_from_args, handle_value, operate_column_paths, PathSubcommandArguments};
 use crate::prelude::*;
 use nu_engine::WholeStreamCommand;
 use nu_errors::ShellError;
@@ -8,12 +8,12 @@ use std::path::Path;
 pub struct PathSplit;

 struct PathSplitArguments {
-    rest: Vec<ColumnPath>,
+    columns: Vec<ColumnPath>,
 }

 impl PathSubcommandArguments for PathSplitArguments {
     fn get_column_paths(&self) -> &Vec<ColumnPath> {
-        &self.rest
+        &self.columns
     }
 }

@@ -23,10 +23,11 @@ impl WholeStreamCommand for PathSplit {
    }

    fn signature(&self) -> Signature {
-       Signature::build("path split").rest(
-           "rest",
-           SyntaxShape::ColumnPath,
+       Signature::build("path split").named(
+           "columns",
+           SyntaxShape::Table,
            "Optionally operate by column path",
+           Some('c'),
        )
    }

@@ -37,7 +38,7 @@ impl WholeStreamCommand for PathSplit {
    fn run(&self, args: CommandArgs) -> Result<OutputStream, ShellError> {
        let tag = args.call_info.name_tag.clone();
        let cmd_args = Arc::new(PathSplitArguments {
-           rest: args.rest(0)?,
+           columns: column_paths_from_args(&args)?,
        });

        Ok(operate_split(args.input, &action, tag.span, cmd_args))
@@ -48,7 +49,7 @@ impl WholeStreamCommand for PathSplit {
        vec![
            Example {
                description: "Split a path into parts",
-               example: r"echo 'C:\Users\viking\spam.txt' | path split",
+               example: r"'C:\Users\viking\spam.txt' | path split",
                result: Some(vec![
                    Value::from(UntaggedValue::string("C:")),
                    Value::from(UntaggedValue::string(r"\")),
@@ -59,7 +60,7 @@ impl WholeStreamCommand for PathSplit {
            },
            Example {
                description: "Split all paths under the 'name' column",
-               example: r"ls | path split name",
+               example: r"ls ('.' | path expand) | path split -c [ name ]",
                result: None,
            },
        ]
@@ -70,7 +71,7 @@ impl WholeStreamCommand for PathSplit {
        vec![
            Example {
                description: "Split a path into parts",
-               example: r"echo '/home/viking/spam.txt' | path split",
+               example: r"'/home/viking/spam.txt' | path split",
                result: Some(vec![
                    Value::from(UntaggedValue::string("/")),
                    Value::from(UntaggedValue::string("home")),
@@ -80,7 +81,7 @@ impl WholeStreamCommand for PathSplit {
            },
            Example {
                description: "Split all paths under the 'name' column",
-               example: r"ls | path split name",
+               example: r"ls ('.' | path expand) | path split -c [ name ]",
                result: None,
            },
        ]

@@ -1,4 +1,4 @@
-use super::{operate, PathSubcommandArguments};
+use super::{column_paths_from_args, operate, PathSubcommandArguments};
 use crate::prelude::*;
 use nu_engine::filesystem::filesystem_shell::get_file_type;
 use nu_engine::WholeStreamCommand;
@@ -9,12 +9,12 @@ use std::path::Path;
 pub struct PathType;

 struct PathTypeArguments {
-    rest: Vec<ColumnPath>,
+    columns: Vec<ColumnPath>,
 }

 impl PathSubcommandArguments for PathTypeArguments {
     fn get_column_paths(&self) -> &Vec<ColumnPath> {
-        &self.rest
+        &self.columns
     }
 }

@@ -24,10 +24,11 @@ impl WholeStreamCommand for PathType {
    }

    fn signature(&self) -> Signature {
-       Signature::build("path type").rest(
-           "rest",
-           SyntaxShape::ColumnPath,
+       Signature::build("path type").named(
+           "columns",
+           SyntaxShape::Table,
            "Optionally operate by column path",
+           Some('c'),
        )
    }

@@ -38,18 +39,25 @@ impl WholeStreamCommand for PathType {
    fn run(&self, args: CommandArgs) -> Result<OutputStream, ShellError> {
        let tag = args.call_info.name_tag.clone();
        let cmd_args = Arc::new(PathTypeArguments {
-           rest: args.rest(0)?,
+           columns: column_paths_from_args(&args)?,
        });

        Ok(operate(args.input, &action, tag.span, cmd_args))
    }

    fn examples(&self) -> Vec<Example> {
-       vec![Example {
-           description: "Show type of a filepath",
-           example: "echo '.' | path type",
-           result: Some(vec![Value::from("Dir")]),
-       }]
+       vec![
+           Example {
+               description: "Show type of a filepath",
+               example: "'.' | path type",
+               result: Some(vec![Value::from("Dir")]),
+           },
+           Example {
+               description: "Show type of a filepath in a column",
+               example: "ls | path type -c [ name ]",
+               result: None,
+           },
+       ]
    }
}

crates/nu-command/src/commands/shells/goto.rs (new file, 51 lines)
@@ -0,0 +1,51 @@
use crate::prelude::*;
use nu_engine::WholeStreamCommand;
use nu_errors::ShellError;
use nu_protocol::{CommandAction, ReturnSuccess, Signature, SyntaxShape};

pub struct Goto;

impl WholeStreamCommand for Goto {
    fn name(&self) -> &str {
        "g"
    }

    fn signature(&self) -> Signature {
        Signature::build("g").required("index", SyntaxShape::Int, "the shell's index to go to")
    }

    fn usage(&self) -> &str {
        "Go to specified shell."
    }

    fn run_with_actions(&self, args: CommandArgs) -> Result<ActionStream, ShellError> {
        goto(args)
    }

    fn examples(&self) -> Vec<Example> {
        vec![Example {
            description: "Enter the first shell",
            example: "g 0",
            result: None,
        }]
    }
}

fn goto(args: CommandArgs) -> Result<ActionStream, ShellError> {
    Ok(ActionStream::one(ReturnSuccess::action(
        CommandAction::GotoShell(args.req(0)?),
    )))
}

#[cfg(test)]
mod tests {
    use super::Goto;
    use super::ShellError;

    #[test]
    fn examples_work_as_expected() -> Result<(), ShellError> {
        use crate::examples::test as test_examples;

        test_examples(Goto {})
    }
}
@@ -1,11 +1,13 @@
 mod command;
 mod enter;
 mod exit;
+mod goto;
 mod next;
 mod prev;

 pub use command::Shells;
 pub use enter::Enter;
 pub use exit::Exit;
+pub use goto::Goto;
 pub use next::Next;
 pub use prev::Previous;

crates/nu-command/src/commands/strings/detect/columns.rs (new file, 283 lines)
@@ -0,0 +1,283 @@
use std::{iter::Peekable, str::CharIndices};

use crate::prelude::*;
use nu_engine::WholeStreamCommand;
use nu_errors::ShellError;
use nu_protocol::{Signature, SyntaxShape, TaggedDictBuilder, UntaggedValue};
use nu_source::Spanned;

type Input<'t> = Peekable<CharIndices<'t>>;

pub struct DetectColumns;

impl WholeStreamCommand for DetectColumns {
    fn name(&self) -> &str {
        "detect columns"
    }

    fn signature(&self) -> Signature {
        Signature::build("detect columns")
            .named(
                "skip",
                SyntaxShape::Int,
                "number of rows to skip before detecting",
                Some('s'),
            )
            .switch("no_headers", "don't detect headers", Some('n'))
    }

    fn usage(&self) -> &str {
        "splits contents across multiple columns via the separator."
    }

    fn run(&self, args: CommandArgs) -> Result<OutputStream, ShellError> {
        detect_columns(args)
    }
}

fn detect_columns(args: CommandArgs) -> Result<OutputStream, ShellError> {
    let name_tag = args.name_tag();
    let num_rows_to_skip: Option<usize> = args.get_flag("skip")?;
    let noheader = args.has_flag("no_headers");
    let input = args.input.collect_string(name_tag.clone())?;

    let input: Vec<_> = input
        .lines()
        .skip(num_rows_to_skip.unwrap_or_default())
        .map(|x| x.to_string())
        .collect();

    let mut input = input.into_iter();
    let headers = input.next();

    if let Some(orig_headers) = headers {
        let headers = find_columns(&orig_headers);

        Ok((if noheader {
            vec![orig_headers].into_iter().chain(input)
        } else {
            vec![].into_iter().chain(input)
        })
        .map(move |x| {
            let row = find_columns(&x);

            let mut dict = TaggedDictBuilder::new(name_tag.clone());

            if headers.len() == row.len() && !noheader {
                for (header, val) in headers.iter().zip(row.iter()) {
                    dict.insert_untagged(&header.item, UntaggedValue::string(&val.item));
                }
            } else {
                let mut pre_output = vec![];

                // column counts don't line up, so see if we can figure out why
                for cell in row {
                    for header in &headers {
                        if cell.span.start() <= header.span.end()
                            && cell.span.end() > header.span.start()
                        {
                            pre_output
                                .push((header.item.to_string(), UntaggedValue::string(&cell.item)));
                        }
                    }
                }

                for header in &headers {
                    let mut found = false;
                    for pre_o in &pre_output {
                        if pre_o.0 == header.item {
                            found = true;
                            break;
                        }
                    }

                    if !found {
                        pre_output.push((header.item.to_string(), UntaggedValue::nothing()));
                    }
                }

                if noheader {
                    for header in headers.iter().enumerate() {
                        for pre_o in &pre_output {
                            if pre_o.0 == header.1.item {
                                dict.insert_untagged(format!("Column{}", header.0), pre_o.1.clone())
                            }
                        }
                    }
                } else {
                    for header in &headers {
                        for pre_o in &pre_output {
                            if pre_o.0 == header.item {
                                dict.insert_untagged(&header.item, pre_o.1.clone())
                            }
                        }
                    }
                }
            }

            dict.into_value()
        })
        .into_output_stream())
    } else {
        Ok(OutputStream::empty())
    }
}

pub fn find_columns(input: &str) -> Vec<Spanned<String>> {
    let mut chars = input.char_indices().peekable();
    let mut output = vec![];

    while let Some((_, c)) = chars.peek() {
        if c.is_whitespace() {
            // If the next character is non-newline whitespace, skip it.

            let _ = chars.next();
        } else {
            // Otherwise, try to consume an unclassified token.

            let result = baseline(&mut chars);

            output.push(result);
        }
    }

    output
}

#[derive(Clone, Copy)]
enum BlockKind {
    Paren,
    CurlyBracket,
    SquareBracket,
}

fn baseline(src: &mut Input) -> Spanned<String> {
    let mut token_contents = String::new();

    let start_offset = if let Some((pos, _)) = src.peek() {
        *pos
    } else {
        0
    };

    // This variable tracks the starting character of a string literal, so that
    // we remain inside the string literal lexer mode until we encounter the
    // closing quote.
    let mut quote_start: Option<char> = None;

    // This Vec tracks paired delimiters
    let mut block_level: Vec<BlockKind> = vec![];

    // A baseline token is terminated if it's not nested inside of a paired
    // delimiter and the next character is one of: `|`, `;`, `#` or any
    // whitespace.
    fn is_termination(block_level: &[BlockKind], c: char) -> bool {
        block_level.is_empty() && (c.is_whitespace())
    }

    // The process of slurping up a baseline token repeats:
    //
    // - String literal, which begins with `'`, `"` or `\``, and continues until
    //   the same character is encountered again.
    // - Delimiter pair, which begins with `[`, `(`, or `{`, and continues until
    //   the matching closing delimiter is found, skipping comments and string
    //   literals.
    // - When not nested inside of a delimiter pair, when a terminating
    //   character (whitespace, `|`, `;` or `#`) is encountered, the baseline
    //   token is done.
    // - Otherwise, accumulate the character into the current baseline token.
    while let Some((_, c)) = src.peek() {
        let c = *c;

        if quote_start.is_some() {
            // If we encountered the closing quote character for the current
            // string, we're done with the current string.
            if Some(c) == quote_start {
                quote_start = None;
            }
        } else if c == '\n' {
            if is_termination(&block_level, c) {
                break;
            }
        } else if c == '\'' || c == '"' || c == '`' {
            // We encountered the opening quote of a string literal.
            quote_start = Some(c);
        } else if c == '[' {
            // We encountered an opening `[` delimiter.
            block_level.push(BlockKind::SquareBracket);
        } else if c == ']' {
            // We encountered a closing `]` delimiter. Pop off the opening `[`
            // delimiter.
            if let Some(BlockKind::SquareBracket) = block_level.last() {
                let _ = block_level.pop();
            }
        } else if c == '{' {
            // We encountered an opening `{` delimiter.
            block_level.push(BlockKind::CurlyBracket);
        } else if c == '}' {
            // We encountered a closing `}` delimiter. Pop off the opening `{`.
            if let Some(BlockKind::CurlyBracket) = block_level.last() {
                let _ = block_level.pop();
            }
        } else if c == '(' {
            // We enceountered an opening `(` delimiter.
            block_level.push(BlockKind::Paren);
        } else if c == ')' {
            // We encountered a closing `)` delimiter. Pop off the opening `(`.
            if let Some(BlockKind::Paren) = block_level.last() {
                let _ = block_level.pop();
            }
        } else if is_termination(&block_level, c) {
            break;
        }

        // Otherwise, accumulate the character into the current token.
        token_contents.push(c);

        // Consume the character.
        let _ = src.next();
    }

    let span = Span::new(start_offset, start_offset + token_contents.len());

    // If there is still unclosed opening delimiters, close them and add
    // synthetic closing characters to the accumulated token.
    if block_level.last().is_some() {
        // let delim: char = (*block).closing();
        // let cause = ParseError::unexpected_eof(delim.to_string(), span);

        // while let Some(bk) = block_level.pop() {
        //     token_contents.push(bk.closing());
        // }

        return token_contents.spanned(span);
    }

    if quote_start.is_some() {
        // The non-lite parse trims quotes on both sides, so we add the expected quote so that
        // anyone wanting to consume this partial parse (e.g., completions) will be able to get
        // correct information from the non-lite parse.
        // token_contents.push(delimiter);

        // return (
        //     token_contents.spanned(span),
        //     Some(ParseError::unexpected_eof(delimiter.to_string(), span)),
        // );
        return token_contents.spanned(span);
    }

    token_contents.spanned(span)
}

#[cfg(test)]
mod tests {
    use super::DetectColumns;
    use super::ShellError;

    #[test]
    fn examples_work_as_expected() -> Result<(), ShellError> {
        use crate::examples::test as test_examples;

        test_examples(DetectColumns {})
    }
}
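detect columns finds column boundaries by lexing each line into whitespace-separated tokens while tracking byte spans, quotes, and bracket nesting, then aligning row tokens to header tokens by span overlap. A compact standalone sketch of the whitespace-and-span part (it ignores quotes and brackets, which the real baseline lexer above also handles):

    // Split a line into (start, end, text) tokens on runs of whitespace,
    // keeping byte offsets so rows can later be aligned to header spans.
    fn find_columns(input: &str) -> Vec<(usize, usize, &str)> {
        let mut tokens = Vec::new();
        let mut start = None;
        for (i, c) in input.char_indices() {
            match (c.is_whitespace(), start) {
                (false, None) => start = Some(i),
                (true, Some(s)) => {
                    tokens.push((s, i, &input[s..i]));
                    start = None;
                }
                _ => {}
            }
        }
        if let Some(s) = start {
            tokens.push((s, input.len(), &input[s..]));
        }
        tokens
    }

    fn main() {
        // Header spans can then be overlapped with row spans to assign cells to columns.
        for (start, end, text) in find_columns("PID   TTY  TIME") {
            println!("{start:>3}..{end:<3} {text}");
        }
    }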
crates/nu-command/src/commands/strings/detect/mod.rs (new file, 3 lines)
@@ -0,0 +1,3 @@
pub mod columns;

pub use columns::DetectColumns;

@@ -1,5 +1,6 @@
 mod build_string;
 mod char_;
+mod detect;
 mod format;
 mod lines;
 mod parse;
@@ -10,6 +11,7 @@ mod str_;

 pub use build_string::BuildString;
 pub use char_::Char;
+pub use detect::DetectColumns;
 pub use format::*;
 pub use lines::Lines;
 pub use parse::*;

@@ -11,13 +11,6 @@ use std::collections::HashMap;
 use std::sync::atomic::Ordering;
 use std::time::Instant;

-#[cfg(feature = "table-pager")]
-use {
-    futures::future::join,
-    minus::{ExitStrategy, Pager},
-    std::fmt::Write,
-};

 const STREAM_PAGE_SIZE: usize = 1000;
 const STREAM_TIMEOUT_CHECK_INTERVAL: usize = 100;

@@ -186,28 +179,9 @@ fn table(mut args: CommandArgs) -> Result<OutputStream, ShellError> {

    let term_width = args.host().lock().width();

-   #[cfg(feature = "table-pager")]
-   let pager = Pager::new()
-       .set_exit_strategy(ExitStrategy::PagerQuit)
-       .set_searchable(true)
-       .set_page_if_havent_overflowed(false)
-       .set_input_handler(Box::new(input_handling::MinusInputHandler {}))
-       .finish();

    let stream_data = async {
        let finished = Arc::new(AtomicBool::new(false));
-       // we are required to clone finished, for use within the callback, otherwise we get borrow errors
-       #[cfg(feature = "table-pager")]
-       let finished_within_callback = finished.clone();
-       #[cfg(feature = "table-pager")]
-       {
-           // This is called when the pager finishes, to indicate to the
-           // while loop below to finish, in case of long running InputStream consumer
-           // that doesn't finish by the time the user quits out of the pager
-           pager.lock().await.add_exit_callback(move || {
-               finished_within_callback.store(true, Ordering::Relaxed);
-           });
-       }
        while !finished.clone().load(Ordering::Relaxed) {
            let mut new_input: VecDeque<Value> = VecDeque::new();

@@ -263,161 +237,22 @@ fn table(mut args: CommandArgs) -> Result<OutputStream, ShellError> {
            if !input.is_empty() {
                let t = from_list(&input, &configuration, start_number, &color_hm);
                let output = draw_table(&t, term_width, &color_hm);
-               #[cfg(feature = "table-pager")]
-               {
-                   let mut pager = pager.lock().await;
-                   writeln!(pager.lines, "{}", output).map_err(|_| {
-                       ShellError::untagged_runtime_error("Error writing to pager")
-                   })?;
-               }

-               #[cfg(not(feature = "table-pager"))]
                println!("{}", output);
            }

            start_number += input.len();
        }

-       #[cfg(feature = "table-pager")]
-       {
-           let mut pager_lock = pager.lock().await;
-           pager_lock.data_finished();
-       }

        Result::<_, ShellError>::Ok(())
    };

-   #[cfg(feature = "table-pager")]
-   {
-       let (minus_result, streaming_result) =
-           block_on(join(minus::async_std_updating(pager.clone()), stream_data));
-       minus_result.map_err(|_| ShellError::untagged_runtime_error("Error paging data"))?;
-       streaming_result?;
-   }

-   #[cfg(not(feature = "table-pager"))]
    block_on(stream_data)
        .map_err(|_| ShellError::untagged_runtime_error("Error streaming data"))?;

    Ok(OutputStream::empty())
}

-#[cfg(feature = "table-pager")]
-mod input_handling {
-   use crossterm::event::{Event, KeyCode, KeyEvent, KeyModifiers, MouseEvent, MouseEventKind};
-   use minus::{InputEvent, InputHandler, LineNumbers, SearchMode};
-   pub struct MinusInputHandler;
-
-   impl InputHandler for MinusInputHandler {
-       fn handle_input(
-           &self,
-           ev: Event,
-           upper_mark: usize,
-           search_mode: SearchMode,
-           ln: LineNumbers,
-           rows: usize,
-       ) -> Option<InputEvent> {
-           match ev {
-               // Scroll up by one.
-               Event::Key(KeyEvent {
-                   code: KeyCode::Up,
-                   modifiers: KeyModifiers::NONE,
-               }) => Some(InputEvent::UpdateUpperMark(upper_mark.saturating_sub(1))),
-
-               // Scroll down by one.
-               Event::Key(KeyEvent {
-                   code: KeyCode::Down,
-                   modifiers: KeyModifiers::NONE,
-               }) => Some(InputEvent::UpdateUpperMark(upper_mark.saturating_add(1))),
-
-               // Mouse scroll up/down
-               Event::Mouse(MouseEvent {
-                   kind: MouseEventKind::ScrollUp,
-                   ..
-               }) => Some(InputEvent::UpdateUpperMark(upper_mark.saturating_sub(5))),
-               Event::Mouse(MouseEvent {
-                   kind: MouseEventKind::ScrollDown,
-                   ..
-               }) => Some(InputEvent::UpdateUpperMark(upper_mark.saturating_add(5))),
-               // Go to top.
-               Event::Key(KeyEvent {
-                   code: KeyCode::Home,
-                   modifiers: KeyModifiers::NONE,
-               }) => Some(InputEvent::UpdateUpperMark(0)),
-               // Go to bottom.
-               Event::Key(KeyEvent {
-                   code: KeyCode::End,
-                   modifiers: KeyModifiers::NONE,
-               }) => Some(InputEvent::UpdateUpperMark(usize::MAX)),
-
-               // Page Up/Down
-               Event::Key(KeyEvent {
-                   code: KeyCode::PageUp,
-                   modifiers: KeyModifiers::NONE,
-               }) => Some(InputEvent::UpdateUpperMark(
-                   upper_mark.saturating_sub(rows - 1),
-               )),
-               Event::Key(KeyEvent {
-                   code: KeyCode::PageDown,
-                   modifiers: KeyModifiers::NONE,
-               }) => Some(InputEvent::UpdateUpperMark(
-                   upper_mark.saturating_add(rows - 1),
-               )),
-
-               // Resize event from the terminal.
-               Event::Resize(_, height) => Some(InputEvent::UpdateRows(height as usize)),
-               // Switch line number display.
-               Event::Key(KeyEvent {
-                   code: KeyCode::Char('l'),
-                   modifiers: KeyModifiers::CONTROL,
-               }) => Some(InputEvent::UpdateLineNumber(!ln)),
-               // Quit.
-               Event::Key(KeyEvent {
-                   code: KeyCode::Char('q'),
-                   modifiers: KeyModifiers::NONE,
-               })
-               | Event::Key(KeyEvent {
-                   code: KeyCode::Char('Q'),
-                   modifiers: KeyModifiers::SHIFT,
-               })
-               | Event::Key(KeyEvent {
-                   code: KeyCode::Esc,
-                   modifiers: KeyModifiers::NONE,
-               })
-               | Event::Key(KeyEvent {
-                   code: KeyCode::Char('c'),
-                   modifiers: KeyModifiers::CONTROL,
-               }) => Some(InputEvent::Exit),
-               Event::Key(KeyEvent {
-                   code: KeyCode::Char('/'),
-                   modifiers: KeyModifiers::NONE,
-               }) => Some(InputEvent::Search(SearchMode::Unknown)),
-               Event::Key(KeyEvent {
-                   code: KeyCode::Down,
-                   modifiers: KeyModifiers::CONTROL,
-               }) => {
-                   if search_mode == SearchMode::Unknown {
-                       Some(InputEvent::NextMatch)
-                   } else {
-                       None
-                   }
-               }
-               Event::Key(KeyEvent {
-                   code: KeyCode::Up,
-                   modifiers: KeyModifiers::CONTROL,
-               }) => {
-                   if search_mode == SearchMode::Unknown {
-                       Some(InputEvent::PrevMatch)
-                   } else {
-                       None
-                   }
-               }
-               _ => None,
-           }
-       }
-   }
-}

#[cfg(test)]
mod tests {
    use super::Command;
@@ -76,17 +76,14 @@ impl ConfigExtensions for NuConfig {
fn header_style(&self) -> TextStyle {
// FIXME: I agree, this is the long way around, please suggest and alternative.
let head_color = get_color_from_key_and_subkey(self, "color_config", "header_color");
let head_color_style = match head_color {
Some(s) => {
lookup_ansi_color_style(s.as_string().unwrap_or_else(|_| "green".to_string()))
}
None => nu_ansi_term::Color::Green.normal(),
};
let head_bold = get_color_from_key_and_subkey(self, "color_config", "header_bold");
let head_bold_bool = match head_bold {
Some(b) => header_bold_from_value(Some(&b)),
None => true,
let (head_color_style, head_bold_bool) = match head_color {
Some(s) => (
lookup_ansi_color_style(s.as_string().unwrap_or_else(|_| "green".to_string())),
header_bold_from_value(Some(&s)),
),
None => (nu_ansi_term::Color::Green.normal(), true),
};

let head_align = get_color_from_key_and_subkey(self, "color_config", "header_align");
let head_alignment = match head_align {
Some(a) => header_alignment_from_value(Some(&a)),
@@ -77,6 +77,7 @@ pub fn create_default_context(interactive: bool) -> Result<EvaluationContext, Bo
// Shells
whole_stream_command(Next),
whole_stream_command(Previous),
whole_stream_command(Goto),
whole_stream_command(Shells),
whole_stream_command(Enter),
whole_stream_command(Exit),
@@ -126,6 +127,7 @@ pub fn create_default_context(interactive: bool) -> Result<EvaluationContext, Bo
whole_stream_command(AnsiStrip),
whole_stream_command(AnsiGradient),
whole_stream_command(Char),
whole_stream_command(DetectColumns),
// Column manipulation
whole_stream_command(DropColumn),
whole_stream_command(MoveColumn),
@@ -133,9 +135,11 @@ pub fn create_default_context(interactive: bool) -> Result<EvaluationContext, Bo
whole_stream_command(Select),
whole_stream_command(Get),
whole_stream_command(Update),
whole_stream_command(UpdateCells),
whole_stream_command(Insert),
whole_stream_command(Into),
whole_stream_command(IntoBinary),
whole_stream_command(IntoColumnPath),
whole_stream_command(IntoInt),
whole_stream_command(IntoFilepath),
whole_stream_command(IntoFilesize),
@@ -21,8 +21,8 @@ use crate::commands::{
};

use crate::commands::{
Append, BuildString, Collect, Each, Echo, First, Get, Keep, Last, Let, Math, MathMode, Nth,
Select, StrCollect, Wrap,
Append, BuildString, Collect, Each, Echo, First, Get, If, IntoInt, Keep, Last, Let, Math,
MathMode, Nth, Select, StrCollect, Wrap,
};
use nu_engine::{run_block, whole_stream_command, Command, EvaluationContext, WholeStreamCommand};
use nu_stream::InputStream;
@@ -41,6 +41,8 @@ pub fn test_examples(cmd: Command) -> Result<(), ShellError> {
whole_stream_command(BuildString {}),
whole_stream_command(First {}),
whole_stream_command(Get {}),
whole_stream_command(If {}),
whole_stream_command(IntoInt {}),
whole_stream_command(Keep {}),
whole_stream_command(Each {}),
whole_stream_command(Last {}),
@@ -253,6 +255,8 @@ pub fn test_anchors(cmd: Command) -> Result<(), ShellError> {
whole_stream_command(BuildString {}),
whole_stream_command(First {}),
whole_stream_command(Get {}),
whole_stream_command(If {}),
whole_stream_command(IntoInt {}),
whole_stream_command(Keep {}),
whole_stream_command(Each {}),
whole_stream_command(Last {}),
@@ -8,7 +8,7 @@ fn returns_path_joined_with_column_path() {
cwd: "tests", pipeline(
r#"
echo [ [name]; [eggs] ]
| path join -a spam.txt name
| path join spam.txt -c [ name ]
| get name
"#
));
@@ -23,7 +23,7 @@ fn returns_path_joined_from_list() {
cwd: "tests", pipeline(
r#"
echo [ home viking spam.txt ]
| path join
| path join
"#
));

@@ -37,7 +37,7 @@ fn appends_slash_when_joined_with_empty_path() {
cwd: "tests", pipeline(
r#"
echo "/some/dir"
| path join -a ''
| path join ''
"#
));

@@ -51,7 +51,7 @@ fn returns_joined_path_when_joining_empty_path() {
cwd: "tests", pipeline(
r#"
echo ""
| path join -a foo.txt
| path join foo.txt
"#
));
@@ -48,7 +48,7 @@ fn parses_custom_extension_gets_extension() {
let actual = nu!(
cwd: "tests", pipeline(
r#"
echo 'home/viking/spam.tar.gz'
echo 'home/viking/spam.tar.gz'
| path parse -e tar.gz
| get extension
"#
@@ -62,7 +62,7 @@ fn parses_custom_extension_gets_stem() {
let actual = nu!(
cwd: "tests", pipeline(
r#"
echo 'home/viking/spam.tar.gz'
echo 'home/viking/spam.tar.gz'
| path parse -e tar.gz
| get stem
"#
@@ -76,7 +76,7 @@ fn parses_ignoring_extension_gets_extension() {
let actual = nu!(
cwd: "tests", pipeline(
r#"
echo 'home/viking/spam.tar.gz'
echo 'home/viking/spam.tar.gz'
| path parse -e ''
| get extension
"#
@@ -90,7 +90,7 @@ fn parses_ignoring_extension_gets_stem() {
let actual = nu!(
cwd: "tests", pipeline(
r#"
echo 'home/viking/spam.tar.gz'
echo 'home/viking/spam.tar.gz'
| path parse -e ""
| get stem
"#
@@ -105,7 +105,7 @@ fn parses_column_path_extension() {
cwd: "tests", pipeline(
r#"
echo [[home, barn]; ['home/viking/spam.txt', 'barn/cow/moo.png']]
| path parse home barn
| path parse -c [ home barn ]
| get barn
| get extension
"#
@@ -18,7 +18,7 @@ fn splits_correctly_single_path() {
cwd: "tests", pipeline(
r#"
echo ['home/viking/spam.txt']
| path split
| path split
| last
"#
));
@@ -37,7 +37,7 @@ fn splits_correctly_with_column_path() {
['home/viking/spam.txt', 'barn/cow/moo.png']
['home/viking/eggs.txt', 'barn/goat/cheese.png']
]
| path split home barn
| path split -c [ home barn ]
| get barn
| length
"#
@@ -306,3 +306,21 @@ fn rm_wildcard_leading_dot_deletes_dotfiles() {
assert!(!files_exist_at(vec![".bar"], dirs.test()));
})
}

#[test]
fn removes_files_with_case_sensitive_glob_matches_by_default() {
Playground::setup("glob_test", |dirs, sandbox| {
sandbox.with_files(vec![EmptyFile("A0"), EmptyFile("a1")]);

nu!(
cwd: dirs.root(),
"rm glob_test/A*"
);

let deleted_path = dirs.test().join("A0");
let skipped_path = dirs.test().join("a1");

assert!(!deleted_path.exists());
assert!(skipped_path.exists());
})
}
@@ -4,22 +4,19 @@ description = "Completions for nushell"
edition = "2018"
license = "MIT"
name = "nu-completion"
version = "0.37.0"
version = "0.40.0"

[lib]
doctest = false

[dependencies]
nu-engine = { version = "0.37.0", path="../nu-engine" }
nu-data = { version = "0.37.0", path="../nu-data" }
nu-errors = { version = "0.37.0", path="../nu-errors" }
nu-parser = { version = "0.37.0", path="../nu-parser" }
nu-path = { version = "0.37.0", path="../nu-path" }
nu-protocol = { version = "0.37.0", path="../nu-protocol" }
nu-source = { version = "0.37.0", path="../nu-source" }
nu-test-support = { version = "0.37.0", path="../nu-test-support" }

dirs-next = "2.0.0"
nu-engine = { version = "0.40.0", path="../nu-engine" }
nu-data = { version = "0.40.0", path="../nu-data" }
nu-parser = { version = "0.40.0", path="../nu-parser" }
nu-path = { version = "0.40.0", path="../nu-path" }
nu-protocol = { version = "0.40.0", path="../nu-protocol" }
nu-source = { version = "0.40.0", path="../nu-source" }
nu-test-support = { version = "0.40.0", path="../nu-test-support" }
indexmap = { version="1.6.1", features=["serde-1"] }

[target.'cfg(not(target_arch = "wasm32"))'.dependencies]
@@ -238,11 +238,19 @@ pub fn completion_location(line: &str, block: &Block, pos: usize) -> Vec<Complet
}
}

output.push(loc.clone());
output.push({
let mut partial_loc = loc.clone();
partial_loc.span = Span::new(loc.span.start(), pos);
partial_loc
});
output
}
}
_ => vec![loc.clone()],
_ => vec![{
let mut partial_loc = loc.clone();
partial_loc.span = Span::new(loc.span.start(), pos);
partial_loc
}],
};
} else if pos < loc.span.start() {
break;
@@ -339,7 +347,7 @@ mod tests {
line: &str,
scope: &dyn ParserScope,
pos: usize,
) -> Vec<LocationType> {
) -> Vec<CompletionLocation> {
let (tokens, _) = lex(line, 0, nu_parser::NewlineMode::Normal);
let (lite_block, _) = parse_block(tokens);

@@ -348,9 +356,6 @@ mod tests {
scope.exit_scope();

super::completion_location(line, &block, pos)
.into_iter()
.map(|v| v.item)
.collect()
}

#[test]
@@ -362,7 +367,7 @@ mod tests {

assert_eq!(
completion_location(line, &registry, 10),
vec![LocationType::Command],
vec![LocationType::Command.spanned(Span::new(9, 10)),],
);
}

@@ -373,7 +378,7 @@ mod tests {

assert_eq!(
completion_location(line, &registry, 10),
vec![LocationType::Command],
vec![LocationType::Command.spanned(Span::new(9, 10)),],
);
}

@@ -384,7 +389,7 @@ mod tests {

assert_eq!(
completion_location(line, &registry, 4),
vec![LocationType::Command],
vec![LocationType::Command.spanned(Span::new(0, 4)),],
);
}

@@ -395,7 +400,7 @@ mod tests {

assert_eq!(
completion_location(line, &registry, 13),
vec![LocationType::Variable],
vec![LocationType::Variable.spanned(Span::new(5, 13)),],
);
}

@@ -410,7 +415,7 @@ mod tests {

assert_eq!(
completion_location(line, &registry, 7),
vec![LocationType::Flag("du".to_string())],
vec![LocationType::Flag("du".to_string()).spanned(Span::new(3, 7)),],
);
}

@@ -421,7 +426,7 @@ mod tests {

assert_eq!(
completion_location(line, &registry, 8),
vec![LocationType::Command],
vec![LocationType::Command.spanned(Span::new(6, 8)),],
);
}

@@ -433,8 +438,8 @@ mod tests {
assert_eq!(
completion_location(line, &registry, 3),
vec![
LocationType::Command,
LocationType::Argument(Some("cd".to_string()), None)
LocationType::Command.spanned(Span::new(0, 3)),
LocationType::Argument(Some("cd".to_string()), None).spanned(Span::new(3, 3)),
],
);
}
@@ -451,8 +456,8 @@ mod tests {
assert_eq!(
completion_location(line, &registry, 3),
vec![
LocationType::Argument(Some("du".to_string()), None),
LocationType::Flag("du".to_string()),
LocationType::Argument(Some("du".to_string()), None).spanned(Span::new(3, 4)),
LocationType::Flag("du".to_string()).spanned(Span::new(3, 4)),
],
);
}
@@ -467,8 +472,24 @@ mod tests {
assert_eq!(
completion_location(line, &registry, 6),
vec![
LocationType::Command,
LocationType::Argument(Some("echo".to_string()), None)
LocationType::Command.spanned(Span::new(0, 6)),
LocationType::Argument(Some("echo".to_string()), None).spanned(Span::new(5, 6)),
],
);
}

#[test]
fn completes_argument_when_cursor_inside_argument() {
let registry: VecRegistry =
vec![Signature::build("echo").rest("rest", SyntaxShape::Any, "the values to echo")]
.into();
let line = "echo 123";

assert_eq!(
completion_location(line, &registry, 6),
vec![
LocationType::Command.spanned(Span::new(0, 6)),
LocationType::Argument(Some("echo".to_string()), None).spanned(Span::new(5, 6)),
],
);
}
@@ -4,7 +4,7 @@ description = "CLI for nushell"
edition = "2018"
license = "MIT"
name = "nu-data"
version = "0.37.0"
version = "0.40.0"

[lib]
doctest = false
@@ -16,30 +16,25 @@ chrono = "0.4.19"
common-path = "1.0.0"
derive-new = "0.5.8"
directories-next = "2.0.0"
dirs-next = "2.0.0"
getset = "0.1.1"
indexmap = { version="1.6.1", features=["serde-1"] }
log = "0.4.14"
num-bigint = { version="0.3.1", features=["serde"] }
num-format = "0.4.0"
num-traits = "0.2.14"
query_interface = "0.3.5"
serde = { version="1.0.123", features=["derive"] }
sha2 = "0.9.3"
sys-locale = "0.1.0"
toml = "0.5.8"

nu-errors = { version = "0.37.0", path="../nu-errors" }
nu-path = { version = "0.37.0", path="../nu-path" }
nu-protocol = { version = "0.37.0", path="../nu-protocol" }
nu-source = { version = "0.37.0", path="../nu-source" }
nu-table = { version = "0.37.0", path="../nu-table" }
nu-test-support = { version = "0.37.0", path="../nu-test-support" }
nu-value-ext = { version = "0.37.0", path="../nu-value-ext" }
nu-ansi-term = { version = "0.37.0", path="../nu-ansi-term" }

[target.'cfg(unix)'.dependencies]
users = "0.11.0"
nu-errors = { version = "0.40.0", path="../nu-errors" }
nu-path = { version = "0.40.0", path="../nu-path" }
nu-protocol = { version = "0.40.0", path="../nu-protocol" }
nu-source = { version = "0.40.0", path="../nu-source" }
nu-table = { version = "0.40.0", path="../nu-table" }
nu-test-support = { version = "0.40.0", path="../nu-test-support" }
nu-value-ext = { version = "0.40.0", path="../nu-value-ext" }
nu-ansi-term = { version = "0.40.0", path="../nu-ansi-term" }

[features]
dataframe = ["nu-protocol/dataframe"]
@@ -102,7 +102,6 @@ pub fn string_to_lookup_value(str_prim: &str) -> String {
"separator_color" => "separator_color".to_string(),
"header_align" => "header_align".to_string(),
"header_color" => "header_color".to_string(),
"header_bold" => "header_bold".to_string(),
"header_style" => "header_style".to_string(),
"index_color" => "index_color".to_string(),
"leading_trailing_space_bg" => "leading_trailing_space_bg".to_string(),
@@ -144,7 +143,6 @@ pub fn get_color_config(config: &NuConfig) -> HashMap<String, Style> {
hm.insert("separator_color".to_string(), Color::White.normal());
hm.insert("header_align".to_string(), Color::Green.bold());
hm.insert("header_color".to_string(), Color::Green.bold());
hm.insert("header_bold".to_string(), Color::Green.bold());
hm.insert("header_style".to_string(), Style::default());
hm.insert("index_color".to_string(), Color::Green.bold());
hm.insert(
@@ -204,9 +202,6 @@ pub fn get_color_config(config: &NuConfig) -> HashMap<String, Style> {
"header_color" => {
update_hashmap(key, value, &mut hm);
}
"header_bold" => {
update_hashmap(key, value, &mut hm);
}
"header_style" => {
update_hashmap(key, value, &mut hm);
}
@@ -358,14 +353,7 @@ pub fn style_primitive(primitive: &str, color_hm: &HashMap<String, Style>) -> Te
let style = color_hm.get("header_color");
match style {
Some(s) => TextStyle::with_style(Alignment::Center, *s),
None => TextStyle::default_header(),
}
}
"header_bold" => {
let style = color_hm.get("header_bold");
match style {
Some(s) => TextStyle::with_style(Alignment::Center, *s),
None => TextStyle::default_header(),
None => TextStyle::default_header().bold(Some(true)),
}
}
"header_style" => {
@@ -4,39 +4,32 @@ description = "Core commands for nushell"
edition = "2018"
license = "MIT"
name = "nu-engine"
version = "0.37.0"
version = "0.40.0"

[dependencies]
nu-data = { version = "0.37.0", path="../nu-data" }
nu-errors = { version = "0.37.0", path="../nu-errors" }
nu-parser = { version = "0.37.0", path="../nu-parser" }
nu-plugin = { version = "0.37.0", path="../nu-plugin" }
nu-protocol = { version = "0.37.0", path="../nu-protocol" }
nu-source = { version = "0.37.0", path="../nu-source" }
nu-stream = { version = "0.37.0", path="../nu-stream" }
nu-value-ext = { version = "0.37.0", path="../nu-value-ext" }
nu-ansi-term = { version = "0.37.0", path="../nu-ansi-term" }
nu-test-support = { version = "0.37.0", path="../nu-test-support" }
nu-path = { version = "0.37.0", path="../nu-path" }
nu-data = { version = "0.40.0", path="../nu-data" }
nu-errors = { version = "0.40.0", path="../nu-errors" }
nu-parser = { version = "0.40.0", path="../nu-parser" }
nu-plugin = { version = "0.40.0", path="../nu-plugin" }
nu-protocol = { version = "0.40.0", path="../nu-protocol" }
nu-source = { version = "0.40.0", path="../nu-source" }
nu-stream = { version = "0.40.0", path="../nu-stream" }
nu-value-ext = { version = "0.40.0", path="../nu-value-ext" }
nu-ansi-term = { version = "0.40.0", path="../nu-ansi-term" }
nu-test-support = { version = "0.40.0", path="../nu-test-support" }
nu-path = { version = "0.40.0", path="../nu-path" }

trash = { version="1.3.0", optional=true }
which = { version="4.0.2", optional=true }
codespan-reporting = "0.11.0"
dyn-clone = "1.0.4"
ansi_term = "0.12.1"
async-recursion = "0.3.2"
async-trait = "0.1.42"
bigdecimal = { package = "bigdecimal-rs", version = "0.2.1", features = ["serde"] }
bytes = "0.5.6"
bytes = "1.1.0"
chrono = { version="0.4.19", features=["serde"] }
derive-new = "0.5.8"
dirs-next = "2.0.0"
encoding_rs = "0.8.28"
filesize = "0.2.0"
fs_extra = "1.2.0"
futures = { version="0.3.12", features=["compat", "io-compat"] }
futures-util = "0.3.12"
futures_codec = "0.4.1"
getset = "0.1.1"
glob = "0.3.0"
indexmap = { version="1.6.1", features=["serde-1"] }
@@ -44,8 +37,6 @@ itertools = "0.10.0"
lazy_static = "1.*"
log = "0.4.14"
num-bigint = { version="0.3.1", features=["serde"] }
num-format = "0.4.0"
num-traits = "0.2.14"
parking_lot = "0.11.1"
rayon = "1.5.0"
serde = { version="1.0.123", features=["derive"] }
@@ -59,7 +50,7 @@ umask = "1.0.0"
users = "0.11.0"

[dev-dependencies]
nu-test-support = { version = "0.37.0", path="../nu-test-support" }
nu-test-support = { version = "0.40.0", path="../nu-test-support" }
hamcrest2 = "0.3.0"

[features]
@@ -177,6 +177,9 @@ impl Iterator for InternalIterator {
CommandAction::NextShell => {
self.context.shell_manager().next();
}
CommandAction::GotoShell(i) => {
self.context.shell_manager().goto(i);
}
CommandAction::LeaveShell(code) => {
self.context.shell_manager().remove_at_current();
if self.context.shell_manager().is_empty() {
@@ -28,6 +28,12 @@ use nu_errors::ShellError;
use nu_protocol::{Primitive, ReturnSuccess, UntaggedValue};
use nu_source::Tagged;

const GLOB_PARAMS: glob::MatchOptions = glob::MatchOptions {
case_sensitive: true,
require_literal_separator: false,
require_literal_leading_dot: false,
};

#[derive(Eq, PartialEq, Clone, Copy)]
pub enum FilesystemShellMode {
Cli,
@@ -159,7 +165,7 @@ impl Shell for FilesystemShell {

let hidden_dir_specified = is_hidden_dir(&path);

let mut paths = glob::glob(&path.to_string_lossy())
let mut paths = glob::glob_with(&path.to_string_lossy(), GLOB_PARAMS)
.map_err(|e| ShellError::labeled_error(e.to_string(), "invalid pattern", &p_tag))?
.peekable();

@@ -352,7 +358,7 @@ impl Shell for FilesystemShell {
let source = path.join(&src.item);
let destination = path.join(&dst.item);

let sources: Vec<_> = match glob::glob(&source.to_string_lossy()) {
let sources: Vec<_> = match glob::glob_with(&source.to_string_lossy(), GLOB_PARAMS) {
Ok(files) => files.collect(),
Err(e) => {
return Err(ShellError::labeled_error(
@@ -521,8 +527,8 @@ impl Shell for FilesystemShell {
let source = path.join(&src.item);
let destination = path.join(&dst.item);

let mut sources =
glob::glob(&source.to_string_lossy()).map_or_else(|_| Vec::new(), Iterator::collect);
let mut sources = glob::glob_with(&source.to_string_lossy(), GLOB_PARAMS)
.map_or_else(|_| Vec::new(), Iterator::collect);

if sources.is_empty() {
return Err(ShellError::labeled_error(
@@ -650,7 +656,7 @@ impl Shell for FilesystemShell {
&path.to_string_lossy(),
glob::MatchOptions {
require_literal_leading_dot: true,
..Default::default()
..GLOB_PARAMS
},
) {
Ok(files) => {
@@ -101,6 +101,7 @@ impl FromValue for i64 {
v.as_i64()
}
}

impl FromValue for Tagged<i64> {
fn from_value(v: &Value) -> Result<Self, ShellError> {
let tag = v.tag.clone();
@@ -136,6 +136,16 @@ impl ShellManager {
self.set_path(self.path())
}

pub fn goto(&self, i: usize) {
{
let shell_len = self.shells.lock().len();
if i < shell_len {
self.current_shell.store(i, Ordering::SeqCst);
}
}
self.set_path(self.path())
}

pub fn homedir(&self) -> Option<PathBuf> {
let env = self.shells.lock();
@@ -4,14 +4,14 @@ description = "Core error subsystem for Nushell"
edition = "2018"
license = "MIT"
name = "nu-errors"
version = "0.37.0"
version = "0.40.0"

[lib]
doctest = false

[dependencies]
nu-source = { path="../nu-source", version = "0.37.0" }
nu-ansi-term = { version = "0.37.0", path="../nu-ansi-term" }
nu-source = { path="../nu-source", version = "0.40.0" }
nu-ansi-term = { version = "0.40.0", path="../nu-ansi-term" }

bigdecimal = { package = "bigdecimal-rs", version = "0.2.1", features = ["serde"] }
codespan-reporting = { version="0.11.0", features=["serialization"] }
@@ -4,7 +4,7 @@ description = "Fork of serde-hjson"
edition = "2018"
license = "MIT"
name = "nu-json"
version = "0.37.0"
version = "0.40.0"

# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

@@ -20,6 +20,6 @@ lazy_static = "1"
linked-hash-map = { version="0.5", optional=true }

[dev-dependencies]
nu-path = { version = "0.37.0", path="../nu-path" }
nu-test-support = { version = "0.37.0", path="../nu-test-support" }
nu-path = { version = "0.40.0", path="../nu-path" }
nu-test-support = { version = "0.40.0", path="../nu-test-support" }
serde_json = "1.0.39"
@@ -4,26 +4,23 @@ description = "Nushell parser"
edition = "2018"
license = "MIT"
name = "nu-parser"
version = "0.37.0"
version = "0.40.0"

[dependencies]
bigdecimal = { package = "bigdecimal-rs", version = "0.2.1", features = ["serde"] }
codespan-reporting = "0.11.0"
derive-new = "0.5.8"
indexmap = { version="1.6.1", features=["serde-1"] }
log = "0.4"
num-bigint = { version="0.3.1", features=["serde"] }
num-traits = "0.2.14"
serde = "1.0"
itertools = "0.10.0"
smart-default = "0.6.0"

nu-errors = { version = "0.37.0", path="../nu-errors" }
nu-data = { version = "0.37.0", path="../nu-data" }
nu-path = { version = "0.37.0", path="../nu-path" }
nu-protocol = { version = "0.37.0", path="../nu-protocol" }
nu-source = { version = "0.37.0", path="../nu-source" }
nu-test-support = { version = "0.37.0", path="../nu-test-support" }
nu-errors = { version = "0.40.0", path="../nu-errors" }
nu-data = { version = "0.40.0", path="../nu-data" }
nu-path = { version = "0.40.0", path="../nu-path" }
nu-protocol = { version = "0.40.0", path="../nu-protocol" }
nu-source = { version = "0.40.0", path="../nu-source" }
nu-test-support = { version = "0.40.0", path="../nu-test-support" }

[features]
stable = []
@@ -1559,11 +1559,16 @@ fn parse_internal_command(
if error.is_none() {
error = err;
}
} else if error.is_none() {
error = Some(ParseError::argument_error(
lite_cmd.parts[0].clone(),
ArgumentError::MissingValueForName(full_name.to_owned()),
));
} else {
if error.is_none() {
error = Some(ParseError::argument_error(
lite_cmd.parts[0].clone(),
ArgumentError::MissingValueForName(
full_name.to_owned(),
),
));
}
break;
}
}
}
@@ -61,12 +61,11 @@ fn find_source_file(

if let Some(dir) = lib_dirs {
for lib_path in dir.into_iter().flatten() {
let path = canonicalize_with(&file, lib_path).map_err(|e| {
ParseError::general_error(
format!("Can't load source file. Reason: {}", e.to_string()),
"Can't load this file".spanned(file_span),
)
})?;
let path = if let Ok(p) = canonicalize_with(&file, lib_path) {
p
} else {
continue;
};

if let Ok(contents) = std::fs::read_to_string(&path) {
return parse(&contents, 0, scope);
@@ -4,11 +4,9 @@ description = "Path handling library for Nushell"
edition = "2018"
license = "MIT"
name = "nu-path"
version = "0.37.0"
version = "0.40.0"

[dependencies]
dirs-next = "2.0.0"
dunce = "1.0.1"

[dev-dependencies]
nu-test-support = { version = "0.37.0", path="../nu-test-support" }
@@ -1,3 +1 @@
mod canonicalize;
mod expand_path;
mod util;
@@ -4,21 +4,18 @@ description = "Nushell Plugin"
edition = "2018"
license = "MIT"
name = "nu-plugin"
version = "0.37.0"
version = "0.40.0"

[lib]
doctest = false

[dependencies]
nu-errors = { path="../nu-errors", version = "0.37.0" }
nu-protocol = { path="../nu-protocol", version = "0.37.0" }
nu-source = { path="../nu-source", version = "0.37.0" }
nu-test-support = { path="../nu-test-support", version = "0.37.0" }
nu-value-ext = { path="../nu-value-ext", version = "0.37.0" }

bigdecimal = { package = "bigdecimal-rs", version = "0.2.1", features = ["serde"] }
nu-errors = { path="../nu-errors", version = "0.40.0" }
nu-protocol = { path="../nu-protocol", version = "0.40.0" }
nu-source = { path="../nu-source", version = "0.40.0" }
nu-test-support = { path="../nu-test-support", version = "0.40.0" }
nu-value-ext = { path="../nu-value-ext", version = "0.40.0" }
indexmap = { version="1.6.1", features=["serde-1"] }
num-bigint = { version="0.3.1", features=["serde"] }
serde = { version="1.0", features=["derive"] }
serde_json = "1.0"
@@ -55,7 +55,6 @@ pub fn serve_plugin(plugin: &mut dyn Plugin) {
match command {
Ok(NuCommand::config) => {
send_response(plugin.config());
return;
}
Ok(NuCommand::begin_filter { params }) => {
send_response(plugin.begin_filter(params));
@@ -65,23 +64,19 @@ pub fn serve_plugin(plugin: &mut dyn Plugin) {
}
Ok(NuCommand::end_filter) => {
send_response(plugin.end_filter());
return;
}

Ok(NuCommand::sink { params }) => {
plugin.sink(params.0, params.1);
return;
}
Ok(NuCommand::quit) => {
plugin.quit();
return;
}
e => {
send_response(ShellError::untagged_runtime_error(format!(
"Could not handle plugin message: {} {:?}",
input, e
)));
return;
}
}
}
@@ -4,7 +4,7 @@ description = "Pretty hex dump of bytes slice in the common style."
edition = "2018"
license = "MIT"
name = "nu-pretty-hex"
version = "0.37.0"
version = "0.40.0"

[lib]
doctest = false
@@ -16,11 +16,11 @@ name = "nu_pretty_hex"
path = "src/main.rs"

[dependencies]
nu-ansi-term = { path="../nu-ansi-term", version = "0.37.0" }
nu-ansi-term = { path="../nu-ansi-term", version = "0.40.0" }
rand = "0.8.3"

[dev-dependencies]
heapless = "0.6.1"
heapless = { version = "0.7.8", default-features = false }

# [features]
# default = ["alloc"]
@@ -166,7 +166,7 @@ fn test_hex_write_with_simple_config() {
core::str::from_utf8(b"00 01 02 03 04 05 06 07 08 09 0a 0b 0c 0d 0e 0f").unwrap();
// let expected =
// "\u{1b}[38;5;242m00\u{1b}[0m \u{1b}[1;35m01\u{1b}[0m \u{1b}[1;35m02\u{1b}[0m \u{1b}[1;";
let mut buffer = heapless::Vec::<u8, heapless::consts::U50>::new();
let mut buffer = heapless::Vec::<u8, 50>::new();

hex_write(&mut buffer, &bytes, config, None).unwrap();
@@ -4,7 +4,7 @@ description = "Core values and protocols for Nushell"
edition = "2018"
license = "MIT"
name = "nu-protocol"
version = "0.37.0"
version = "0.40.0"

[lib]
doctest = false
@@ -18,23 +18,18 @@ derive-new = "0.5.8"
getset = "0.1.1"
indexmap = { version="1.6.1", features=["serde-1"] }
log = "0.4.14"
nu-errors = { path="../nu-errors", version = "0.37.0" }
nu-source = { path="../nu-source", version = "0.37.0" }
nu-errors = { path="../nu-errors", version = "0.40.0" }
nu-source = { path="../nu-source", version = "0.40.0" }
num-bigint = { version="0.3.1", features=["serde"] }
num-integer = "0.1.44"
num-traits = "0.2.14"
serde = { version="1.0", features=["derive"] }
serde_bytes = "0.11.5"

# implement conversions
serde_json = "1.0"
serde_yaml = "0.8.16"
toml = "0.5.8"

[dependencies.polars]
version = "0.15.1"
version = "0.17.0"
optional = true
features = ["default", "serde", "rows", "strings", "checked_arithmetic", "object", "dtype-duration-ns"]
features = ["default", "serde", "rows", "strings", "checked_arithmetic", "object", "dtype-date", "dtype-datetime", "dtype-time"]

[features]
dataframe = ["polars"]
@@ -21,7 +21,7 @@ pub fn compute_between_dataframes(
if let (UntaggedValue::DataFrame(lhs), UntaggedValue::DataFrame(rhs)) =
(&left.value, &right.value)
{
let operation_span = left.tag.span.until(right.tag.span);
let operation_span = right.tag.span.merge(left.tag.span);
match (lhs.is_series(), rhs.is_series()) {
(true, true) => {
let lhs = &lhs
@@ -603,7 +603,7 @@ where
{
match series.dtype() {
DataType::UInt32 | DataType::Int32 | DataType::UInt64 => {
let to_i64 = series.cast_with_dtype(&DataType::Int64);
let to_i64 = series.cast(&DataType::Int64);

match to_i64 {
Ok(series) => {
@@ -661,7 +661,7 @@ where
{
match series.dtype() {
DataType::Float32 => {
let to_f64 = series.cast_with_dtype(&DataType::Float64);
let to_f64 = series.cast(&DataType::Float64);

match to_f64 {
Ok(series) => {
@@ -731,7 +731,7 @@ where
{
match series.dtype() {
DataType::UInt32 | DataType::Int32 | DataType::UInt64 => {
let to_i64 = series.cast_with_dtype(&DataType::Int64);
let to_i64 = series.cast(&DataType::Int64);

match to_i64 {
Ok(series) => {
@@ -789,7 +789,7 @@ where
{
match series.dtype() {
DataType::Float32 => {
let to_f64 = series.cast_with_dtype(&DataType::Float64);
let to_f64 = series.cast(&DataType::Float64);

match to_f64 {
Ok(series) => {
@@ -8,8 +8,8 @@ use nu_errors::ShellError;
use nu_source::{Span, Tag};
use num_bigint::BigInt;
use polars::prelude::{
DataFrame, DataType, Date64Type, Int64Type, IntoSeries, NamedFrom, NewChunkedArray, ObjectType,
PolarsNumericType, Series, TimeUnit,
DataFrame, DataType, DatetimeChunked, Int64Type, IntoSeries, NamedFrom, NewChunkedArray,
ObjectType, PolarsNumericType, Series,
};
use std::ops::{Deref, DerefMut};

@@ -310,8 +310,8 @@ pub fn create_column(
}
}
}
DataType::Date32 => {
let casted = series.date32().map_err(|e| {
DataType::Date => {
let casted = series.date().map_err(|e| {
ShellError::labeled_error(
"Casting error",
format!("casting error: {}", e),
@@ -347,8 +347,8 @@ pub fn create_column(

Ok(Column::new(casted.name().into(), values))
}
DataType::Date64 => {
let casted = series.date64().map_err(|e| {
DataType::Datetime => {
let casted = series.datetime().map_err(|e| {
ShellError::labeled_error(
"Casting error",
format!("casting error: {}", e),
@@ -384,8 +384,8 @@ pub fn create_column(

Ok(Column::new(casted.name().into(), values))
}
DataType::Time64(timeunit) | DataType::Duration(timeunit) => {
let casted = series.time64_nanosecond().map_err(|e| {
DataType::Time => {
let casted = series.time().map_err(|e| {
ShellError::labeled_error(
"Casting error",
format!("casting error: {}", e),
@@ -398,14 +398,7 @@ pub fn create_column(
.skip(from_row)
.take(size)
.map(|v| match v {
Some(a) => {
let nanoseconds = match timeunit {
TimeUnit::Second => a / 1_000_000_000,
TimeUnit::Millisecond => a / 1_000_000,
TimeUnit::Microsecond => a / 1_000,
TimeUnit::Nanosecond => a,
};

Some(nanoseconds) => {
let untagged = if let Some(bigint) = BigInt::from_i64(nanoseconds) {
UntaggedValue::Primitive(Primitive::Duration(bigint))
} else {
@@ -633,7 +626,8 @@ pub fn from_parsed_columns(
}
});

let res = ChunkedArray::<Date64Type>::new_from_opt_iter(&name, it);
let res: DatetimeChunked =
ChunkedArray::<Int64Type>::new_from_opt_iter(&name, it).into();

df_series.push(res.into_series())
}
@@ -87,7 +87,7 @@ impl PartialEq for NuDataFrame {
// Casting needed to compare other numeric types with nushell numeric type.
// In nushell we only have i64 integer numeric types and any array created
// with nushell untagged primitives will be of type i64
DataType::UInt32 => match self_series.cast_with_dtype(&DataType::Int64) {
DataType::UInt32 => match self_series.cast(&DataType::Int64) {
Ok(series) => series,
Err(_) => return false,
},
@@ -28,6 +28,8 @@ pub enum CommandAction {
PreviousShell,
/// Go to the next shell in the shell ring buffer
NextShell,
/// Jump to the specified shell in the shell ring buffer
GotoShell(usize),
/// Leave the current shell. If it's the last shell, exit out of Nu
LeaveShell(i32),
}
@@ -51,6 +53,7 @@ impl PrettyDebug for CommandAction {
CommandAction::AddPlugins(..) => DbgDocBldr::description("add plugins"),
CommandAction::PreviousShell => DbgDocBldr::description("previous shell"),
CommandAction::NextShell => DbgDocBldr::description("next shell"),
CommandAction::GotoShell(_) => DbgDocBldr::description("goto shell"),
CommandAction::LeaveShell(_) => DbgDocBldr::description("leave shell"),
CommandAction::UnloadConfig(cfg) => {
DbgDocBldr::description(format!("unload config {:?}", cfg))
@@ -1,6 +1,6 @@
[package]
name = "nu-serde"
version = "0.37.0"
version = "0.40.0"
edition = "2018"
authors = ["The Nu Project Contributors"]
description = "Turn any value into a nu-protocol::Value with serde"
@@ -10,8 +10,8 @@ documentation = "https://docs.rs/nu-serde"

[dependencies]
bigdecimal = { package = "bigdecimal-rs", version = "0.2.1", features = ["serde"] }
nu-protocol = { version = "0.37.0", path = "../nu-protocol" }
nu-source = { version = "0.37.0", path = "../nu-source" }
nu-protocol = { version = "0.40.0", path = "../nu-protocol" }
nu-source = { version = "0.40.0", path = "../nu-source" }
serde = "1"
thiserror = "1"
@@ -4,7 +4,7 @@ description = "A source string characterizer for Nushell"
edition = "2018"
license = "MIT"
name = "nu-source"
version = "0.37.0"
version = "0.40.0"

[lib]
doctest = false
@@ -605,6 +605,16 @@ impl Span {
Span::new(self.start, other.end)
}

pub fn merge(&self, other: impl Into<Span>) -> Span {
let other = other.into();

if other.end < self.start {
other.until(self)
} else {
self.until(other)
}
}

/// Returns a new Span by merging a later Span with the current Span.
///
/// If the given Span is of the None variant,
@@ -4,14 +4,12 @@ description = "Nushell stream"
edition = "2018"
license = "MIT"
name = "nu-stream"
version = "0.37.0"
version = "0.40.0"

[dependencies]
nu-errors = { version = "0.37.0", path="../nu-errors" }
nu-protocol = { version = "0.37.0", path="../nu-protocol" }
nu-source = { version = "0.37.0", path="../nu-source" }

futures = { version="0.3.12", features=["compat", "io-compat"] }
nu-errors = { version = "0.40.0", path="../nu-errors" }
nu-protocol = { version = "0.40.0", path="../nu-protocol" }
nu-source = { version = "0.40.0", path="../nu-source" }

[features]
stable = []
@@ -4,7 +4,7 @@ description = "Nushell table printing"
edition = "2018"
license = "MIT"
name = "nu-table"
version = "0.37.0"
version = "0.40.0"

# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[[bin]]
@@ -12,7 +12,9 @@ name = "table"
path = "src/main.rs"

[dependencies]
nu-ansi-term = { version = "0.37.0", path="../nu-ansi-term" }
atty = "0.2.14"
nu-ansi-term = { version = "0.40.0", path="../nu-ansi-term" }

regex = "1.4"
strip-ansi-escapes = "0.1.1"
unicode-width = "0.1.8"
@@ -27,8 +27,18 @@ fn main() {
let color_hm: HashMap<String, nu_ansi_term::Style> = HashMap::new();
// Capture the table as a string
let output_table = draw_table(&table, width, &color_hm);
// Draw the table
println!("{}", output_table)

if atty::is(atty::Stream::Stdout) {
// Draw the table with ansi colors
println!("{}", output_table)
} else {
// Draw the table without ansi colors
if let Ok(bytes) = strip_ansi_escapes::strip(&output_table) {
println!("{}", String::from_utf8_lossy(&bytes))
} else {
println!("{}", output_table)
}
}
}

fn make_table_data() -> (Vec<&'static str>, Vec<&'static str>) {
@@ -918,7 +918,17 @@ impl WrappedTable {
output.push_str(&self.print_separator(SeparatorPosition::Bottom, color_hm));
}

output
if atty::is(atty::Stream::Stdout) {
// Draw the table with ansi colors
output
} else {
// Draw the table without ansi colors
if let Ok(bytes) = strip_ansi_escapes::strip(&output) {
String::from_utf8_lossy(&bytes).to_string()
} else {
output
}
}
}
}
@@ -4,21 +4,19 @@ description = "Support for writing Nushell tests"
edition = "2018"
license = "MIT"
name = "nu-test-support"
version = "0.37.0"
version = "0.40.0"

[lib]
doctest = false

[dependencies]
nu-errors = { version = "0.37.0", path="../nu-errors" }
nu-path = { version = "0.37.0", path="../nu-path" }
nu-protocol = { path="../nu-protocol", version = "0.37.0" }
nu-source = { path="../nu-source", version = "0.37.0" }
nu-value-ext = { version = "0.37.0", path="../nu-value-ext" }
nu-errors = { version = "0.40.0", path="../nu-errors" }
nu-path = { version = "0.40.0", path="../nu-path" }
nu-protocol = { path="../nu-protocol", version = "0.40.0" }
nu-source = { path="../nu-source", version = "0.40.0" }

bigdecimal = { package = "bigdecimal-rs", version = "0.2.1", features = ["serde"] }
chrono = "0.4.19"
dunce = "1.0.1"
getset = "0.1.1"
glob = "0.3.0"
indexmap = { version="1.6.1", features=["serde-1"] }